From 275ad212fa89c902473b0c7b3b174b81fa9a370b Mon Sep 17 00:00:00 2001 From: ketiltrout Date: Mon, 4 Nov 2024 11:59:11 -0800 Subject: [PATCH] fix(db): Deal with 3.12+ naive UTC datetimes peewee-3.17.1 has already done the work of abstracting `utcnow` and `utcfromtimestamp` for the deprecation in 3.12 for us, so let's just import those into `alpenhorn.db` and call it a day. Closes #191 --- alpenhorn/db/__init__.py | 4 ++++ alpenhorn/db/acquisition.py | 2 +- alpenhorn/db/archive.py | 4 ++-- alpenhorn/db/storage.py | 2 +- alpenhorn/io/_default_asyncs.py | 9 ++++----- alpenhorn/io/ioutil.py | 17 +++++++++-------- alpenhorn/io/lustrehsm.py | 31 +++++++++++++++---------------- alpenhorn/server/auto_import.py | 7 +++---- alpenhorn/server/update.py | 11 ++++++++--- pyproject.toml | 2 +- tests/db/test_acquisition.py | 5 ++--- tests/db/test_archive.py | 6 ++---- tests/db/test_storage.py | 5 ++--- tests/io/test_ioutil.py | 4 ++-- tests/io/test_ioutil_crd.py | 7 ++++--- tests/io/test_lustrehsmnode.py | 10 +++++----- tests/server/test_auto_import.py | 12 ++++-------- tests/server/test_update_node.py | 13 +++++++------ 18 files changed, 76 insertions(+), 75 deletions(-) diff --git a/alpenhorn/db/__init__.py b/alpenhorn/db/__init__.py index c21c9f5b4..1d8d25368 100644 --- a/alpenhorn/db/__init__.py +++ b/alpenhorn/db/__init__.py @@ -45,3 +45,7 @@ # Prototypes from ._base import EnumField, base_model + +# Naive-UTC stuff courtesy peewee. These were originally in datetime +# but were deprecated in 3.12 as too confusing. +from peewee import utcnow as utcnow, utcfromtimestamp as utcfromtimestamp diff --git a/alpenhorn/db/acquisition.py b/alpenhorn/db/acquisition.py index 2c9282c0a..6ae1779f8 100644 --- a/alpenhorn/db/acquisition.py +++ b/alpenhorn/db/acquisition.py @@ -49,7 +49,7 @@ class ArchiveFile(base_model): md5sum = pw.CharField(null=True, max_length=32) # Note: default here is the now method itself (i.e. "now", not "now()"). # Will be evaulated by peewee at row-creation time. 
- registered = pw.DateTimeField(default=datetime.datetime.utcnow) + registered = pw.DateTimeField(default=pw.utcnow) class Meta: # (acq,name) is unique diff --git a/alpenhorn/db/archive.py b/alpenhorn/db/archive.py index e749837cc..29d808bc2 100644 --- a/alpenhorn/db/archive.py +++ b/alpenhorn/db/archive.py @@ -48,7 +48,7 @@ class ArchiveFileCopy(base_model): wants_file = EnumField(["Y", "M", "N"], default="Y") ready = pw.BooleanField(default=False) size_b = pw.BigIntegerField(null=True) - last_update = pw.DateTimeField(default=datetime.datetime.utcnow) + last_update = pw.DateTimeField(default=pw.utcnow) @property def path(self) -> pathlib.Path: @@ -91,7 +91,7 @@ class ArchiveFileCopyRequest(base_model): node_from = pw.ForeignKeyField(StorageNode, backref="requests_from") completed = pw.BooleanField(default=False) cancelled = pw.BooleanField(default=False) - timestamp = pw.DateTimeField(default=datetime.datetime.utcnow, null=True) + timestamp = pw.DateTimeField(default=pw.utcnow, null=True) transfer_started = pw.DateTimeField(null=True) transfer_completed = pw.DateTimeField(null=True) diff --git a/alpenhorn/db/storage.py b/alpenhorn/db/storage.py index 2be2fb023..2fc243aa0 100644 --- a/alpenhorn/db/storage.py +++ b/alpenhorn/db/storage.py @@ -367,7 +367,7 @@ def update_avail_gb(self, new_avail: int | None) -> None: self.avail_gb = None else: self.avail_gb = new_avail / 2**30 - self.avail_gb_last_checked = datetime.datetime.utcnow() + self.avail_gb_last_checked = pw.utcnow() # Update the DB with the free space but don't clobber changes made # manually to the database diff --git a/alpenhorn/io/_default_asyncs.py b/alpenhorn/io/_default_asyncs.py index acf276ea8..16b926f50 100644 --- a/alpenhorn/io/_default_asyncs.py +++ b/alpenhorn/io/_default_asyncs.py @@ -8,10 +8,9 @@ import shutil import logging import pathlib -from datetime import datetime from . import ioutil -from ..db import ArchiveFileCopy, ArchiveFileCopyRequest +from ..db import ArchiveFileCopy, ArchiveFileCopyRequest, utcnow from ..server.update import RemoteNode if TYPE_CHECKING: @@ -224,7 +223,7 @@ def check_async(task: Task, io: BaseNodeIO, copy: ArchiveFileCopy) -> None: log.info( f"Updating file copy #{copy.id} for file {copyname} on node {io.node.name}." ) - copy.last_update = datetime.utcnow() + copy.last_update = utcnow() copy.save() @@ -288,7 +287,7 @@ def delete_async( # Update the DB ArchiveFileCopy.update( - has_file="N", wants_file="N", last_update=datetime.utcnow() + has_file="N", wants_file="N", last_update=utcnow() ).where(ArchiveFileCopy.id == copy.id).execute() @@ -341,7 +340,7 @@ def group_search_async( has_file="M", wants_file="Y", ready=False, - last_update=datetime.utcnow(), + last_update=utcnow(), ) .where( ArchiveFileCopy.file == req.file, diff --git a/alpenhorn/io/ioutil.py b/alpenhorn/io/ioutil.py index 17fe10500..48a6ceb52 100644 --- a/alpenhorn/io/ioutil.py +++ b/alpenhorn/io/ioutil.py @@ -8,7 +8,6 @@ import errno import pathlib import peewee as pw -from datetime import datetime from tempfile import TemporaryDirectory from .. 
import db @@ -17,6 +16,8 @@ ArchiveFileCopyRequest, StorageNode, StorageTransferAction, + utcfromtimestamp, + utcnow, ) from ..scheduler import threadlocal from ..common import config, util @@ -369,7 +370,7 @@ def post_add(node: StorageNode, file_: ArchiveFile) -> None: StorageTransferAction.autoclean == True, # noqa: E712 ): count = ( - ArchiveFileCopy.update(wants_file="N", last_update=datetime.utcnow()) + ArchiveFileCopy.update(wants_file="N", last_update=utcnow()) .where( ArchiveFileCopy.file == file_, ArchiveFileCopy.node == edge.node_from, @@ -427,7 +428,7 @@ def copy_request_done( if check_src: # If the copy didn't work, then the remote file may be corrupted. log.error(f"Copy failed: {stderr}; Marking source file suspect.") - ArchiveFileCopy.update(has_file="M", last_update=datetime.utcnow()).where( + ArchiveFileCopy.update(has_file="M", last_update=utcnow()).where( ArchiveFileCopy.file == req.file, ArchiveFileCopy.node == req.node_from, ).execute() @@ -448,7 +449,7 @@ def copy_request_done( f"MD5 mismatch on node {io.node.name}; " f"Marking source file {req.file.name} on node {req.node_from} suspect." ) - ArchiveFileCopy.update(has_file="M", last_update=datetime.utcnow()).where( + ArchiveFileCopy.update(has_file="M", last_update=utcnow()).where( ArchiveFileCopy.file == req.file, ArchiveFileCopy.node == req.node_from, ).execute() @@ -474,7 +475,7 @@ def copy_request_done( wants_file="Y", ready=True, size_b=size, - last_update=datetime.utcnow(), + last_update=utcnow(), ).execute() except pw.IntegrityError: ArchiveFileCopy.update( @@ -482,7 +483,7 @@ def copy_request_done( wants_file="Y", ready=True, size_b=size, - last_update=datetime.utcnow(), + last_update=utcnow(), ).where( ArchiveFileCopy.file == req.file, ArchiveFileCopy.node == io.node ).execute() @@ -490,8 +491,8 @@ def copy_request_done( # Mark AFCR as completed ArchiveFileCopyRequest.update( completed=True, - transfer_started=datetime.utcfromtimestamp(start_time), - transfer_completed=datetime.utcfromtimestamp(end_time), + transfer_started=utcfromtimestamp(start_time), + transfer_completed=utcfromtimestamp(end_time), ).where(ArchiveFileCopyRequest.id == req.id).execute() # Run post-add actions, if any diff --git a/alpenhorn/io/lustrehsm.py b/alpenhorn/io/lustrehsm.py index 3baee5162..5e527f132 100644 --- a/alpenhorn/io/lustrehsm.py +++ b/alpenhorn/io/lustrehsm.py @@ -19,10 +19,9 @@ import logging import pathlib import peewee as pw -from datetime import datetime from ..common.util import pretty_bytes, pretty_deltat -from ..db import ArchiveFileCopy +from ..db import ArchiveFileCopy, utcnow from ..scheduler import Task from ..server.querywalker import QueryWalker from .base import BaseNodeRemote @@ -249,9 +248,9 @@ def _async(task, node, lfs, headroom_needed): ) lfs.hsm_release(copy.path) # Update copy record immediately - ArchiveFileCopy.update( - ready=False, last_update=datetime.utcnow() - ).where(ArchiveFileCopy.id == copy.id).execute() + ArchiveFileCopy.update(ready=False, last_update=utcnow()).where( + ArchiveFileCopy.id == copy.id + ).execute() total_files += 1 total_bytes += copy.file.size_b if total_bytes >= headroom_needed: @@ -337,20 +336,20 @@ def _async(task, node, lfs, copies): f"File copy {copy.file.path} on node {node.name} is missing!" 
) ArchiveFileCopy.update( - has_file="N", ready=False, last_update=datetime.utcnow() + has_file="N", ready=False, last_update=utcnow() ).where(ArchiveFileCopy.id == copy.id).execute() elif state == lfs.HSM_RELEASED: if copy.ready: log.info(f"Updating file copy {copy.file.path}: ready -> False") - ArchiveFileCopy.update( - ready=False, last_update=datetime.utcnow() - ).where(ArchiveFileCopy.id == copy.id).execute() + ArchiveFileCopy.update(ready=False, last_update=utcnow()).where( + ArchiveFileCopy.id == copy.id + ).execute() else: # i.e. RESTORED or UNARCHIVED if not copy.ready: log.info(f"Updating file copy {copy.file.path}: ready -> True") - ArchiveFileCopy.update( - ready=True, last_update=datetime.utcnow() - ).where(ArchiveFileCopy.id == copy.id).execute() + ArchiveFileCopy.update(ready=True, last_update=utcnow()).where( + ArchiveFileCopy.id == copy.id + ).execute() # Copies get checked in an async Task( @@ -405,9 +404,9 @@ def _async(task: Task, node_io: LustreHSMNodeIO, copy: ArchiveFileCopy) -> None: "File copy missing during check: " f"{copy.path}. Updating database." ) - ArchiveFileCopy.update( - has_file="N", last_update=datetime.utcnow() - ).where(ArchiveFileCopy.id == copy.id).execute() + ArchiveFileCopy.update(has_file="N", last_update=utcnow()).where( + ArchiveFileCopy.id == copy.id + ).execute() return # Trigger restore, if necessary @@ -572,7 +571,7 @@ def _async(task: Task, node_io: LustreHSMNodeIO, file_: ArchiveFile): if copy.ready != ready: copy.ready = ready - copy.last_update = datetime.utcnow() + copy.last_update = utcnow() copy.save() log.info( f"File copy {file_.path} on node {node_io.node.name} now " diff --git a/alpenhorn/server/auto_import.py b/alpenhorn/server/auto_import.py index 6c119a3b7..b67403304 100644 --- a/alpenhorn/server/auto_import.py +++ b/alpenhorn/server/auto_import.py @@ -6,12 +6,11 @@ import logging import pathlib import peewee as pw -from datetime import datetime from watchdog.events import FileSystemEventHandler from .. import db from ..common import config, extensions -from ..db import ArchiveAcq, ArchiveFile, ArchiveFileCopy +from ..db import ArchiveAcq, ArchiveFile, ArchiveFileCopy, utcnow from ..io import ioutil from ..scheduler import Task @@ -163,7 +162,7 @@ def _import_file(task: Task, node: StorageNode, path: pathlib.PurePath) -> None: copy.has_file = "M" copy.wants_file = "Y" copy.ready = True - copy.last_update = datetime.utcnow() + copy.last_update = utcnow() copy.save() log.warning( f'Imported file "{path}" formerly present on node {node.name}! Marking suspect.' @@ -177,7 +176,7 @@ def _import_file(task: Task, node: StorageNode, path: pathlib.PurePath) -> None: wants_file="Y", ready=True, size_b=node.io.filesize(path, actual=True), - last_update=datetime.utcnow(), + last_update=utcnow(), ) log.info(f'Registered file copy "{path}" on node "{node.name}".') diff --git a/alpenhorn/server/update.py b/alpenhorn/server/update.py index 6a9569757..34380d527 100644 --- a/alpenhorn/server/update.py +++ b/alpenhorn/server/update.py @@ -8,11 +8,16 @@ import time import logging import peewee as pw -from datetime import datetime from ..common import config, util from ..common.extensions import io_module -from ..db import ArchiveFileCopy, ArchiveFileCopyRequest, StorageNode, StorageGroup +from ..db import ( + ArchiveFileCopy, + ArchiveFileCopyRequest, + StorageNode, + StorageGroup, + utcnow, +) from ..scheduler import global_abort, WorkerPool, EmptyPool from . 
import auto_import from .querywalker import QueryWalker @@ -337,7 +342,7 @@ def run_auto_verify(self) -> None: # Mark file as needing check copy.has_file = "M" - copy.last_update = datetime.utcnow() + copy.last_update = utcnow() copy.save() def update_idle(self) -> None: diff --git a/pyproject.toml b/pyproject.toml index d363d0eed..5b33320b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,7 @@ license = {file = "LICENSE"} dependencies = [ "Click >= 6.0", "concurrent-log-handler", - "peewee >= 3.16", + "peewee >= 3.17.1", "PyYAML", "tabulate", "watchdog" diff --git a/tests/db/test_acquisition.py b/tests/db/test_acquisition.py index 8745e9dc3..2d7346ec6 100644 --- a/tests/db/test_acquisition.py +++ b/tests/db/test_acquisition.py @@ -2,7 +2,6 @@ import pytest import pathlib -import datetime import peewee as pw from alpenhorn.db.acquisition import ArchiveAcq, ArchiveFile @@ -39,9 +38,9 @@ def test_acq_model(archiveacq): def test_file_model(archiveacq, archivefile): acq1 = archiveacq(name="acq1") - before = datetime.datetime.utcnow().replace(microsecond=0) + before = pw.utcnow().replace(microsecond=0) archivefile(name="min", acq=acq1) - after = datetime.datetime.utcnow() + after = pw.utcnow() archivefile( name="max", acq=acq1, diff --git a/tests/db/test_archive.py b/tests/db/test_archive.py index 1f63e6f89..9b9175afd 100644 --- a/tests/db/test_archive.py +++ b/tests/db/test_archive.py @@ -79,11 +79,9 @@ def test_archivefilecopyrequest_model( """Test ArchiveFileCopyRequest model""" minnode = storagenode(name="min", group=simplegroup) maxnode = storagenode(name="max", group=simplegroup) - before = (datetime.datetime.utcnow() - datetime.timedelta(seconds=1)).replace( - microsecond=0 - ) + before = (pw.utcnow() - datetime.timedelta(seconds=1)).replace(microsecond=0) archivefilecopyrequest(file=simplefile, node_from=minnode, group_to=simplegroup) - after = datetime.datetime.utcnow() + datetime.timedelta(seconds=1) + after = pw.utcnow() + datetime.timedelta(seconds=1) archivefilecopyrequest( file=simplefile, node_from=maxnode, diff --git a/tests/db/test_storage.py b/tests/db/test_storage.py index 5749cb09b..70b754f64 100644 --- a/tests/db/test_storage.py +++ b/tests/db/test_storage.py @@ -2,7 +2,6 @@ import pytest import pathlib -import datetime import peewee as pw from alpenhorn.db.storage import StorageGroup, StorageNode, StorageTransferAction @@ -355,11 +354,11 @@ def test_update_avail_gb(simplenode): assert simplenode.avail_gb is None # Test a number - before = datetime.datetime.utcnow() + before = pw.utcnow() simplenode.update_avail_gb(10000) # Now the value is set node = StorageNode.get(id=simplenode.id) - after = datetime.datetime.utcnow() + after = pw.utcnow() assert node.avail_gb == 10000.0 / 2.0**30 assert node.avail_gb_last_checked >= before diff --git a/tests/io/test_ioutil.py b/tests/io/test_ioutil.py index ac8fdd882..a86794805 100644 --- a/tests/io/test_ioutil.py +++ b/tests/io/test_ioutil.py @@ -345,7 +345,7 @@ def test_autoclean( ): """Test post_add running autoclean.""" - before = datetime.datetime.utcnow() - datetime.timedelta(seconds=2) + before = pw.utcnow() - datetime.timedelta(seconds=2) destnode = storagenode(name="dest", group=simplegroup) @@ -370,7 +370,7 @@ def test_autoclean_state( ): """post_add autoclean only deletes copies with has_file=='Y'.""" - then = datetime.datetime.utcnow() - datetime.timedelta(seconds=200) + then = pw.utcnow() - datetime.timedelta(seconds=200) srcnode = storagenode(name="src", group=simplegroup) 
storagetransferaction(node_from=srcnode, group_to=simplenode.group, autoclean=True) diff --git a/tests/io/test_ioutil_crd.py b/tests/io/test_ioutil_crd.py index 8ffd8676a..c8a23ab46 100644 --- a/tests/io/test_ioutil_crd.py +++ b/tests/io/test_ioutil_crd.py @@ -5,6 +5,7 @@ import datetime from unittest.mock import patch, MagicMock +from alpenhorn.db import utcfromtimestamp, utcnow from alpenhorn.db.archive import ArchiveFileCopy, ArchiveFileCopyRequest from alpenhorn.io.ioutil import copy_request_done from alpenhorn.server.update import UpdateableNode @@ -173,7 +174,7 @@ def test_md5ok_true(db_setup): io, copy, req, start_time, post_add = db_setup - before = datetime.datetime.utcnow() - datetime.timedelta(seconds=2) + before = utcnow() - datetime.timedelta(seconds=2) assert ( copy_request_done( req, @@ -184,13 +185,13 @@ def test_md5ok_true(db_setup): ) is True ) - after = datetime.datetime.utcnow() + after = utcnow() # request is resolved afcr = ArchiveFileCopyRequest.get(id=req.id) assert afcr.completed assert not afcr.cancelled - assert afcr.transfer_started == datetime.datetime.utcfromtimestamp(start_time) + assert afcr.transfer_started == utcfromtimestamp(start_time) assert afcr.transfer_completed >= before assert afcr.transfer_completed <= after diff --git a/tests/io/test_lustrehsmnode.py b/tests/io/test_lustrehsmnode.py index a54aa4c58..babed8534 100644 --- a/tests/io/test_lustrehsmnode.py +++ b/tests/io/test_lustrehsmnode.py @@ -1,7 +1,7 @@ """Test LustreHSMNodeIO.""" import pytest -import datetime +import peewee as pw from unittest.mock import patch, MagicMock from alpenhorn.db.archive import ArchiveFileCopy @@ -94,7 +94,7 @@ def test_release_files(queue, mock_lfs, node): node.io.release_files() - before = datetime.datetime.utcnow().replace(microsecond=0) + before = pw.utcnow().replace(microsecond=0) # Job in queue assert queue.qsize == 1 @@ -368,7 +368,7 @@ def test_ready_path(mock_lfs, node): def test_ready_pull_restored(mock_lfs, node, queue, archivefilecopyrequest): """Test LustreHSMNodeIO.ready_pull on a restored file that isn't ready.""" - before = datetime.datetime.utcnow().replace(microsecond=0) + before = pw.utcnow().replace(microsecond=0) copy = ArchiveFileCopy.get(id=1) copy.ready = False @@ -464,7 +464,7 @@ def test_idle_update_empty(queue, mock_lfs, node): def test_idle_update_ready(xfs, queue, mock_lfs, node): """Test LustreHSMNodeIO.idle_update with copies ready""" - before = datetime.datetime.utcnow().replace(microsecond=0) + before = pw.utcnow().replace(microsecond=0) node.io.idle_update(False) @@ -504,7 +504,7 @@ def test_idle_update_ready(xfs, queue, mock_lfs, node): def test_idle_update_not_ready(xfs, queue, mock_lfs, node): """Test LustreHSMNodeIO.idle_update with copies not ready""" - before = datetime.datetime.utcnow().replace(microsecond=0) + before = pw.utcnow().replace(microsecond=0) # Update all copies ArchiveFileCopy.update(ready=False).execute() diff --git a/tests/server/test_auto_import.py b/tests/server/test_auto_import.py index 2448b0b32..2b7310f2a 100644 --- a/tests/server/test_auto_import.py +++ b/tests/server/test_auto_import.py @@ -107,9 +107,7 @@ def test_import_file_create(xfs, dbtables, unode): xfs.create_file("/node/simplefile_acq/simplefile") - before = (datetime.datetime.utcnow() - datetime.timedelta(seconds=1)).replace( - microsecond=0 - ) + before = (pw.utcnow() - datetime.timedelta(seconds=1)).replace(microsecond=0) with patch( "alpenhorn.common.extensions._id_ext", @@ -122,7 +120,7 @@ def test_import_file_create(xfs, dbtables, unode): ) 
) - after = datetime.datetime.utcnow() + datetime.timedelta(seconds=1) + after = pw.utcnow() + datetime.timedelta(seconds=1) # Check DB acq = ArchiveAcq.get(name="simplefile_acq") @@ -212,9 +210,7 @@ def test_import_file_exists(xfs, dbtables, unode, simplefile, archivefilecopy): ) xfs.create_file("/node/simplefile_acq/simplefile") - before = (datetime.datetime.utcnow() - datetime.timedelta(seconds=1)).replace( - microsecond=0 - ) + before = (pw.utcnow() - datetime.timedelta(seconds=1)).replace(microsecond=0) with patch( "alpenhorn.common.extensions._id_ext", @@ -227,7 +223,7 @@ def test_import_file_exists(xfs, dbtables, unode, simplefile, archivefilecopy): ) ) - after = datetime.datetime.utcnow() + datetime.timedelta(seconds=1) + after = pw.utcnow() + datetime.timedelta(seconds=1) # Check DB acq = ArchiveAcq.get(name="simplefile_acq") diff --git a/tests/server/test_update_node.py b/tests/server/test_update_node.py index d1bd4583a..0e7ef8899 100644 --- a/tests/server/test_update_node.py +++ b/tests/server/test_update_node.py @@ -2,6 +2,7 @@ import pytest import datetime +import peewee as pw from unittest.mock import call, patch, MagicMock from alpenhorn.db.archive import ArchiveFileCopy @@ -147,7 +148,7 @@ def test_update_free_space(unode): unode.db.avail_gb = 3 unode.db.save() - now = datetime.datetime.utcnow() + now = pw.utcnow() # 2 ** 32 bytes is 4 GiB with patch.object(unode.io, "bytes_avail", lambda fast: 2**32): unode.update_free_space() @@ -165,7 +166,7 @@ def test_auto_verify(unode, simpleacq, archivefile, archivefilecopy): unode.db.auto_verify = 4 # Last Update time to permit auto verification - last_update = datetime.datetime.utcnow() - datetime.timedelta(days=10) + last_update = pw.utcnow() - datetime.timedelta(days=10) # Make some files to verify copyY = archivefilecopy( @@ -213,25 +214,25 @@ def test_auto_verify_time(unode, simpleacq, archivefile, archivefilecopy): node=unode.db, file=archivefile(name="file9", acq=simpleacq), has_file="Y", - last_update=datetime.datetime.utcnow() - datetime.timedelta(days=9), + last_update=pw.utcnow() - datetime.timedelta(days=9), ) copy8 = archivefilecopy( node=unode.db, file=archivefile(name="file8", acq=simpleacq), has_file="Y", - last_update=datetime.datetime.utcnow() - datetime.timedelta(days=8), + last_update=pw.utcnow() - datetime.timedelta(days=8), ) copy6 = archivefilecopy( node=unode.db, file=archivefile(name="file6", acq=simpleacq), has_file="Y", - last_update=datetime.datetime.utcnow() - datetime.timedelta(days=6), + last_update=pw.utcnow() - datetime.timedelta(days=6), ) copy5 = archivefilecopy( node=unode.db, file=archivefile(name="file5", acq=simpleacq), has_file="Y", - last_update=datetime.datetime.utcnow() - datetime.timedelta(days=5), + last_update=pw.utcnow() - datetime.timedelta(days=5), ) unode.run_auto_verify()