Merge pull request #282 from PrivateStorageio/280.tahoe-lafs-1.17.1
Depend on Tahoe-LAFS 1.17.1
exarkun authored Jan 12, 2022
2 parents a263d17 + 78115ef commit 14cc987
Showing 9 changed files with 96 additions and 60 deletions.
2 changes: 1 addition & 1 deletion default.nix
@@ -64,7 +64,7 @@ in
# the `.post999` looks weird enough that if someone really cares about
# the version in use they will notice it and go searching for what's
# going on and discover the real version specified by `src` below.
version = "1.17.0.post999";
version = "1.17.1.post999";
# See https://github.com/DavHau/mach-nix/issues/190
requirementsExtra =
''
6 changes: 3 additions & 3 deletions nix/sources.json
@@ -59,10 +59,10 @@
"homepage": "https://tahoe-lafs.org/",
"owner": "tahoe-lafs",
"repo": "tahoe-lafs",
"rev": "tahoe-lafs-1.17.0",
"sha256": "0vjq7g1lfjd16y0iwnfsccp5k3q3av7wllkyqbsyd877a29nibzi",
"rev": "tahoe-lafs-1.17.1",
"sha256": "0vzl8xz4iwbq7d7saa6rimzgwj23s3vfgr3f428rbg0wp7dshs9s",
"type": "tarball",
"url": "https://github.com/tahoe-lafs/tahoe-lafs/archive/tahoe-lafs-1.17.0.tar.gz",
"url": "https://github.com/tahoe-lafs/tahoe-lafs/archive/tahoe-lafs-1.17.1.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
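As an aside, the new "sha256" value above can be re-derived from the release tarball. A minimal illustrative sketch, assuming nix-prefetch-url from Nix is on PATH; this helper is not part of the repository:

    import subprocess

    # Fetch and unpack the tarball, printing the base32 hash in the same
    # format the pin in nix/sources.json records.
    url = "https://github.com/tahoe-lafs/tahoe-lafs/archive/tahoe-lafs-1.17.1.tar.gz"
    result = subprocess.run(
        ["nix-prefetch-url", "--unpack", url],
        check=True,
        capture_output=True,
        text=True,
    )
    print(result.stdout.strip())  # expected to match the "sha256" field above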
2 changes: 1 addition & 1 deletion setup.cfg
@@ -45,7 +45,7 @@ install_requires =
# Tahoe has no stable Python API but we use its Python API so there's
# basically no wiggle room here. We still use a (really tiny) range
# because our Nix packaging provides a Tahoe-LAFS with a .postNNN version.
- tahoe-lafs >=1.17.0,<1.17.1
+ tahoe-lafs >=1.17.1,<1.17.2
treq
pyutil
prometheus-client
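The comment above is why the range moves in lockstep with each Tahoe-LAFS release: the Nix build's ".postNNN" suffix must still satisfy it. A quick illustrative check with the packaging library, not part of this change:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">=1.17.1,<1.17.2")

    # The plain release and the Nix-provided post-release both satisfy the range.
    print(spec.contains(Version("1.17.1")))          # True
    print(spec.contains(Version("1.17.1.post999")))  # True
    # The previously pinned release and the next upstream release do not.
    print(spec.contains(Version("1.17.0")))          # False
    print(spec.contains(Version("1.17.2")))          # False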
65 changes: 53 additions & 12 deletions src/_zkapauthorizer/_storage_server.py
@@ -27,12 +27,17 @@
from os import listdir, stat
from os.path import join
from struct import calcsize, unpack
- from typing import Dict, List, Optional
+ from typing import Any, Dict, List, Optional, Tuple

import attr
- from allmydata.interfaces import RIStorageServer, TestAndWriteVectorsForShares
+ from allmydata.interfaces import TestAndWriteVectorsForShares
from allmydata.storage.common import storage_index_to_dir
- from allmydata.storage.immutable import ShareFile
+ from allmydata.storage.immutable import (
+     BucketWriter,
+     FoolscapBucketReader,
+     FoolscapBucketWriter,
+     ShareFile,
+ )
from allmydata.storage.lease import LeaseInfo
from allmydata.storage.mutable import MutableShareFile
from allmydata.storage.server import StorageServer
@@ -47,6 +52,7 @@
)
from eliot import log_call, start_action
from foolscap.api import Referenceable
+ from foolscap.ipb import IRemoteReference
from prometheus_client import CollectorRegistry, Histogram
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IReactorTime
@@ -189,7 +195,10 @@ class ZKAPAuthorizerStorageServer(Referenceable):
# control it ourselves.
LEASE_PERIOD = timedelta(days=31)

- _original = attr.ib(validator=provides(RIStorageServer))
+ # A StorageServer instance, but not validated because of the fake used in
+ # the test suite.
+ _original = attr.ib()

_pass_value = pass_value_attribute()
_signing_key = attr.ib(validator=instance_of(SigningKey))
_spender = attr.ib(validator=provides(ISpender))
@@ -203,6 +212,12 @@ class ZKAPAuthorizerStorageServer(Referenceable):
)
_public_key = attr.ib(init=False)
_metric_spending_successes = attr.ib(init=False)
+ _bucket_writer_disconnect_markers: Dict[
+     BucketWriter, Tuple[IRemoteReference, Any]
+ ] = attr.ib(
+     init=False,
+     default=attr.Factory(dict),
+ )

@_public_key.default
def _get_public_key(self):
@@ -211,6 +226,27 @@ def _get_public_key(self):
# so that `self._signing_key` will be assigned when this runs.
return PublicKey.from_signing_key(self._signing_key)

+ def _bucket_writer_closed(self, bw: BucketWriter):
+     """
+     This is registered as a callback with the storage backend and receives
+     notification when a bucket writer is closed. It removes the
+     disconnection-based cleanup callback for the given bucket.
+     """
+     # This implementation was originally copied from
+     # allmydata.storage.server.FoolscapStorageServer. Since we don't use
+     # Tahoe's Foolscap storage server layer we need to do this bucket
+     # writer bookkeeping ourselves.
+     if bw in self._bucket_writer_disconnect_markers:
+         canary, disconnect_marker = self._bucket_writer_disconnect_markers.pop(bw)
+         canary.dontNotifyOnDisconnect(disconnect_marker)
+
+ def __attrs_post_init__(self):
+     """
+     Finish initialization after attrs does its job. This consists of
+     registering a cleanup handler with the storage backend.
+     """
+     self._original.register_bucket_writer_close_handler(self._bucket_writer_closed)

def _get_spending_histogram_buckets(self):
"""
Create the upper bounds for the ZKAP spending histogram.
@@ -253,7 +289,7 @@ def remote_get_version(self):
Pass-through without pass check to allow clients to learn about our
version and configuration in case it helps them decide how to behave.
"""
- return self._original.remote_get_version()
+ return self._original.get_version()

def remote_allocate_buckets(
self,
@@ -302,7 +338,7 @@ def remote_allocate_buckets(
allocated_size,
)

- alreadygot, bucketwriters = self._original._allocate_buckets(
+ alreadygot, bucketwriters = self._original.allocate_buckets(
storage_index,
renew_secret,
cancel_secret,
@@ -330,22 +366,27 @@ def remote_allocate_buckets(
# StorageServer.remote_allocate_buckets.
for bw in bucketwriters.values():
disconnect_marker = canary.notifyOnDisconnect(bw.disconnected)
- self._original._bucket_writer_disconnect_markers[bw] = (
+ self._bucket_writer_disconnect_markers[bw] = (
canary,
disconnect_marker,
)
self._spender.mark_as_spent(
self._public_key,
validation.valid[:spent_passes],
)
- return alreadygot, bucketwriters
+ return alreadygot, {
+     k: FoolscapBucketWriter(bw) for (k, bw) in bucketwriters.items()
+ }

def remote_get_buckets(self, storage_index):
"""
Pass-through without pass check to let clients read immutable shares as
long as those shares exist.
"""
- return self._original.remote_get_buckets(storage_index)
+ return {
+     k: FoolscapBucketReader(bucket)
+     for (k, bucket) in self._original.get_buckets(storage_index).items()
+ }

def remote_add_lease(self, passes, storage_index, *a, **kw):
"""
@@ -363,7 +404,7 @@ def remote_add_lease(self, passes, storage_index, *a, **kw):
validation,
self._original,
)
- result = self._original.remote_add_lease(storage_index, *a, **kw)
+ result = self._original.add_lease(storage_index, *a, **kw)
self._spender.mark_as_spent(
self._public_key,
validation.valid,
@@ -376,7 +417,7 @@ def remote_advise_corrupt_share(self, *a, **kw):
Pass-through without a pass check to let clients inform us of possible
issues with the system without incurring any cost to themselves.
"""
- return self._original.remote_advise_corrupt_share(*a, **kw)
+ return self._original.advise_corrupt_share(*a, **kw)

def remote_share_sizes(self, storage_index_or_slot, sharenums):
with start_action(
@@ -521,7 +562,7 @@ def remote_slot_readv(self, *a, **kw):
Pass-through without a pass check to let clients read mutable shares as
long as those shares exist.
"""
- return self._original.remote_slot_readv(*a, **kw)
+ return self._original.slot_readv(*a, **kw)


def check_pass_quantity(pass_value, validation, share_sizes):
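Most of the churn in this file tracks Tahoe-LAFS 1.17.1's split between the protocol-agnostic StorageServer backend and its Foolscap front end: the remote_* pass-throughs now call backend methods (get_version, allocate_buckets, get_buckets, add_lease, advise_corrupt_share, slot_readv) and wrap bucket objects in FoolscapBucketWriter/FoolscapBucketReader, while the disconnect-marker bookkeeping moves into ZKAPAuthorizerStorageServer itself. The stub-based sketch below only illustrates that register/allocate/close flow; none of the classes are the real Tahoe-LAFS or Foolscap objects:

    class StubBackend:
        """Stands in for the protocol-agnostic StorageServer backend."""

        def __init__(self):
            self._close_handler = None

        def register_bucket_writer_close_handler(self, handler):
            self._close_handler = handler

        def close_bucket(self, bucket_writer):
            # The real backend invokes the registered handler when a writer closes.
            self._close_handler(bucket_writer)


    class StubCanary:
        """Stands in for the Foolscap IRemoteReference canary."""

        def notifyOnDisconnect(self, callback):
            return ("disconnect-marker", callback)

        def dontNotifyOnDisconnect(self, marker):
            print("cancelled", marker[0])


    backend = StubBackend()
    markers = {}  # bucket writer -> (canary, disconnect marker), as in the class above

    def bucket_writer_closed(bucket_writer):
        if bucket_writer in markers:
            canary, marker = markers.pop(bucket_writer)
            canary.dontNotifyOnDisconnect(marker)

    backend.register_bucket_writer_close_handler(bucket_writer_closed)

    bucket_writer = object()  # stands in for a BucketWriter
    canary = StubCanary()
    markers[bucket_writer] = (canary, canary.notifyOnDisconnect(lambda: None))
    backend.close_bucket(bucket_writer)  # prints: cancelled disconnect-marker
    assert bucket_writer not in markers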
17 changes: 9 additions & 8 deletions src/_zkapauthorizer/tests/fixtures.py
@@ -28,23 +28,24 @@
from ..model import VoucherStore, memory_connect, open_and_initialize


- @attr.s
+ @attr.s(auto_attribs=True)
class AnonymousStorageServer(Fixture):
"""
Supply an instance of allmydata.storage.server.StorageServer which
implements anonymous access to Tahoe-LAFS storage server functionality.
- :ivar FilePath tempdir: The path to the server's storage on the
-     filesystem.
+ :ivar tempdir: The path to the server's storage on the filesystem.
- :ivar allmydata.storage.server.StorageServer storage_server: The storage
-     server.
+ :ivar storage_server: The protocol-agnostic storage server backend.
- :ivar twisted.internet.task.Clock clock: The ``IReactorTime`` provider to
-     supply to ``StorageServer`` for its time-checking needs.
+ :ivar clock: The ``IReactorTime`` provider to supply to ``StorageServer``
+     for its time-checking needs.
"""

- clock = attr.ib()
+ clock: Clock = attr.ib()

+ tempdir: FilePath = attr.ib(default=None)
+ storage_server: StorageServer = attr.ib(default=None)

def _setUp(self):
self.tempdir = FilePath(self.useFixture(TempDir()).join(u"storage"))
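For context, the fixture now relies on attrs' auto_attribs mode, where annotated class attributes become fields and an attr.ib() assignment supplies per-field configuration. A tiny standalone illustration with made-up names, not taken from the patch:

    import attr


    @attr.s(auto_attribs=True)
    class Example:
        clock: float = attr.ib()
        tempdir: str = attr.ib(default=None)  # assigned for real during setup


    e = Example(clock=0.0)
    print(e.clock, e.tempdir)  # 0.0 None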
5 changes: 2 additions & 3 deletions src/_zkapauthorizer/tests/foolscap.py
@@ -17,7 +17,6 @@
"""

import attr
- from allmydata.interfaces import RIStorageServer
from foolscap.api import Any, Copyable, Referenceable, RemoteInterface
from foolscap.copyable import CopyableSlicer, ICopyable
from twisted.internet.defer import fail, succeed
@@ -33,9 +32,9 @@ def echo(argument=Any()):
return Any()


- @implementer(RIStorageServer)
class StubStorageServer(object):
-     pass
+     def register_bucket_writer_close_handler(self, handler):
+         pass


def get_anonymous_storage_server():
17 changes: 9 additions & 8 deletions src/_zkapauthorizer/tests/storage_common.py
@@ -36,13 +36,17 @@
LEASE_INTERVAL = 60 * 60 * 24 * 31


- def cleanup_storage_server(storage_server):
+ def reset_storage_server(storage_server):
"""
- Delete all of the shares held by the given storage server.
+ Restore a storage server to a default state. This includes
+ deleting all of the shares it holds.
:param allmydata.storage.server.StorageServer storage_server: The storage
server with some on-disk shares to delete.
"""
+ # A storage server is read-write by default.
+ storage_server.readonly_storage = False

starts = [
FilePath(storage_server.sharedir),
FilePath(storage_server.corruption_advisory_dir),
@@ -60,7 +64,6 @@ def write_toy_shares(
cancel_secret,
sharenums,
size,
- canary,
):
"""
Write some immutable shares to the given storage server.
@@ -71,19 +74,17 @@
:param bytes cancel_secret:
:param set[int] sharenums:
:param int size:
- :param IRemoteReference canary:
"""
- _, allocated = storage_server.remote_allocate_buckets(
+ _, allocated = storage_server.allocate_buckets(
storage_index,
renew_secret,
cancel_secret,
sharenums,
size,
- canary=canary,
)
for (sharenum, writer) in allocated.items():
- writer.remote_write(0, bytes_for_share(sharenum, size))
- writer.remote_close()
+ writer.write(0, bytes_for_share(sharenum, size))
+ writer.close()


def whitebox_write_sparse_share(sharepath, version, size, leases, now):
10 changes: 3 additions & 7 deletions src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -61,10 +61,10 @@
from .matchers import matches_spent_passes, matches_version_dictionary
from .storage_common import (
LEASE_INTERVAL,
- cleanup_storage_server,
get_passes,
pass_factory,
privacypass_passes,
+ reset_storage_server,
whitebox_write_sparse_share,
write_toy_shares,
)
@@ -191,7 +191,7 @@ def setup_example(self):
self.spending_recorder.reset()

# And clean out any shares that might confuse things.
- cleanup_storage_server(self.anonymous_storage_server)
+ reset_storage_server(self.anonymous_storage_server)

def test_get_version(self):
"""
@@ -416,7 +416,6 @@ def allocate_buckets(sharenums):
cancel_secret,
existing_sharenums,
size,
- canary=self.canary,
)

# Let some time pass so leases added after this point will look
@@ -510,7 +509,6 @@ def test_add_lease(
cancel_secret,
sharenums,
size,
- canary=self.canary,
)

self.assertThat(
@@ -550,7 +548,7 @@ def _stat_shares_immutable_test(
# Perhaps put some more leases on it. Leases might impact our
# ability to determine share data size.
for renew_secret in leases:
- self.anonymous_storage_server.remote_add_lease(
+ self.anonymous_storage_server.add_lease(
storage_index,
renew_secret,
cancel_secret,
@@ -598,7 +596,6 @@ def test_stat_shares_immutable(
cancel_secret,
sharenums,
size,
- canary,
),
)

@@ -825,7 +822,6 @@ def test_advise_corrupt_share(
cancel_secret,
{sharenum},
size,
- canary=self.canary,
)

self.assertThat(
