Progress to making signify fully typed and use black
ralphje committed Apr 28, 2023
1 parent 7a1b614 commit 59b8b00
Showing 21 changed files with 930 additions and 500 deletions.
14 changes: 14 additions & 0 deletions .mypy.ini
@@ -0,0 +1,14 @@
[mypy]
python_version = 3.7
disallow_incomplete_defs = True

[mypy-asn1crypto.*]
ignore_missing_imports = True
[mypy-certvalidator.*]
ignore_missing_imports = True
[mypy-oscrypto.*]
ignore_missing_imports = True
[mypy-pyasn1.*]
ignore_missing_imports = True
[mypy-pyasn1_modules.*]
ignore_missing_imports = True
1 change: 1 addition & 0 deletions requirements.txt
@@ -4,3 +4,4 @@ asn1crypto>=1.3,<2
oscrypto>=1.1,<2
pyasn1-modules>=0.2.8
mscerts
typing_extensions
5 changes: 4 additions & 1 deletion signify/__init__.py
@@ -1,7 +1,10 @@
from typing import Any


__version__ = "0.5.2"


def _print_type(t):
def _print_type(t: Any) -> str:
if t is None:
return ""
elif isinstance(t, tuple):
33 changes: 22 additions & 11 deletions signify/_compat.py
@@ -1,34 +1,45 @@
# This code is copied from Python itself to ensure we can provide cached_property
from __future__ import annotations

# This code is copied from Python itself to ensure we can provide cached_property in Python 3.7
try:
from functools import cached_property
from functools import cached_property # type: ignore[attr-defined]
except ImportError:
from threading import RLock

from typing import Generic, TypeVar, Callable, Any, overload
from typing_extensions import Self

_T = TypeVar("_T")
_NOT_FOUND = object()

class cached_property:
def __init__(self, func):
class cached_property(Generic[_T]): # type: ignore[no-redef]
def __init__(self, func: Callable[[Any], _T]):
self.func = func
self.attrname = None
self.attrname: str | None = None
self.__doc__ = func.__doc__
self.lock = RLock()

def __set_name__(self, owner, name):
def __set_name__(self, owner: type[Any], name: str) -> None:
if self.attrname is None:
self.attrname = name
elif name != self.attrname:
raise TypeError(
"Cannot assign the same cached_property to two different names "
f"({self.attrname!r} and {name!r})."
f"Cannot assign the same cached_property to two different names ({self.attrname!r} and {name!r})."
)

def __get__(self, instance, owner=None):
@overload
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
...

@overload
def __get__(self, instance: object, owner: type[Any] | None = None) -> _T:
...

def __get__(self, instance: object | None, owner: type[Any] | None = None) -> Self | _T:
if instance is None:
return self
if self.attrname is None:
raise TypeError(
"Cannot use cached_property instance without calling __set_name__ on it.")
raise TypeError("Cannot use cached_property instance without calling __set_name__ on it.")
try:
cache = instance.__dict__
except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
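The backported descriptor mirrors functools.cached_property: the wrapped method runs once per instance and the result is then served from the instance __dict__. A minimal usage sketch (the Report class and its attribute are illustrative, not part of this commit):

from signify._compat import cached_property


class Report:
    @cached_property
    def digest(self) -> str:
        # Runs only on the first access; the value is then stored in the
        # instance __dict__ under the attribute name set by __set_name__.
        return "computed once"


r = Report()
assert r.digest == "computed once"
assert "digest" in r.__dict__  # subsequent accesses hit the cache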
7 changes: 7 additions & 0 deletions signify/_typing.py
@@ -0,0 +1,7 @@
import hashlib
from typing import Callable, Tuple

from typing_extensions import TypeAlias

HashFunction: TypeAlias = Callable[[], "hashlib._Hash"]
OidTuple: TypeAlias = Tuple[int, ...]
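HashFunction describes a zero-argument callable returning a hashlib hash object, and OidTuple an OID expressed as a tuple of integers. A short sketch of how such aliases are typically consumed (digest_of and SHA256_OID are illustrative names, not part of this commit):

import hashlib

from signify._typing import HashFunction, OidTuple

SHA256_OID: OidTuple = (2, 16, 840, 1, 101, 3, 4, 2, 1)


def digest_of(data: bytes, hash_function: HashFunction = hashlib.sha256) -> bytes:
    # Any hashlib constructor (md5, sha1, sha256, ...) satisfies HashFunction.
    hasher = hash_function()
    hasher.update(data)
    return hasher.digest()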
35 changes: 32 additions & 3 deletions signify/asn1/__init__.py
@@ -1,9 +1,28 @@
from __future__ import annotations

from typing import Any, TypeVar, overload

from pyasn1.type.base import Asn1Type

from . import pkcs7, spc, oids, ctl

__all__ = ['pkcs7', 'spc', 'oids', 'ctl', 'guarded_ber_decode', 'guarded_der_decode']
__all__ = ["pkcs7", "spc", "oids", "ctl", "guarded_ber_decode", "guarded_der_decode"]


_T = TypeVar("_T", bound=Asn1Type)


@overload
def guarded_ber_decode(data: Any, asn1_spec: _T) -> _T:
...


def guarded_ber_decode(data, asn1_spec=None):
@overload
def guarded_ber_decode(data: Any, asn1_spec: None = None) -> Asn1Type:
...


def guarded_ber_decode(data: Any, asn1_spec: _T | None = None) -> Asn1Type | _T:
from pyasn1.codec.ber import decoder as ber_decoder
from signify.exceptions import ParseError
from signify import _print_type
@@ -17,7 +36,17 @@ def guarded_ber_decode(data, asn1_spec=None):
return result


def guarded_der_decode(data, asn1_spec=None):
@overload
def guarded_der_decode(data: Any, asn1_spec: _T) -> _T:
...


@overload
def guarded_der_decode(data: Any, asn1_spec: None = None) -> Asn1Type:
...


def guarded_der_decode(data: Any, asn1_spec: _T | None = None) -> Asn1Type | _T:
from pyasn1.codec.der import decoder as der_decoder
from signify.exceptions import ParseError
from signify import _print_type
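The two overloads let mypy narrow the return type: passing an explicit asn1_spec yields that spec's type, while omitting it yields a plain Asn1Type. A hedged usage sketch, assuming only what the signatures above show:

from pyasn1.codec.der import encoder as der_encoder
from pyasn1.type import univ

from signify.asn1 import guarded_ber_decode

raw = der_encoder.encode(univ.Integer(42))

# First overload: the result is typed as the class of the given spec.
value = guarded_ber_decode(raw, asn1_spec=univ.Integer())

# Second overload: without a spec, the result is typed as a generic Asn1Type.
generic = guarded_ber_decode(raw)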
47 changes: 47 additions & 0 deletions signify/asn1/hashing.py
@@ -0,0 +1,47 @@
from __future__ import annotations

import hashlib
from typing import Iterable, cast

from pyasn1.type import univ
from pyasn1_modules import rfc5280

from signify import asn1, _print_type
from signify._typing import HashFunction
from signify.asn1 import guarded_ber_decode
from signify.exceptions import ParseError

# this list must be in the order of worst to best
ACCEPTED_DIGEST_ALGORITHMS = (hashlib.md5, hashlib.sha1, hashlib.sha256, hashlib.sha384, hashlib.sha512)


def _verify_empty_algorithm_parameters(algorithm: rfc5280.AlgorithmIdentifier, location: str) -> None:
if "parameters" in algorithm and algorithm["parameters"].isValue:
parameters = guarded_ber_decode(algorithm["parameters"])
if not isinstance(parameters, univ.Null):
raise ParseError("%s has parameters set, which is unexpected" % (location,))


def _get_digest_algorithm(
algorithm: rfc5280.AlgorithmIdentifier,
location: str,
acceptable: Iterable[HashFunction] = ACCEPTED_DIGEST_ALGORITHMS,
) -> HashFunction:
result = asn1.oids.get(algorithm["algorithm"], asn1.oids.OID_TO_HASH)
if isinstance(result, tuple) or result not in acceptable:
raise ParseError(
"%s must be one of %s, not %s" % (location, [x().name for x in acceptable], _print_type(result))
)

_verify_empty_algorithm_parameters(algorithm, location)

return cast(HashFunction, result)


def _get_encryption_algorithm(algorithm: univ.Sequence, location: str) -> str:
result = asn1.oids.OID_TO_PUBKEY.get(algorithm["algorithm"])
if result is None:
raise ParseError("%s: %s is not acceptable as encryption algorithm" % (location, algorithm["algorithm"]))

_verify_empty_algorithm_parameters(algorithm, location)
return result
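A sketch of how the new helper resolves a digest function from an AlgorithmIdentifier; note that _get_digest_algorithm is an internal helper, and the identifier construction below is only illustrative (the OID is the standard one for SHA-256):

import hashlib

from pyasn1.type import univ
from pyasn1_modules import rfc5280

from signify.asn1.hashing import _get_digest_algorithm

alg = rfc5280.AlgorithmIdentifier()
alg["algorithm"] = univ.ObjectIdentifier("2.16.840.1.101.3.4.2.1")  # sha256

digest = _get_digest_algorithm(alg, location="example")
assert digest is hashlib.sha256  # resolved via OID_TO_HASH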
14 changes: 9 additions & 5 deletions signify/asn1/helpers.py
@@ -1,10 +1,14 @@
from __future__ import annotations

import contextlib
import datetime
from typing import Iterator

from pyasn1_modules import rfc5652
from pyasn1.type.useful import GeneralizedTime, UTCTime
from pyasn1_modules import rfc5652, rfc3161


def time_to_python(time):
def time_to_python(time: GeneralizedTime | UTCTime) -> datetime.datetime | None:
if 'utcTime' in time:
return time['utcTime'].asDateTime
elif 'generalTime' in time:
@@ -13,7 +17,7 @@ def time_to_python(time):
return None


def accuracy_to_python(accuracy):
def accuracy_to_python(accuracy: rfc3161.Accuracy) -> datetime.timedelta:
delta = datetime.timedelta()
if 'seconds' in accuracy and accuracy['seconds'].isValue:
delta += datetime.timedelta(seconds=int(accuracy['seconds']))
@@ -24,13 +28,13 @@ def accuracy_to_python(accuracy):
return delta


def bitstring_to_bytes(s):
def bitstring_to_bytes(s: str) -> bytes:
# based on https://stackoverflow.com/questions/32675679/convert-binary-string-to-bytearray-in-python-3
return int(str(s), 2).to_bytes((len(s) + 7) // 8, byteorder='big')


@contextlib.contextmanager
def patch_rfc5652_signeddata():
def patch_rfc5652_signeddata() -> Iterator[rfc5652.SignedData]:
"""Due to a specific error in the implementation of RFC5652 by (presumably) Microsoft, there is some issue
where v2AttrCerts are incorrectly tagged as AttributeCertificateV1 in the CertificateChoices structure. See
https://github.com/ralphje/signify/issues/9#issuecomment-633510304 for more details. This function monkey-patches
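The context manager yields a temporarily patched rfc5652.SignedData spec, so a decode performed inside the with-block tolerates the mis-tagged v2AttrCerts. A hedged sketch of the intended call pattern (raw_signed_data is a placeholder; a real BER-encoded SignedData blob would be needed for the decode to succeed):

from signify.asn1 import guarded_ber_decode
from signify.asn1.helpers import patch_rfc5652_signeddata

raw_signed_data = b"..."  # placeholder for a BER-encoded SignedData structure

with patch_rfc5652_signeddata() as signed_data_spec:
    # While the patch is active, the spec accepts the Microsoft-style tagging.
    content = guarded_ber_decode(raw_signed_data, asn1_spec=signed_data_spec)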
75 changes: 49 additions & 26 deletions signify/asn1/oids.py
@@ -20,19 +20,26 @@

"""ASN.1 OIDs mappings to parser classes or strings, where there is no class."""

from __future__ import annotations

import hashlib
from typing import Type, TypeVar, overload

from pyasn1.type.base import Asn1Type
from pyasn1_modules import rfc3161, rfc5652, rfc2315

from . import pkcs7, spc, ctl
from .._typing import HashFunction, OidTuple

OID_TO_CLASS = {
OID_TO_HASH: dict[OidTuple, HashFunction] = {
(1, 2, 840, 113549, 2, 5): hashlib.md5,
(1, 3, 14, 3, 2, 26): hashlib.sha1,
(2, 16, 840, 1, 101, 3, 4, 2, 1): hashlib.sha256,
(2, 16, 840, 1, 101, 3, 4, 2, 2): hashlib.sha384,
(2, 16, 840, 1, 101, 3, 4, 2, 3): hashlib.sha512,
}

OID_TO_CLASS: dict[OidTuple, Type[Asn1Type]] = {
(1, 2, 840, 113549, 1, 7, 1): pkcs7.Data,
(1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData,
(1, 2, 840, 113549, 1, 9, 3): rfc2315.ContentType,
@@ -59,37 +66,53 @@
(1, 3, 6, 1, 4, 1, 311, 10, 11, 127): ctl.NotBeforeEnhkeyUsage,
}

OID_TO_PUBKEY = {
(1, 2, 840, 113549, 1, 1, 1): 'rsa',
(1, 2, 840, 113549, 1, 1, 5): 'rsa-sha1',
(1, 2, 840, 113549, 1, 1, 11): 'rsa-sha256',
(1, 2, 840, 113549, 1, 1, 12): 'rsa-sha384',
(1, 2, 840, 113549, 1, 1, 13): 'rsa-sha512',
(1, 2, 840, 10040, 4, 1): 'dsa',
(1, 2, 840, 10040, 4, 3): 'dsa-sha1',
(1, 2, 840, 10045, 2, 1): 'ecc',
(1, 2, 840, 10045, 4, 1): 'ecdsa-sha1',
(1, 2, 840, 10045, 4, 3, 1): 'ecdsa-sha224',
(1, 2, 840, 10045, 4, 3, 2): 'ecdsa-sha256',
(1, 2, 840, 10045, 4, 3, 3): 'ecdsa-sha384',
(1, 2, 840, 10045, 4, 3, 4): 'ecdsa-sha512',
OID_TO_PUBKEY: dict[OidTuple, str] = {
(1, 2, 840, 113549, 1, 1, 1): "rsa",
(1, 2, 840, 113549, 1, 1, 5): "rsa-sha1",
(1, 2, 840, 113549, 1, 1, 11): "rsa-sha256",
(1, 2, 840, 113549, 1, 1, 12): "rsa-sha384",
(1, 2, 840, 113549, 1, 1, 13): "rsa-sha512",
(1, 2, 840, 10040, 4, 1): "dsa",
(1, 2, 840, 10040, 4, 3): "dsa-sha1",
(1, 2, 840, 10045, 2, 1): "ecc",
(1, 2, 840, 10045, 4, 1): "ecdsa-sha1",
(1, 2, 840, 10045, 4, 3, 1): "ecdsa-sha224",
(1, 2, 840, 10045, 4, 3, 2): "ecdsa-sha256",
(1, 2, 840, 10045, 4, 3, 3): "ecdsa-sha384",
(1, 2, 840, 10045, 4, 3, 4): "ecdsa-sha512",
}

OID_TO_RDN = {
(2, 5, 4, 3): 'CN', # common name
(2, 5, 4, 6): 'C', # country
(2, 5, 4, 7): 'L', # locality
(2, 5, 4, 8): 'ST', # stateOrProvince
(2, 5, 4, 9): 'STREET', # street
(2, 5, 4, 10): 'O', # organization
(2, 5, 4, 11): 'OU', # organizationalUnit
(0, 9, 2342, 19200300, 100, 1, 25): 'DC', # domainComponent
(1, 2, 840, 113549, 1, 9, 1): 'EMAIL', # emailaddress
OID_TO_RDN: dict[OidTuple, str] = {
(2, 5, 4, 3): "CN", # common name
(2, 5, 4, 6): "C", # country
(2, 5, 4, 7): "L", # locality
(2, 5, 4, 8): "ST", # stateOrProvince
(2, 5, 4, 9): "STREET", # street
(2, 5, 4, 10): "O", # organization
(2, 5, 4, 11): "OU", # organizationalUnit
(0, 9, 2342, 19200300, 100, 1, 25): "DC", # domainComponent
(1, 2, 840, 113549, 1, 9, 1): "EMAIL", # emailaddress
}

EKU_CODE_SIGNING = (1, 3, 6, 1, 5, 5, 7, 3, 3) # codeSigning
EKU_TIME_STAMPING = (1, 3, 6, 1, 5, 5, 7, 3, 8) # timeStamping


def get(key, oids=OID_TO_CLASS):
_V = TypeVar("_V")


@overload
def get(key: OidTuple, oids: None = None) -> Type[Asn1Type] | OidTuple:
...


@overload
def get(key: OidTuple, oids: dict[OidTuple, _V]) -> _V | OidTuple:
...


def get(key: OidTuple, oids: dict[OidTuple, _V] | None = None) -> _V | OidTuple:
if oids is None:
oids = OID_TO_CLASS # type: ignore[assignment]
assert oids is not None
return oids.get(key, tuple(key))
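With these overloads, get returns the mapped value for a known OID and falls back to the plain tuple for an unknown one; the default mapping remains OID_TO_CLASS. A short sketch using only values defined above:

from signify.asn1 import oids, pkcs7

# Known OID, explicit mapping: resolved to its string name.
assert oids.get((1, 2, 840, 113549, 1, 1, 1), oids.OID_TO_PUBKEY) == "rsa"

# Unknown OID: returned unchanged as a tuple.
assert oids.get((1, 2, 3, 4), oids.OID_TO_PUBKEY) == (1, 2, 3, 4)

# Default mapping OID_TO_CLASS: resolved to a pyasn1 spec class.
assert oids.get((1, 2, 840, 113549, 1, 7, 1)) is pkcs7.Data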
2 changes: 1 addition & 1 deletion signify/asn1/preserving_der.py
@@ -9,7 +9,7 @@
class SetOfEncoder(cer_encoder.SetOfEncoder):
"""This class is identical to the one of the CER encoder, except that the sorting has been removed. """

def encodeValue(self, value, asn1Spec, encodeFun, **options):
def encodeValue(self, value, asn1Spec, encodeFun, **options): # type: ignore[no-untyped-def]
chunks = self._encodeComponents(
value, asn1Spec, encodeFun, **options)

9 changes: 7 additions & 2 deletions signify/asn1/spc.py
@@ -18,6 +18,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

"""Authenticode-specific ASN.1 data structures."""

from pyasn1.type import char
@@ -60,11 +62,12 @@ class SpcString(univ.Choice):
))
)

def to_python(self):
def to_python(self) -> str | None:
if 'unicode' in self:
return str(self['unicode'])
elif 'ascii' in self:
return str(self['ascii'])
return None


class SpcLink(univ.Choice):
@@ -81,13 +84,15 @@ class SpcLink(univ.Choice):
))
)

def to_python(self):
def to_python(self) -> str | None:
if 'url' in self:
return str(self['url'])
elif 'moniker' in self:
return None # TODO
elif 'file' in self:
return self['file'].to_python()
else:
return None


class SpcSpOpusInfo(univ.Sequence):