Skip to content

Commit

Permalink
flake8
Browse files Browse the repository at this point in the history
  • Loading branch information
klausfmh committed Nov 30, 2018
1 parent f98ed5a commit 1309368
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 7 deletions.
1 change: 0 additions & 1 deletion pypeman/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
DEFAULT_ENCODER_CLS = B64PickleEncoder



class Message():
"""
A message is the unit of information exchanged between nodes of a
Expand Down
2 changes: 1 addition & 1 deletion pypeman/msgstore.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ async def get(self, id):
# TODO: we might implement an async version for huge files
# - use either https://github.com/Tinche/aiofiles
# - read chunks + add sleep(0) (not good for blocking network file systems)

msg = Message.from_json(f.read().decode('utf-8'))
return {'id': id, 'state': await self.get_message_state(id), 'message': msg}

Expand Down
9 changes: 4 additions & 5 deletions pypeman/tests/test_hlp_serializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class TestSerializer:
"utf_string": "h\xe9llo",
"utf_bytes": b"h\xe9llo",
"allbytes": all_bytes,
}
}

# JsonableEncoder and B64PickleEncoder should be able to handle this
structvals_1 = {
Expand All @@ -49,10 +49,10 @@ class TestSerializer:
"list_with_asciibytes": [0, "1", b"3"],
"list_with_utfbytes": [0, "1", b"3'\xe9"],
"list_with_anybytes": [0, "1", all_bytes],
"dict": {"key1" : 1,
"dict": {"key1": 1,
2: "val",
"ke\xe0": 1.3,
},
},
"object": SampleObject(),
}

Expand All @@ -74,9 +74,8 @@ def test_enc_dec_match_B64Pickle(self):
codec = codec_cls()
for name, val in sorted(cls.structvals_1.items()):
self.check_encode_decode(codec, name, val)

for codec_cls in [B64PickleEncoder]:
codec = codec_cls()
for name, val in sorted(cls.structvals_2.items()):
self.check_encode_decode(codec, name, val)

0 comments on commit 1309368

Please sign in to comment.