# Forking what is left of ZeroNet and hopefully adding an AT Proto Frontend/Proxy
1import io
2import os
3
4import msgpack
5import pytest
6
7from Config import config
8from util import Msgpack
9from collections import OrderedDict
10
11
class TestMsgpack:
    """Tests for the util.Msgpack wrapper around msgpack-python."""

    # sorted() + OrderedDict keeps key order deterministic, so the exact
    # packed byte strings asserted below are stable across runs/versions.
    test_data = OrderedDict(
        sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items())
    )

    def testPacking(self):
        # Default pack() uses bin-type headers (\xc4) for bytes values;
        # use_bin_type=False packs bytes as raw/str type for pre-0.7.0 peers.
        assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
        assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'

    def testUnpackinkg(self):  # NOTE(review): name typo ("Unpackinkg") kept so the discovered test id is unchanged
        # pack() -> unpack() must round-trip losslessly.
        assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data

    @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker])
    def testUnpacker(self, unpacker_class):
        """Streaming Unpacker (C and pure-Python fallback) must reassemble
        messages correctly even when fed a single byte at a time."""
        unpacker = unpacker_class(raw=False)

        # Two back-to-back packed messages in one byte stream.
        data = msgpack.packb(self.test_data, use_bin_type=True)
        data += msgpack.packb(self.test_data, use_bin_type=True)

        messages = []
        for char in data:
            unpacker.feed(bytes([char]))
            for message in unpacker:
                messages.append(message)

        assert len(messages) == 2
        assert messages[0] == self.test_data
        assert messages[0] == messages[1]

    def testStreaming(self):
        """Msgpack.stream must serialize a FilePart body as plain bytes."""
        bin_data = os.urandom(20)
        f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb")
        f.read_bytes = 30  # stream only the first 30 bytes of the file

        data = {"cmd": "response", "body": f, "bin": bin_data}

        out_buff = io.BytesIO()
        try:
            Msgpack.stream(data, out_buff.write)
        finally:
            # fix: the FilePart handle was never closed
            # (assumes FilePart delegates close() to the wrapped file — TODO confirm)
            f.close()
        out_buff.seek(0)

        # Expected value: the same dict with the FilePart replaced by its bytes.
        # fix: original used open(...).read(30) and leaked the handle
        with open("%s/users.json" % config.data_dir, "rb") as expected_file:
            data_packb = {
                "cmd": "response",
                "body": expected_file.read(30),
                "bin": bin_data
            }

        out_buff.seek(0)
        data_unpacked = Msgpack.unpack(out_buff.read())
        assert data_unpacked == data_packb
        assert data_unpacked["cmd"] == "response"
        assert isinstance(data_unpacked["body"], bytes)

    def testBackwardCompatibility(self):
        """unpack() must handle both bin-type (0.7.0+) and raw-type (older) data."""
        packed = {}
        packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False)
        packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True)
        for key, val in packed.items():
            unpacked = Msgpack.unpack(val)
            # fix: these comparisons were missing `assert` and silently did nothing
            assert isinstance(unpacked["utf8"], str)
            assert isinstance(unpacked["bin"], bytes)

        # Packed with use_bin_type=False (pre-ZeroNet 0.7.0)
        unpacked = Msgpack.unpack(packed["py3"], decode=True)
        assert isinstance(unpacked["utf8"], str)   # fix: missing assert
        assert isinstance(unpacked["bin"], bytes)  # fix: missing assert
        assert len(unpacked["utf8"]) == 9   # decoded length in characters
        assert len(unpacked["bin"]) == 10   # raw length in bytes
        with pytest.raises(UnicodeDecodeError):  # Try to decode binary as utf-8
            unpacked = Msgpack.unpack(packed["py3"], decode=False)

        # Packed with use_bin_type=True
        unpacked = Msgpack.unpack(packed["py3_bin"], decode=False)
        assert isinstance(unpacked["utf8"], str)   # fix: missing assert
        assert isinstance(unpacked["bin"], bytes)  # fix: missing assert
        assert len(unpacked["utf8"]) == 9
        assert len(unpacked["bin"]) == 10
88