A fork of what is left of ZeroNet, with the goal of adding an AT Protocol frontend/proxy.
at main 183 lines 12 kB view raw
import os
import io
from collections import OrderedDict

from Plugin import PluginManager
from Config import config
from util import Msgpack


@PluginManager.registerTo("Actions")
class ActionsPlugin:
    """Benchmark actions: zip/tar archive packing and unpacking and msgpack
    encoding/decoding. Each test* method is a generator that yields progress
    markers ("." per work unit) and asserts the result against known-good
    hashes/bytes, so the benchmark doubles as a correctness check."""

    def createZipFile(self, path):
        """Create a deterministic test zip at *path*: 100 deflated members of
        4KB each. Date, flags and attributes are fixed so repeated runs
        produce byte-identical files (required for the hash check)."""
        import zipfile
        test_data = b"Test" * 1024
        # Non-ASCII member names also exercise unicode filename handling
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
        with zipfile.ZipFile(path, 'w') as archive:
            for y in range(100):
                zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
                zip_info.compress_type = zipfile.ZIP_DEFLATED
                zip_info.create_system = 3  # Unix host system
                zip_info.flag_bits = 0
                zip_info.external_attr = 25165824  # 0o600 << 16: rw for owner only
                archive.writestr(zip_info, test_data)

    def testPackZip(self, num_run=1):
        """
        Test zip file creating
        """
        yield "x 100 x 5KB "
        from Crypt import CryptHash
        zip_path = '%s/test.zip' % config.data_dir
        for i in range(num_run):
            self.createZipFile(zip_path)
            yield "."

        archive_size = os.path.getsize(zip_path) / 1024
        yield "(Generated file size: %.2fkB)" % archive_size

        # Use a context manager: the original leaked the file handle
        with open(zip_path, "rb") as archive_file:
            archive_hash = CryptHash.sha512sum(archive_file)
        valid = "cb32fb43783a1c06a2170a6bc5bb228a032b67ff7a1fd7a5efb9b467b400f553"
        assert archive_hash == valid, "Invalid hash: %s != %s<br>" % (archive_hash, valid)
        os.unlink(zip_path)

    def testUnpackZip(self, num_run=1):
        """
        Test zip file reading
        """
        yield "x 100 x 5KB "
        import zipfile
        zip_path = '%s/test.zip' % config.data_dir
        test_data = b"Test" * 1024
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91".decode("utf8")

        self.createZipFile(zip_path)
        for i in range(num_run):
            with zipfile.ZipFile(zip_path) as archive:
                for f in archive.filelist:
                    assert f.filename.startswith(file_name), "Invalid filename: %s != %s" % (f.filename, file_name)
                    # Read each member once and close it (the original opened
                    # and read every member twice and leaked both handles)
                    with archive.open(f.filename) as member:
                        data = member.read()
                    assert data == test_data, "Invalid data: %s..." % data[0:30]
            yield "."

        os.unlink(zip_path)

    def createArchiveFile(self, path, archive_type="gz"):
        """Create a deterministic test tar archive at *path* with 100 members
        of 4KB each, compressed with *archive_type* ("gz", "bz2" or "xz")."""
        import tarfile
        import gzip

        # Monkey patch _write_gzip_header to use a fixed mtime in order to
        # keep the hash independent from datetime
        def nodate_write_gzip_header(self):
            self._write_mtime = 0
            original_write_gzip_header(self)

        test_data_io = io.BytesIO(b"Test" * 1024)
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")

        original_write_gzip_header = gzip.GzipFile._write_gzip_header
        gzip.GzipFile._write_gzip_header = nodate_write_gzip_header
        try:
            with tarfile.open(path, 'w:%s' % archive_type) as archive:
                for y in range(100):
                    test_data_io.seek(0)
                    tar_info = tarfile.TarInfo(file_name % y)
                    tar_info.size = 4 * 1024
                    archive.addfile(tar_info, test_data_io)
        finally:
            # Always restore the patched method: the original left gzip
            # monkey-patched for the remainder of the process
            gzip.GzipFile._write_gzip_header = original_write_gzip_header

    def testPackArchive(self, num_run=1, archive_type="gz"):
        """
        Test creating tar archive files
        """
        yield "x 100 x 5KB "
        from Crypt import CryptHash

        # Known-good hashes per compression type (see createArchiveFile for
        # why the output is deterministic)
        hash_valid_db = {
            "gz": "92caec5121a31709cbbc8c11b0939758e670b055bbbe84f9beb3e781dfde710f",
            "bz2": "b613f41e6ee947c8b9b589d3e8fa66f3e28f63be23f4faf015e2f01b5c0b032d",
            "xz": "ae43892581d770959c8d993daffab25fd74490b7cf9fafc7aaee746f69895bcb",
        }
        archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
        for i in range(num_run):
            self.createArchiveFile(archive_path, archive_type=archive_type)
            yield "."

        archive_size = os.path.getsize(archive_path) / 1024
        yield "(Generated file size: %.2fkB)" % archive_size

        # Use a context manager: the original leaked the file handle
        with open(archive_path, "rb") as archive_file:
            archive_hash = CryptHash.sha512sum(archive_file)
        valid = hash_valid_db[archive_type]
        assert archive_hash == valid, "Invalid hash: %s != %s<br>" % (archive_hash, valid)

        if os.path.isfile(archive_path):
            os.unlink(archive_path)

    def testUnpackArchive(self, num_run=1, archive_type="gz"):
        """
        Test reading tar archive files
        """
        yield "x 100 x 5KB "
        import tarfile

        test_data = b"Test" * 1024
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb1r\xc5\x91%s.txt".decode("utf8")
        archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
        self.createArchiveFile(archive_path, archive_type=archive_type)
        for i in range(num_run):
            with tarfile.open(archive_path, 'r:%s' % archive_type) as archive:
                for y in range(100):
                    assert archive.extractfile(file_name % y).read() == test_data
            yield "."

        if os.path.isfile(archive_path):
            os.unlink(archive_path)

    def _getMsgpackTestData(self):
        """Return (data, data_packed): a ~5KB test mapping and its expected
        msgpack encoding. Keys are sorted so the packed bytes are
        deterministic. Shared by the three msgpack benchmarks below (the
        original duplicated these literals in every method)."""
        binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = OrderedDict(
            sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
        )
        # Concatenation yields bytes identical to the original inline literal:
        # map header + bin16 "binary" + float64 + uint32 "int" + str16 "text"
        data_packed = (
            b'\x84\xa6binary\xc5\x01\x00' + binary +
            b'\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00' +
            b'hello' * 1024
        )
        return data, data_packed

    def testPackMsgpack(self, num_run=1):
        """
        Test msgpack encoding
        """
        yield "x 100 x 5KB "
        data, data_packed_valid = self._getMsgpackTestData()
        for y in range(num_run):
            for i in range(100):
                data_packed = Msgpack.pack(data)
            yield "."

        assert data_packed == data_packed_valid, "%s<br>!=<br>%s" % (repr(data_packed), repr(data_packed_valid))

    def testUnpackMsgpack(self, num_run=1):
        """
        Test msgpack decoding
        """
        yield "x 5KB "
        data, data_packed = self._getMsgpackTestData()
        for y in range(num_run):
            data_unpacked = Msgpack.unpack(data_packed, decode=False)
            yield "."

        assert data_unpacked == data, "%s<br>!=<br>%s" % (data_unpacked, data)

    def testUnpackMsgpackStreaming(self, num_run=1, fallback=False):
        """
        Test streaming msgpack decoding
        """
        yield "x 1000 x 5KB "
        data, data_packed = self._getMsgpackTestData()
        for i in range(num_run):
            unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
            for y in range(1000):
                unpacker.feed(data_packed)
                for data_unpacked in unpacker:
                    pass
            yield "."

        assert data == data_unpacked, "%s != %s" % (data_unpacked, data)