A fork of what is left of ZeroNet, with the goal of adding an AT Proto frontend/proxy.
import time

import pytest
import mock
import gevent
import gevent.event
import os

from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site.Site import Site
from . import Spy


@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
    def testRename(self, file_server, site, site_temp):
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net


        site_temp.addPeer(file_server.ip, 1544)

        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        assert site_temp.storage.isFile("content.json")

        # Rename non-optional file
        os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))

        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        content = site.storage.loadJson("content.json")
        assert "data/img/domain-new.png" in content["files"]
        assert "data/img/domain.png" not in content["files"]
        assert not site_temp.storage.isFile("data/img/domain-new.png")
        assert site_temp.storage.isFile("data/img/domain.png")
        settings_before = site_temp.settings

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
            assert "streamFile" not in [req[1] for req in requests]

        content = site_temp.storage.loadJson("content.json")
        assert "data/img/domain-new.png" in content["files"]
        assert "data/img/domain.png" not in content["files"]
        assert site_temp.storage.isFile("data/img/domain-new.png")
        assert not site_temp.storage.isFile("data/img/domain.png")

        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before["size_optional"]

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    def testRenameOptional(self, file_server, site, site_temp):
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net


        site_temp.addPeer(file_server.ip, 1544)

        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        assert site_temp.settings["optional_downloaded"] == 0

        site_temp.needFile("data/optional.txt")

        assert site_temp.settings["optional_downloaded"] > 0
        settings_before = site_temp.settings
        hashfield_before = site_temp.content_manager.hashfield.tobytes()

        # Rename optional file
        os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))

        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)

        content = site.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert not site_temp.storage.isFile("data/optional-new.txt")
        assert site_temp.storage.isFile("data/optional.txt")

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
            assert "streamFile" not in [req[1] for req in requests]

        content = site_temp.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert site_temp.storage.isFile("data/optional-new.txt")
        assert not site_temp.storage.isFile("data/optional.txt")

        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before["size_optional"]
        assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
        assert site_temp.content_manager.hashfield.tobytes() == hashfield_before

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]


    def testArchivedDownload(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Download normally
        site_temp.addPeer(file_server.ip, 1544)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]

        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
        assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)

        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1)  # Allow user to update archived data later

        # Push archived update
        assert "archived" not in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        site.publish()
        time.sleep(0.1)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download

        # The archived content should disappear from the remote client
        assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    def testArchivedBeforeDownload(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Download normally
        site_temp.addPeer(file_server.ip, 1544)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]

        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
        assert "archived_before" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)

        content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1)  # Allow user to update archived data later

        # Push archived update
        assert "archived_before" not in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        site.publish()
        time.sleep(0.1)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download

        # The archived content should disappear from the remote client
        assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]


    # Test when the connected peer has the optional file
    def testOptionalDownload(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        # Download optional data/optional.txt
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
        assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        assert not site_temp.storage.isFile("data/optional.txt")
        assert site.storage.isFile("data/optional.txt")
        site_temp.needFile("data/optional.txt")
        assert site_temp.storage.isFile("data/optional.txt")

        # Optional user file
        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        optional_file_info = site_temp.content_manager.getFileInfo(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )
        assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
        assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    # Test when the connected peer does not have the file, so we ask it whether it knows someone who has it
    def testFindOptional(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init full source server (has optional files)
        site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        file_server_full = FileServer(file_server.ip, 1546)
        site_full.connection_server = file_server_full

        def listen():
            ConnectionServer.start(file_server_full)
            ConnectionServer.listen(file_server_full)

        gevent.spawn(listen)
        time.sleep(0.001)  # Port opening
        file_server_full.sites[site_full.address] = site_full  # Add site
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer(file_server.ip, 1546)  # Add it to source server
        hashfield = site_full_peer.updateHashfield()  # Update hashfield
        assert len(site_full.content_manager.hashfield) == 8
        assert hashfield
        assert site_full.storage.isFile("data/optional.txt")
        assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert len(site_full_peer.hashfield) == 8

        # Remove hashes from source server
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)

        # Init client server
        site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
        site_temp.addPeer(file_server.ip, 1544)  # Add source server

        # Download normal files
        site_temp.log.info("Start Downloading site")
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
        assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Full peer on source server has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])  # Full peer on source server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Full server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Full server has the file

        site_temp.log.info("Request optional files")
        with Spy.Spy(FileRequest, "route") as requests:
            # Request 2 files at the same time
            threads = []
            threads.append(site_temp.needFile("data/optional.txt", blocking=False))
            threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
            gevent.joinall(threads)

            assert len([request for request in requests if request[1] == "findHashIds"]) == 1  # findHashIds should be called only once

        assert site_temp.storage.isFile("data/optional.txt")
        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")

        assert site_temp.storage.deleteFiles()
        file_server_full.stop()
        [connection.close() for connection in file_server.connections]
        site_full.content_manager.contents.db.close("FindOptional test end")

    def testUpdate(self, file_server, site, site_temp):
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        assert len(site_temp.bad_files) == 1

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json", "wb").write(data_new)

        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        site.log.info("Publish new data.json without patch")
        # Publish without patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish()
            time.sleep(0.1)
            site.log.info("Downloading site")
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new

        # Close connection to avoid update spam limit
        list(site.peers.values())[0].remove()
        site.addPeer(file_server.ip, 1545)
        list(site_temp.peers.values())[0].ping()  # Connect back
        time.sleep(0.1)

        # Update with patch
        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # New data file removed
        assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs
        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

            event_done = gevent.event.AsyncResult()
            site.publish(diffs=diffs)
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []

        assert site_temp.storage.open("data/data.json").read() == data_new

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    def testBigUpdate(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # New data file removed
        assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs

        content_json = site.storage.loadJson("content.json")
        content_json["description"] = "BigZeroBlog" * 1024 * 10
        site.storage.writeJson("content.json", content_json)
        site.content_manager.loadContent("content.json", force=True)

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            assert site.storage.getSize("content.json") > 10 * 1024  # Make it a big content.json
            site.publish(diffs=diffs)
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
            assert len(file_requests) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()

    # Test what happens if the site's content.json is bigger than the site size limit
    def testHugeContentSiteUpdate(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        site_temp.settings["size_limit"] = int(20 * 1024 * 1024)
        site_temp.saveSettings()

        # Raise the size limit to 20MB on site so it can be signed
        site.settings["size_limit"] = int(20 * 1024 * 1024)
        site.saveSettings()

        content_json = site.storage.loadJson("content.json")
        content_json["description"] = "PartirUnJour" * 1024 * 1024
        site.storage.writeJson("content.json", content_json)
        changed, deleted = site.content_manager.loadContent("content.json", force=True)

        # Make sure we have 2 different content.json files
        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")

        # Publish with patch
        site.log.info("Publish new content.json bigger than 10MB")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # Verify it is over 10MB
            time.sleep(0.1)
            site.publish(diffs=diffs)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()

    def testUnicodeFilename(self, file_server, site, site_temp):
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer(file_server.ip, 1544)

        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        site.storage.write("data/img/árvíztűrő.png", b"test")

        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        content = site.storage.loadJson("content.json")
        assert "data/img/árvíztűrő.png" in content["files"]
        assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
        settings_before = site_temp.settings

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
            assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1

        content = site_temp.storage.loadJson("content.json")
        assert "data/img/árvíztűrő.png" in content["files"]
        assert site_temp.storage.isFile("data/img/árvíztűrő.png")

        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before["size_optional"]

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]