A fork of what remains of ZeroNet, with the aim of adding an AT Protocol frontend/proxy.
Branch: main — 164 lines, 6.4 kB (raw view)
import os
import json
import logging
import collections
import time
import hashlib

from Debug import Debug
from Plugin import PluginManager
from Config import config
from util import helper


class ContentFilterStorage(object):
    """Stores and merges content-filter rules: muted users ("mutes") and
    blocked sites ("siteblocks"), from the local data/filters.json plus any
    subscribed remote include files ("includes")."""

    def __init__(self, site_manager):
        self.log = logging.getLogger("ContentFilterStorage")
        self.file_path = "%s/filters.json" % config.data_dir
        self.site_manager = site_manager
        self.file_content = self.load()

        # Set default values for filters.json
        if not self.file_content:
            self.file_content = {}

        # Site blacklist renamed to site blocks
        if "site_blacklist" in self.file_content:
            self.file_content["siteblocks"] = self.file_content["site_blacklist"]
            del self.file_content["site_blacklist"]

        for key in ["mutes", "siteblocks", "includes"]:
            if key not in self.file_content:
                self.file_content[key] = {}

        # Merged set of mutes and blacklists from all includes
        self.include_filters = collections.defaultdict(set)
        self.includeUpdateAll(update_site_dbs=False)

    def load(self):
        """Load filters.json from disk, migrating a legacy mutes.json first.

        Returns:
            dict parsed from filters.json, or None when the file is missing
            or unreadable (the caller falls back to an empty rule set).
        """
        # Rename previously used mutes.json -> filters.json
        if os.path.isfile("%s/mutes.json" % config.data_dir):
            self.log.info("Renaming mutes.json to filters.json...")
            os.rename("%s/mutes.json" % config.data_dir, self.file_path)
        if os.path.isfile(self.file_path):
            try:
                # Fix: use a context manager so the file handle is closed
                # deterministically (the original left it to the GC).
                with open(self.file_path) as file:
                    return json.load(file)
            except Exception as err:
                self.log.error("Error loading filters.json: %s" % err)
                return None
        else:
            return None

    def includeUpdateAll(self, update_site_dbs=True):
        """Re-merge every subscribed include file into self.include_filters.

        Args:
            update_site_dbs: when True, apply the mute/unmute side effects
                (remove or re-load user files) for users whose effective
                mute state changed since the last merge.
        """
        s = time.time()
        new_include_filters = collections.defaultdict(set)

        # Load all include files' data into a merged set
        for include_path in self.file_content["includes"]:
            address, inner_path = include_path.split("/", 1)
            try:
                content = self.site_manager.get(address).storage.loadJson(inner_path)
            except Exception as err:
                # Best-effort: a broken/missing include must not break the rest
                self.log.warning(
                    "Error loading include %s: %s" %
                    (include_path, Debug.formatException(err))
                )
                continue

            for key, val in content.items():
                # Only dict-valued sections (mutes, siteblocks, ...) are rules
                if not isinstance(val, dict):
                    continue
                new_include_filters[key].update(val.keys())

        mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"])
        mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"])

        self.include_filters = new_include_filters

        if update_site_dbs:
            # Newly muted users: purge their files from the site databases
            for auth_address in mutes_added:
                self.changeDbs(auth_address, "remove")

            # Unmuted users: re-load their files unless still muted locally
            for auth_address in mutes_removed:
                if not self.isMuted(auth_address):
                    self.changeDbs(auth_address, "load")

        num_mutes = len(self.include_filters["mutes"])
        num_siteblocks = len(self.include_filters["siteblocks"])
        self.log.debug(
            "Loaded %s mutes, %s blocked sites from %s includes in %.3fs" %
            (num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s)
        )

    def includeAdd(self, address, inner_path, description=None):
        """Subscribe to an include file and apply its rules immediately."""
        self.file_content["includes"]["%s/%s" % (address, inner_path)] = {
            "date_added": time.time(),
            "address": address,
            "description": description,
            "inner_path": inner_path
        }
        self.includeUpdateAll()
        self.save()

    def includeRemove(self, address, inner_path):
        """Unsubscribe from an include file and re-apply the remaining rules."""
        del self.file_content["includes"]["%s/%s" % (address, inner_path)]
        self.includeUpdateAll()
        self.save()

    def save(self):
        """Atomically persist the current rules to filters.json."""
        s = time.time()
        helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8"))
        self.log.debug("Saved in %.3fs" % (time.time() - s))

    def isMuted(self, auth_address):
        """Return True if the user is muted locally or by any include."""
        # Fix: return the boolean expression directly instead of
        # if cond: return True / else: return False.
        return auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]

    def getSiteAddressHashed(self, address):
        """Return the hashed form ("0x" + sha256 hex) used to block a site
        without publishing its plain address."""
        return "0x" + hashlib.sha256(address.encode("ascii")).hexdigest()

    def isSiteblocked(self, address):
        """Return True if the (plain) address is blocked locally or by any include."""
        return address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]

    def getSiteblockDetails(self, address):
        """Return the block rule dict for an address, or None if not blocked.

        Checks the local list by plain address, then by its sha256-hashed
        form, then every subscribed include; when an include matched, the
        returned dict gains an "include" key identifying its source.
        """
        details = self.file_content["siteblocks"].get(address)
        if not details:
            address_sha256 = self.getSiteAddressHashed(address)
            details = self.file_content["siteblocks"].get(address_sha256)

        if not details:
            includes = self.file_content.get("includes", {}).values()
            for include in includes:
                include_site = self.site_manager.get(include["address"])
                if not include_site:
                    continue
                content = include_site.storage.loadJson(include["inner_path"])
                details = content.get("siteblocks", {}).get(address)
                if details:
                    details["include"] = include  # record which include matched
                    break

        return details

    # Search and remove or re-add files of a user
    def changeDbs(self, auth_address, action):
        """Apply a mute state change to every site database.

        Args:
            auth_address: the user's auth address (appears in inner paths).
            action: "remove" to drop the user's files from the DBs,
                anything else (used as "load") to re-index them.
        """
        self.log.debug("Mute action %s on user %s" % (action, auth_address))
        sites = list(self.site_manager.list().values())
        # Fix: the original indexed [0] unconditionally and raised IndexError
        # when no sites were loaded yet; nothing to do in that case.
        if not sites:
            self.log.warning("No sites loaded, skipping mute action %s on user %s" % (action, auth_address))
            return
        # All sites share one contents db; query any site's handle for
        # every content file under a directory belonging to this user.
        res = sites[0].content_manager.contents.db.execute(
            "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
            {"inner_path": "%%/%s/%%" % auth_address}
        )
        for row in res:
            site = self.site_manager.sites.get(row["address"])
            if not site:
                continue
            dir_inner_path = helper.getDirname(row["inner_path"])
            for file_name in site.storage.walk(dir_inner_path):
                if action == "remove":
                    site.storage.onUpdated(dir_inner_path + file_name, False)
                else:
                    site.storage.onUpdated(dir_inner_path + file_name)
                site.onFileDone(dir_inner_path + file_name)