A fork of what is left of ZeroNet, with the goal of adding an AT Protocol frontend/proxy.
1import time
2import os
3
4from . import ContentDb
5from Debug import Debug
6from Config import config
7
8
class ContentDbDict(dict):
    """Mapping of content.json inner path -> parsed content dict, lazily loaded.

    Keys are read from the site's content database at construction time.
    Values are loaded from disk (site storage) on first access; only the 10
    most recently accessed entries are kept in memory — older ones are
    flagged as ``False`` and transparently re-loaded on the next access.
    ``content.json`` itself and short keys (<= 40 chars) are never purged.
    """

    def __init__(self, site, *args, **kwargs):
        s = time.time()
        self.site = site
        self.cached_keys = []  # Keys whose json content is currently held in memory
        self.log = self.site.log
        self.db = ContentDb.getContentDb()
        self.db_id = self.db.needSite(site)
        self.num_loaded = 0  # Total number of json files loaded from disk so far
        super(ContentDbDict, self).__init__(self.db.loadDbDict(site))  # Load keys from database
        self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids)))

    def loadItem(self, key):
        """Load the json file at `key` from site storage and cache its content.

        Raises KeyError (and drops the stale dict entry) if the file no
        longer exists on disk.
        """
        try:
            self.num_loaded += 1
            if self.num_loaded % 100 == 0:  # Periodic progress log to avoid spamming
                if config.verbose:
                    self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack()))
                else:
                    self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key))
            content = self.site.storage.loadJson(key)
            dict.__setitem__(self, key, content)
        except IOError:
            # Was `dict.get(self, key)`: purged entries are stored as False,
            # so truthiness missed them and stale keys were never cleaned up.
            if dict.__contains__(self, key):
                self.__delitem__(key)  # File not exists anymore
            raise KeyError(key)

        self.addCachedKey(key)
        self.checkLimit()

        return content

    def getItemSize(self, key):
        """Return the on-disk size (bytes) of the file at `key`."""
        return self.site.storage.getSize(key)

    # Only keep last 10 accessed json in memory
    def checkLimit(self):
        """Purge the least recently cached value once more than 10 are held.

        The purged entry stays in the dict as ``False`` so we remember the
        key exists and can re-load it on demand.
        """
        if len(self.cached_keys) > 10:
            key_deleted = self.cached_keys.pop(0)
            dict.__setitem__(self, key_deleted, False)

    def addCachedKey(self, key):
        """Mark `key` as held in memory (subject to later purging)."""
        if key not in self.cached_keys and key != "content.json" and len(key) > 40:  # Always keep keys smaller than 40 char
            self.cached_keys.append(key)

    def __getitem__(self, key):
        """Return the content for `key`, re-loading it from disk if purged."""
        val = dict.get(self, key)
        if val is None:  # Unknown key
            raise KeyError(key)
        elif val is False:  # Loaded before, but purged from cache
            return self.loadItem(key)
        else:
            # Already loaded. (Was `if val:` first: a present-but-falsy
            # content such as {} fell through every branch and returned None.)
            return val

    def __setitem__(self, key, val):
        """Store `val` for `key` and persist its metadata to the content db."""
        self.addCachedKey(key)
        self.checkLimit()
        size = self.getItemSize(key)
        self.db.setContent(self.site, key, val, size)
        dict.__setitem__(self, key, val)

    def __delitem__(self, key):
        """Remove `key` from the dict, the content db, and the cache list."""
        self.db.deleteContent(self.site, key)
        dict.__delitem__(self, key)
        try:
            self.cached_keys.remove(key)
        except ValueError:
            pass

    def iteritems(self):
        """Yield (key, content) pairs, loading purged values; skip broken files."""
        # Snapshot the keys: self[key] may delete stale entries via loadItem,
        # which would break iteration over a live dict view.
        for key in list(dict.keys(self)):
            try:
                val = self[key]
            except Exception as err:
                self.log.warning("Error loading %s: %s" % (key, err))
                continue
            yield key, val

    def items(self):
        """Return a list of (key, content) pairs, skipping broken files."""
        back = []
        for key in list(dict.keys(self)):  # Snapshot: self[key] may delete stale entries
            try:
                val = self[key]
            except Exception as err:
                self.log.warning("Error loading %s: %s" % (key, err))
                continue
            back.append((key, val))
        return back

    def values(self):
        """Return a list of all contents, loading purged values; skip broken files."""
        back = []
        # Was `dict.iteritems(self)`, which does not exist on Python 3 and
        # made values() always raise AttributeError. Snapshot for safe deletes.
        for key, val in list(dict.items(self)):
            if not val:
                try:
                    val = self.loadItem(key)
                except Exception:
                    continue
            back.append(val)
        return back

    def get(self, key, default=None):
        """Like __getitem__, but return `default` on missing or broken entries."""
        try:
            return self.__getitem__(key)
        except KeyError:
            return default
        except Exception as err:
            self.site.bad_files[key] = self.site.bad_files.get(key, 1)
            # pop instead of __delitem__: the entry may already be gone,
            # and __delitem__ would raise a second error here.
            dict.pop(self, key, None)
            self.log.warning("Error loading %s: %s" % (key, err))
            return default

    def execute(self, query, params=None):
        """Run `query` on the content db, scoped to this site via site_id.

        `params` defaults to None (was a shared mutable `{}` default, which
        leaked the injected site_id between calls).
        """
        if params is None:
            params = {}
        params["site_id"] = self.db_id
        return self.db.execute(query, params)
123
if __name__ == "__main__":
    # Ad-hoc benchmark: load user content.json files from a live site data
    # directory and report load/iteration timings plus memory growth.
    # Requires psutil and a local "data-live" checkout; not run in production.
    import psutil

    mb = float(2 ** 20)
    proc = psutil.Process(os.getpid())
    mem_start = proc.memory_info()[0] / mb
    root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
    contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
    print("Init len", len(contents))

    t0 = time.time()
    user_dirs = os.listdir(root + "/data/users/")[0:8000]
    for user_dir in user_dirs:
        contents["data/users/%s/content.json" % user_dir]
    print("Load: %.3fs" % (time.time() - t0))

    t0 = time.time()
    count = 0
    for path, content in contents.items():
        assert path
        assert content
        count += 1
    print("Found:", count)
    print("Iteritem: %.3fs" % (time.time() - t0))

    t0 = time.time()
    count = 0
    for path in list(contents.keys()):
        assert path in contents
        count += 1
    print("In: %.3fs" % (time.time() - t0))

    print("Len:", len(list(contents.values())), len(list(contents.keys())))

    print("Mem: +", proc.memory_info()[0] / mb - mem_start)