A fork of what remains of ZeroNet, with the goal of adding an AT Protocol frontend/proxy.
1from Config import config
2from Db.Db import Db
3import time
4
5
class ChartDb(Db):
    """SQLite-backed store for time-series statistics, both global and per-site.

    Persists to ``<data_dir>/chart.db`` with three tables: ``data`` (the
    measurements), ``type`` (measurement names), ``site`` (site addresses).

    NOTE(review): ``self.execute`` here uses the project Db wrapper's extended
    placeholder syntax — ``"INSERT INTO type ?"`` / ``"DELETE FROM site WHERE ?"``
    with a dict argument — presumably expanded into column/value pairs by the
    base class; confirm against Db.Db before changing any query.
    """

    def __init__(self):
        # Schema version; presumably compared by the base class / checkTables()
        # to decide on table rebuilds — TODO confirm in Db.Db.
        self.version = 2
        super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir)
        self.foreign_keys = True
        self.checkTables()
        # In-memory caches: address -> site_id and type name -> type_id,
        # kept in sync by getSiteId()/getTypeId()/deleteSite().
        self.sites = self.loadSites()
        self.types = self.loadTypes()

    def getSchema(self):
        """Return the chart.db schema dict consumed by the Db base class.

        ``schema_changed`` is a per-table revision counter (bumping it is
        assumed to trigger a rebuild of that table — verify in Db.Db).
        """
        schema = {}
        schema["db_name"] = "Chart"
        schema["tables"] = {}
        # Measurements: one row per sample. site_id is NULL for global stats.
        schema["tables"]["data"] = {
            "cols": [
                ["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"],
                ["type_id", "INTEGER NOT NULL"],
                ["site_id", "INTEGER"],
                ["value", "INTEGER"],
                ["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"]
            ],
            "indexes": [
                "CREATE INDEX site_id ON data (site_id)",
                "CREATE INDEX date_added ON data (date_added)"
            ],
            "schema_changed": 2
        }
        # Lookup table: measurement type name -> type_id.
        schema["tables"]["type"] = {
            "cols": [
                ["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
                ["name", "TEXT"]
            ],
            "schema_changed": 1
        }
        # Lookup table: site address -> site_id.
        schema["tables"]["site"] = {
            "cols": [
                ["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"],
                ["address", "TEXT"]
            ],
            "schema_changed": 1
        }
        return schema

    def getTypeId(self, name):
        """Return the type_id for *name*, inserting a new row on first use.

        Results are cached in ``self.types``; the insert relies on the Db
        wrapper's dict-placeholder syntax and on the cursor's ``lastrowid``.
        """
        if name not in self.types:
            res = self.execute("INSERT INTO type ?", {"name": name})
            self.types[name] = res.lastrowid

        return self.types[name]

    def getSiteId(self, address):
        """Return the site_id for *address*, inserting a new row on first use.

        Mirrors getTypeId(); results are cached in ``self.sites``.
        """
        if address not in self.sites:
            res = self.execute("INSERT INTO site ?", {"address": address})
            self.sites[address] = res.lastrowid

        return self.sites[address]

    def loadSites(self):
        """Load the full site table into an address -> site_id dict."""
        sites = {}
        for row in self.execute("SELECT * FROM site"):
            sites[row["address"]] = row["site_id"]
        return sites

    def loadTypes(self):
        """Load the full type table into a name -> type_id dict."""
        types = {}
        for row in self.execute("SELECT * FROM type"):
            types[row["name"]] = row["type_id"]
        return types

    def deleteSite(self, address):
        """Remove a site and all of its measurements; no-op if unknown.

        Evicts the cache entry first, then deletes the site row and its
        data rows (dict-placeholder WHERE syntax of the Db wrapper).
        """
        if address in self.sites:
            site_id = self.sites[address]
            del self.sites[address]
            self.execute("DELETE FROM site WHERE ?", {"site_id": site_id})
            self.execute("DELETE FROM data WHERE ?", {"site_id": site_id})

    def archive(self):
        """Compact old measurements and enforce retention limits.

        Walks backwards week by week, collapsing global samples
        (``site_id IS NULL``) into one aggregated row per hour per type;
        stops at the first week that yields nothing to archive. Afterwards
        prunes global stats older than ~6 months and per-site stats older
        than ~1 month, and VACUUMs if anything was archived.
        """
        week_back = 1
        while 1:
            s = time.time()
            # One-week window starting (week_back + 1) weeks before now.
            date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1)
            date_added_to = date_added_from + 60 * 60 * 24 * 7
            # Aggregate global samples into hourly buckets (local time);
            # data_ids collects the source rows so they can be deleted below.
            res = self.execute("""
                SELECT
                 MAX(date_added) AS date_added,
                 SUM(value) AS value,
                 GROUP_CONCAT(data_id) AS data_ids,
                 type_id,
                 site_id,
                 COUNT(*) AS num
                FROM data
                WHERE
                 site_id IS NULL AND
                 date_added > :date_added_from AND
                 date_added < :date_added_to
                GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id
            """, {"date_added_from": date_added_from, "date_added_to": date_added_to})

            num_archived = 0
            cur = self.getCursor()
            for row in res:
                # A bucket of one row is already as compact as it can get.
                if row["num"] == 1:
                    continue
                cur.execute("INSERT INTO data ?", {
                    "type_id": row["type_id"],
                    "site_id": row["site_id"],
                    "value": row["value"],
                    "date_added": row["date_added"]
                })
                # String interpolation is acceptable here: data_ids is a
                # GROUP_CONCAT of integer primary keys produced by SQLite
                # itself, not external input.
                cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"])
                num_archived += row["num"]
            self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s))
            week_back += 1
            time.sleep(0.1)  # Yield between windows to avoid hogging the DB
            if num_archived == 0:
                break
        # Only keep 6 month of global stats
        self.execute(
            "DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit",
            {"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 }
        )
        # Only keep 1 month of site stats
        self.execute(
            "DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit",
            {"date_added_limit": time.time() - 60 * 60 * 24 * 30 }
        )
        # Reclaim file space only if at least one week was actually archived.
        if week_back > 1:
            self.execute("VACUUM")