ANProto personal data server
2
fork

Configure Feed

Select the types of activity you want to include in your feed.

at master 366 lines 9.1 kB view raw
import { decode } from './lib/base64.js'
import { cachekv } from './lib/cachekv.js'
import { idbkv } from './lib/idbkv.js'
import { human } from './lib/human.js'
import { vb } from './lib/vb.js'
import { yaml } from './lib/yaml.js'
import { an } from 'https://esm.sh/gh/evbogue/anproto@ddc040c/an.js'

// Module state shared by every apds.* function.
let db                  // key/value store (IndexedDB, falling back to Cache API)
let hashLog = []        // ordered hashes of signed messages
let openedLog = []      // decoded message objects, parallel to hashLog
let newMessages = false // logs changed and need persisting
let sort = true         // openedLog is stale and needs a rebuild/resort

export const apds = {}

/**
 * Rebuild openedLog from hashLog: re-fetch every signed message, verify the
 * stored blobs still match their hashes, decode them, and drop anything that
 * fails an integrity check. Results are sorted by timestamp and both logs are
 * flagged for persistence.
 */
const rebuildOpenedLog = async () => {
  const rebuilt = []

  await Promise.all(hashLog.map(async (hash) => {
    try {
      const obj = {
        hash,
        sig: await apds.get(hash)
      }
      if (!obj.sig) { return }
      const sigHash = await apds.hash(obj.sig)
      if (sigHash !== hash) {
        console.warn('hashlog integrity: sig hash mismatch', hash)
        await db.rm(hash)
        return
      }
      obj.author = obj.sig.substring(0, 44)
      obj.opened = await apds.open(obj.sig)
      if (!obj.opened || obj.opened.length < 14) {
        console.warn('hashlog integrity: open failed', hash)
        await db.rm(hash)
        return
      }
      // Opened payload layout: 13-char millisecond timestamp + content hash.
      const contentHash = obj.opened.substring(13)
      obj.text = await apds.get(contentHash)
      if (obj.text) {
        const contentSig = await apds.hash(obj.text)
        if (contentSig !== contentHash) {
          console.warn('hashlog integrity: content hash mismatch', hash)
          await db.rm(contentHash)
          return
        }
      }
      obj.ts = obj.opened.substring(0, 13)
      rebuilt.push(obj)
    } catch (err) { /* ignore per-entry */ }
  }))

  // Array#sort is synchronous (no await needed); compare the timestamp
  // strings numerically instead of relying on implicit string subtraction.
  rebuilt.sort((a, b) => Number(a.ts) - Number(b.ts))

  hashLog = rebuilt.map(msg => msg.hash)
  openedLog = rebuilt
  newMessages = true
  sort = false
}

// Rebuild the opened log if it is flagged stale or has never been built.
apds.ensureOpenLog = async () => {
  if (sort || !openedLog.length) {
    await rebuildOpenedLog()
  }
}

/**
 * Open the backing store for `appId`, migrate any legacy Cache API data into
 * IndexedDB once, restore the persisted logs, and start the background timers
 * that persist changes (every 1s) and resort the log (every 20s).
 */
apds.start = async (appId) => {
  if ('indexedDB' in globalThis) {
    try {
      db = await idbkv(appId)
      const migrationFlag = 'cachekv_migrated_v1'
      const migrationDone = await db.get(migrationFlag)
      if (!migrationDone) {
        const legacy = await cachekv(appId)
        const migrated = []
        if (legacy && legacy.keys) {
          const keys = await legacy.keys()
          for (const key of keys) {
            if (key === migrationFlag) continue
            const value = await legacy.get(key)
            if (value === undefined) continue
            // Never clobber a key that already exists in IndexedDB.
            const existing = await db.get(key)
            if (existing === undefined) {
              await db.put(key, value)
            }
            await legacy.rm(key)
            migrated.push(key)
          }
        }
        await db.put(migrationFlag, new Date().toISOString())
        if (migrated.length) {
          console.log('apds: migrated cachekv to IndexedDB', migrated.join(', '))
        }
      }
    } catch (err) {
      console.warn('IndexedDB unavailable, falling back to Cache API', err)
    }
  }
  if (!db) {
    db = await cachekv(appId)
  }

  // Persist the logs at most once per second, and only when they changed.
  setInterval(async () => {
    if (newMessages) {
      await db.put('hashlog', JSON.stringify(hashLog))
      await db.put('openedlog', JSON.stringify(openedLog))
      newMessages = false
    }
  }, 1000)

  const savedHashLog = await db.get('hashlog')
  const savedOpenedLog = await db.get('openedlog')
  if (savedHashLog) {
    hashLog = JSON.parse(savedHashLog)
  }
  if (savedOpenedLog) {
    openedLog = JSON.parse(savedOpenedLog)
  }

  // Periodically rebuild/resort the opened log when it is flagged stale.
  setInterval(async () => {
    if (sort) {
      await rebuildOpenedLog()
    }
  }, 20000)
}

// Generate a fresh keypair (public + private key concatenated).
apds.generate = async () => {
  const genkey = await an.gen()
  return genkey
}

// Return the stored keypair, or undefined if none has been saved.
apds.keypair = async () => {
  const keypair = await db.get('keypair')
  if (keypair) {
    return keypair
  }
}

// First 44 characters of the keypair are the public key.
apds.pubkey = async () => {
  const keypair = await apds.keypair()
  if (keypair) {
    return keypair.substring(0, 44)
  }
}

// Everything after the public key is the private key.
apds.privkey = async () => {
  const keypair = await apds.keypair()
  if (keypair) {
    return keypair.substring(44)
  }
}

apds.deletekey = async () => {
  // Awaited so callers can rely on the key being gone when this resolves.
  await db.rm('keypair')
}

apds.clear = async () => {
  await db.clear()
}

apds.hash = async (data) => { return await an.hash(data) }

/**
 * Store `data`, sign its hash with the local keypair, store the signature,
 * record it as `previous`, and return the signature's hash (the protocol
 * message callers broadcast).
 */
apds.sign = async (data) => {
  const hash = await apds.make(data)
  const sig = await an.sign(hash, await apds.keypair())
  await apds.add(sig)
  const protocolMsg = await apds.make(sig)

  // Awaited so 'previous' is durably recorded before the message is returned.
  await db.put('previous', protocolMsg)
  return protocolMsg
}

// Verify and open a signed message; returns undefined when it is invalid.
apds.open = async (msg) => {
  try {
    if (msg.endsWith('==')) {
      return await an.open(msg)
    }
  } catch (err) {
    // Not a valid ANProto message — treat as unopenable.
  }
}

apds.parseYaml = async (doc) => {
  return await yaml.parse(doc)
}

apds.createYaml = async (obj, content) => {
  return await yaml.create(obj, content)
}

// A hash in this system is always a 44-character base64 string.
const isHash = (value) => typeof value === 'string' && value.length === 44

// Pull the blob hashes referenced by markdown image tags out of a body.
const extractImagesFromBody = (body) => {
  if (!body) { return [] }
  const matches = body.match(/!\[.*?\]\((.*?)\)/g)
  if (!matches) { return [] }
  const hashes = []
  for (const image of matches) {
    const src = image.match(/!\[.*?\]\((.*?)\)/)?.[1]
    if (isHash(src)) { hashes.push(src) }
  }
  return hashes
}

/**
 * Wrap `content` with the author's profile metadata (name, image, previous)
 * as YAML when any exists, then sign and return the protocol message.
 * `prev` optionally seeds the metadata object.
 */
apds.compose = async (content, prev) => {
  let obj = {}
  if (prev) { obj = prev }

  const name = await db.get('name')
  const image = await db.get('image')
  const previous = await db.get('previous')

  if (name) { obj.name = name }
  if (image) { obj.image = image }
  if (previous) { obj.previous = previous }

  if (Object.keys(obj).length > 0) {
    // Named `doc` to avoid shadowing the imported yaml helper.
    const doc = await apds.createYaml(obj, content)
    return await apds.sign(doc)
  } else {
    return await apds.sign(content)
  }
}

// Content-address `data`: store it under its hash and return the hash.
apds.make = async (data) => {
  const hash = await apds.hash(data)
  await db.put(hash, data)
  return hash
}

apds.get = async (hash) => {
  const blob = await db.get(hash)
  return blob
}

apds.put = async (key, value) => {
  await db.put(key, value)
}

apds.rm = async (key) => {
  await db.rm(key)
}

/**
 * Verify `msg` and append it to the logs if it is new.
 * Returns true when the message was added, undefined otherwise.
 */
apds.add = async (msg) => {
  const opened = await apds.open(msg)
  if (opened) {
    const hash = await apds.make(msg)
    if (!hashLog.includes(hash)) {
      hashLog.push(hash)
      const obj = {
        hash,
        sig: msg
      }
      obj.author = obj.sig.substring(0, 44)
      obj.opened = opened
      obj.text = await apds.get(obj.opened.substring(13))
      obj.ts = obj.opened.substring(0, 13)
      openedLog.push(obj)
      newMessages = true
      sort = true // appended possibly out of order; schedule a resort
      return true
    }
  }
}

/**
 * Remove every message published by `author`, plus the content blobs and any
 * profile/inline images those messages reference.
 * Returns { removed, blobs } counts.
 */
apds.purgeAuthor = async (author) => {
  if (!author || author.length !== 44) {
    return { removed: 0, blobs: 0 }
  }
  const targets = openedLog.filter(msg => msg.author === author)
  if (!targets.length) {
    return { removed: 0, blobs: 0 }
  }

  const hashesToRemove = new Set()
  const blobsToRemove = new Set()

  for (const msg of targets) {
    if (!msg || !msg.hash) { continue }
    hashesToRemove.add(msg.hash)
    const opened = msg.opened || (msg.sig ? await apds.open(msg.sig) : null)
    const contentHash = opened && opened.length > 13 ? opened.substring(13) : null
    const content = msg.text || (contentHash ? await apds.get(contentHash) : null)
    if (contentHash && isHash(contentHash)) {
      blobsToRemove.add(contentHash)
    }
    if (content) {
      // Also purge the profile image and any markdown-embedded images.
      const parsed = await apds.parseYaml(content)
      if (parsed?.image && isHash(parsed.image)) {
        blobsToRemove.add(parsed.image)
      }
      const bodyImages = extractImagesFromBody(parsed?.body)
      bodyImages.forEach(hash => blobsToRemove.add(hash))
    }
  }

  for (const hash of blobsToRemove) {
    await db.rm(hash)
  }
  for (const hash of hashesToRemove) {
    await db.rm(hash)
  }

  if (hashesToRemove.size) {
    hashLog = hashLog.filter(hash => !hashesToRemove.has(hash))
    openedLog = openedLog.filter(msg => msg.author !== author)
    newMessages = true
    sort = false
    // Persist immediately rather than waiting for the 1s timer.
    await db.put('hashlog', JSON.stringify(hashLog))
    await db.put('openedlog', JSON.stringify(openedLog))
  }

  return { removed: hashesToRemove.size, blobs: blobsToRemove.size }
}

apds.getHashLog = async () => { return hashLog }

apds.getOpenedLog = async () => { return openedLog }

/**
 * Query the opened log:
 * - falsy query: every message
 * - '?text':     case-insensitive substring search over message text
 * - otherwise:   exact match on author pubkey or message hash
 */
apds.query = async (query) => {
  if (!openedLog[0]) { return [] }
  if (!query) { return openedLog }
  if (query.startsWith('?')) {
    const search = query.substring(1).replace(/%20/g, ' ').toUpperCase()
    return openedLog.filter(msg => msg.text && msg.text.toUpperCase().includes(search))
  }
  return openedLog.filter(msg => msg.author === query || msg.hash === query)
}

// Distinct author pubkeys seen in the log, in first-seen order.
apds.getPubkeys = async () => {
  const arr = await apds.query()
  return [...new Set(arr.map(msg => msg.author))]
}

// Most recent message from `pubkey` (log is kept in timestamp order).
apds.getLatest = async (pubkey) => {
  const q = openedLog.filter(msg => msg.author === pubkey)
  return q[q.length - 1]
}

// Human-readable time for a 13-character millisecond timestamp string.
apds.human = async (ts) => {
  return await human(new Date(parseInt(ts, 10)))
}

// Deterministic visual identicon for a pubkey.
apds.visual = async (pubkey) => {
  return vb(decode(pubkey), 256)
}