+1  package.json
+8  pnpm-lock.yaml

···
       '@mary/async-iterator-fns':
         specifier: jsr:^0.1.1
         version: '@jsr/mary__async-iterator-fns@0.1.1'
+      '@mary/batch-fetch':
+        specifier: jsr:^0.1.0
+        version: '@jsr/mary__batch-fetch@0.1.0'
       '@mary/date-fns':
         specifier: jsr:^0.1.3
         version: '@jsr/mary__date-fns@0.1.3'
···

   '@jsr/mary__async-iterator-fns@0.1.1':
     resolution: {integrity: sha512-ef/TDpu6yGTAb4fbGEjSFPO7u49WrxJeXa9T6fvmjlpEfR84qxrjTR6MEUQ1hMySD0+O9yQKvBl/KL5x7K9+iA==, tarball: https://npm.jsr.io/~/11/@jsr/mary__async-iterator-fns/0.1.1.tgz}
+
+  '@jsr/mary__batch-fetch@0.1.0':
+    resolution: {integrity: sha512-A5SmTfDUMjr+AaJA+wFg4eKKVQm6/51CYTg5ssO+vcnaCJoP1Y0RURG6VkMno9QUholf3AAldfzRgQOXDZbFgg==, tarball: https://npm.jsr.io/~/11/@jsr/mary__batch-fetch/0.1.0.tgz}

   '@jsr/mary__date-fns@0.1.3':
     resolution: {integrity: sha512-kjS04BESEHO9ZTqjOxk4ip8DsAdVDmt/jC5V4zVIYq3VD/04+WJK9kjdQda23eVZMuF9ZZY0zMswU7UXG+PSrg==, tarball: https://npm.jsr.io/~/11/@jsr/mary__date-fns/0.1.3.tgz}
···
   '@jsr/mary__array-fns@0.1.4': {}

   '@jsr/mary__async-iterator-fns@0.1.1': {}
+
+  '@jsr/mary__batch-fetch@0.1.0': {}

   '@jsr/mary__date-fns@0.1.3': {}
+7 -8  src/lib/states/singletons/moderation.ts
···
 import type { AppBskyLabelerDefs, At } from '@atcute/client/lexicons';
 import { mapDefined } from '@mary/array-fns';
-import { createQueries } from '@mary/solid-query';
+import { createBatchedFetch } from '@mary/batch-fetch';
+import { type QueryFunctionContext as QC, createQueries } from '@mary/solid-query';

 import { BLUESKY_MODERATION_DID } from '~/api/defaults';
 import type { ModerationLabeler, ModerationOptions, ModerationPreferences } from '~/api/moderation';
 import { interpretLabelerDefinition } from '~/api/moderation/labeler';
-
-import { createBatchedFetch } from '~/lib/utils/batch-fetch';

 import { useAgent } from '../agent';
 import { useSession } from '../session';
···
 		return currentAccount.preferences.moderation;
 	});

-	const fetchLabeler = createBatchedFetch<At.Did, At.Did, ModerationLabeler>({
+	const fetchLabeler = createBatchedFetch<At.Did, ModerationLabeler>({
 		limit: 20,
 		timeout: 1,
-		idFromQuery: (query) => query,
-		idFromData: (data) => data.did,
-		async fetch(dids) {
+		idFromResource: (labeler) => labeler.did,
+		async fetch(dids, signal) {
 			const { data } = await rpc.get('app.bsky.labeler.getServices', {
+				signal,
 				params: {
 					dids: dids,
 					detailed: true,
···
 		return {
 			queryKey: ['labeler-definition', did],
-			queryFn: () => fetchLabeler(did),
+			queryFn: ({ signal }: QC) => fetchLabeler(did, signal),
 			staleTime: 21600000, // 6 hours
 			gcTime: 86400000, // 24 hours
 			refetchOnWindowFocus: true,
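
The call sites above imply the following surface for @mary/batch-fetch. This is a sketch reconstructed purely from this diff, not the package's published typings: the query and resource types appear as two generics, idFromResource maps a fetched resource back to the query it answers, fetch receives the whole batch plus an AbortSignal, and the returned function forwards the signal it is given as a second argument.

// Hypothetical typings for @mary/batch-fetch, inferred from the usage in
// moderation.ts above; the package's actual definitions may differ.
type Promisable<T> = T | Promise<T>;

interface BatchedFetchOptions<Query, Resource> {
	// presumably: start a new batch once the current one holds this many queries
	limit: number;
	// presumably: dispatch a batch this many ms after its most recent query joined
	timeout: number;
	// maps a fetched resource back to the query it answers
	idFromResource: (resource: Resource) => Query;
	// fetches one whole batch; the signal is assumed to abort the underlying request
	fetch: (queries: Query[], signal: AbortSignal) => Promisable<Resource[]>;
}

declare function createBatchedFetch<Query, Resource>(
	options: BatchedFetchOptions<Query, Resource>,
): (query: Query, signal?: AbortSignal) => Promise<Resource>;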
-106  src/lib/utils/batch-fetch.ts
···
-// we would sometimes rely on fetching multiple individual posts, and it would
-// be preferrable if it can be batched.
-
-type Promisable<T> = T | Promise<T>;
-
-export type QueryId = string | number;
-
-export interface BatchedFetchOptions<Query, Id extends QueryId, Data> {
-	limit: number;
-	timeout: number;
-	fetch: (queries: Query[]) => Promisable<Data[]>;
-	key?: (query: Query) => string | number;
-	idFromQuery: (query: Query) => Id;
-	idFromData: (data: Data) => Id;
-}
-
-interface BatchedFetchMap<Query, Id, Data> {
-	key: string | number | undefined;
-	timeout: any;
-	queries: Query[];
-	pending: Map<Id, PromiseWithResolvers<Data>>;
-}
-
-export class ResourceMissingError extends Error {
-	name = 'ResourceMissingError';
-}
-
-/*#__NO_SIDE_EFFECTS__*/
-export const createBatchedFetch = <Query, Id extends QueryId, Data>(
-	options: BatchedFetchOptions<Query, Id, Data>,
-) => {
-	const { limit, timeout, fetch, key: _key, idFromData, idFromQuery } = options;
-
-	let curr: BatchedFetchMap<Query, Id, Data> | undefined;
-
-	return (query: Query): Promise<Data> => {
-		const id = idFromQuery(query);
-		const key = _key?.(query);
-
-		let map = curr;
-
-		if (!map || map.queries.length >= limit || map.key !== key) {
-			map = curr = {
-				key,
-				timeout: undefined,
-				queries: [],
-				pending: new Map(),
-			};
-		}
-
-		let deferred = map.pending.get(id);
-
-		if (!deferred) {
-			deferred = Promise.withResolvers<Data>();
-
-			map.queries.push(query);
-			map.pending.set(id, deferred);
-		}
-
-		clearTimeout(map.timeout);
-
-		map.timeout = setTimeout(() => {
-			if (curr === map) {
-				curr = undefined;
-			}
-
-			perform(map!, fetch, idFromData);
-		}, timeout);
-
-		return deferred.promise;
-	};
-};
-
-const perform = async <Query, Id extends QueryId, Data>(
-	map: BatchedFetchMap<Query, Id, Data>,
-	fetch: (queries: Query[]) => Promisable<Data[]>,
-	idFromData: (data: Data) => Id,
-) => {
-	const queries = map.queries;
-	const pending = map.pending;
-
-	let errored = false;
-
-	try {
-		const dataset = await fetch(queries);
-
-		for (const data of dataset) {
-			const id = idFromData(data);
-			const deferred = pending.get(id);
-
-			deferred?.resolve(data);
-		}
-	} catch (error) {
-		errored = true;
-
-		for (const deferred of pending.values()) {
-			deferred.reject(error);
-		}
-	} finally {
-		if (!errored) {
-			for (const deferred of pending.values()) {
-				deferred.reject(new ResourceMissingError());
-			}
-		}
-	}
-};
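
For context on what the removed helper did (and what @mary/batch-fetch now has to cover): every call made while a batch was open registered a deferred promise keyed by idFromQuery, the batch was dispatched through a single fetch(queries) once `timeout` ms passed after the most recent call (a new batch was started once `limit` queries had accumulated), each returned item resolved its pending promise via idFromData, and any query the response did not cover was rejected with ResourceMissingError. Below is a usage sketch against the deleted module; getPostsByUri is a hypothetical bulk fetcher used only for illustration, the real consumer was fetchLabeler in moderation.ts above.

import { createBatchedFetch } from '~/lib/utils/batch-fetch';

// hypothetical bulk endpoint wrapper standing in for a real RPC call
declare function getPostsByUri(uris: string[]): Promise<{ uri: string; text: string }[]>;

const fetchPost = createBatchedFetch<string, string, { uri: string; text: string }>({
	limit: 25, // a new batch is started once the current one holds 25 URIs
	timeout: 1, // a batch is dispatched 1 ms after its most recent URI was added
	idFromQuery: (uri) => uri,
	idFromData: (post) => post.uri,
	fetch: (uris) => getPostsByUri(uris),
});

// Both calls land in the same batch, so getPostsByUri runs once with both URIs;
// a URI missing from the response rejects its caller with ResourceMissingError.
const [a, b] = await Promise.all([
	fetchPost('at://did:plc:example/app.bsky.feed.post/1'),
	fetchPost('at://did:plc:example/app.bsky.feed.post/2'),
]);

console.log(a.text, b.text);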