.claude/settings.local.json (+4, -1)
···
       "Read(//Users/scarndp/dev/skywatch/skywatch-tail/src/blobs/**)",
       "Read(//Users/scarndp/dev/skywatch/skywatch-tail/**)",
       "Bash(curl:*)",
-      "Bash(cat:*)"
+      "Bash(cat:*)",
+      "Bash(git push:*)",
+      "Bash(bun scripts/compute-phash.ts:*)",
+      "Bash(bun test:*)"
     ],
     "deny": [],
     "ask": []
src/config/index.ts (+2)
···
   jetstream: {
     url: string;
     wantedCollections: string[];
+    cursorUpdateInterval: number;
   };
   redis: {
     url: string;
···
   jetstream: {
     url: getEnv("JETSTREAM_URL", "wss://jetstream.atproto.tools/subscribe"),
     wantedCollections: ["app.bsky.feed.post"],
+    cursorUpdateInterval: getEnvNumber("CURSOR_UPDATE_INTERVAL", 10000),
   },
   redis: {
     url: getEnv("REDIS_URL", "redis://localhost:6379"),
src/main.ts (+212, -136)
···
-import { readFile, writeFile } from "node:fs/promises";
-import { existsSync } from "node:fs";
+import fs from "node:fs";
 import type { CommitCreateEvent } from "@skyware/jetstream";
 import Redis from "ioredis";
 import { Jetstream } from "@skyware/jetstream";
···

 logger.info("Starting skywatch-phash service");

-// Cursor persistence
-const CURSOR_FILE = "/app/cursor.txt";
+let cursor = 0;
+let cursorUpdateInterval: NodeJS.Timeout;

-async function loadCursor(): Promise<number | undefined> {
-  try {
-    if (existsSync(CURSOR_FILE)) {
-      const content = await readFile(CURSOR_FILE, "utf-8");
-      const cursor = Number.parseInt(content.trim(), 10);
-      if (!Number.isNaN(cursor) && cursor > 0) {
-        logger.info({ cursor, cursorDate: new Date(cursor / 1000).toISOString() }, "Loaded cursor from file");
-        return cursor;
-      }
-    }
-  } catch (error) {
-    logger.warn({ error }, "Failed to load cursor from file");
-  }
-  return undefined;
+function epochUsToDateTime(cursor: number): string {
+  return new Date(cursor / 1000).toISOString();
 }

-async function saveCursor(cursor: number): Promise<void> {
-  try {
-    await writeFile(CURSOR_FILE, cursor.toString(), "utf-8");
-  } catch (error) {
-    logger.error({ error }, "Failed to save cursor to file");
+try {
+  logger.info({ process: "MAIN" }, "Trying to read cursor from cursor.txt");
+  cursor = Number(fs.readFileSync("cursor.txt", "utf8"));
+  logger.info(
+    { process: "MAIN", cursor, datetime: epochUsToDateTime(cursor) },
+    "Cursor found",
+  );
+} catch (error) {
+  if (error instanceof Error && "code" in error && error.code === "ENOENT") {
+    cursor = Math.floor(Date.now() * 1000);
+    logger.info(
+      { process: "MAIN", cursor, datetime: epochUsToDateTime(cursor) },
+      "Cursor not found in cursor.txt, setting cursor",
+    );
+    fs.writeFileSync("cursor.txt", cursor.toString(), "utf8");
+  } else {
+    logger.error({ process: "MAIN", error }, "Failed to read cursor");
+    process.exit(1);
   }
 }

-// Load cursor before creating Jetstream instance
-const savedCursor = await loadCursor();
-
 // Create Jetstream instance at module level
 const jetstream = new Jetstream({
   endpoint: config.jetstream.url,
   wantedCollections: ["app.bsky.feed.post"],
-  cursor: savedCursor,
+  cursor,
 });

 // Module-level variables for queue and worker
···

 // Register Jetstream event handlers at module level
 jetstream.on("open", () => {
-  logger.info(
-    {
-      url: config.jetstream.url,
-      cursor: jetstream.cursor,
-    },
-    "Connected to Jetstream"
-  );
+  if (jetstream.cursor) {
+    logger.info(
+      {
+        process: "MAIN",
+        url: config.jetstream.url,
+        cursor: jetstream.cursor,
+        datetime: epochUsToDateTime(jetstream.cursor),
+      },
+      "Connected to Jetstream with cursor",
+    );
+  } else {
+    logger.info(
+      { process: "MAIN", url: config.jetstream.url },
+      "Connected to Jetstream, waiting for cursor",
+    );
+  }
+  cursorUpdateInterval = setInterval(() => {
+    if (jetstream.cursor) {
+      logger.info(
+        {
+          process: "MAIN",
+          cursor: jetstream.cursor,
+          datetime: epochUsToDateTime(jetstream.cursor),
+        },
+        "Cursor updated",
+      );
+      fs.writeFile("cursor.txt", jetstream.cursor.toString(), (err) => {
+        if (err)
+          logger.error(
+            { process: "MAIN", error: err },
+            "Failed to write cursor",
+          );
+      });
+    }
+  }, config.jetstream.cursorUpdateInterval);
 });

 jetstream.on("close", () => {
-  logger.info("Jetstream connection closed");
+  clearInterval(cursorUpdateInterval);
+  logger.info({ process: "MAIN" }, "Jetstream connection closed");
 });

 jetstream.on("error", (error) => {
···
 });

 // Register onCreate handler for posts
-jetstream.onCreate("app.bsky.feed.post", async (event: CommitCreateEvent<"app.bsky.feed.post">) => {
-  try {
-    const record = event.commit.record as Record<string, unknown>;
-    const embed = record.embed;
+jetstream.onCreate(
+  "app.bsky.feed.post",
+  async (event: CommitCreateEvent<"app.bsky.feed.post">) => {
+    const atURI = `at://${event.did}/app.bsky.feed.post/${event.commit.rkey}`;
+    const hasEmbed = Object.prototype.hasOwnProperty.call(
+      event.commit.record,
+      "embed",
+    );

-    // Extract blob references
-    const blobs: Array<{ cid: string; mimeType?: string }> = [];
+    if (!hasEmbed) {
+      return;
+    }

-    if (embed && typeof embed === "object") {
-      const embedObj = embed as Record<string, unknown>;
+    try {
+      const record = event.commit.record as Record<string, unknown>;
+      const embed = record.embed;

-      if (Array.isArray(embedObj.images)) {
-        for (const img of embedObj.images) {
-          if (typeof img === "object" && img !== null) {
-            const image = img as Record<string, unknown>;
-            if (image.image && typeof image.image === "object" && image.image !== null) {
-              const imageObj = image.image as Record<string, unknown>;
-              const ref = imageObj.ref as Record<string, unknown> | undefined;
-              if (ref && typeof ref.$link === "string") {
-                blobs.push({
-                  cid: ref.$link,
-                  mimeType: typeof imageObj.mimeType === "string" ? imageObj.mimeType : undefined,
-                });
-              }
-            }
-          }
-        }
-      }
+      // Extract blob references
+      const blobs: Array<{ cid: string; mimeType?: string }> = [];
+
+      if (embed && typeof embed === "object") {
+        const embedObj = embed as Record<string, unknown>;

-      if (embedObj.media && typeof embedObj.media === "object" && embedObj.media !== null) {
-        const media = embedObj.media as Record<string, unknown>;
-        if (Array.isArray(media.images)) {
-          for (const img of media.images) {
+        if (Array.isArray(embedObj.images)) {
+          for (const img of embedObj.images) {
             if (typeof img === "object" && img !== null) {
               const image = img as Record<string, unknown>;
-              if (image.image && typeof image.image === "object" && image.image !== null) {
+              if (
+                image.image &&
+                typeof image.image === "object" &&
+                image.image !== null
+              ) {
                 const imageObj = image.image as Record<string, unknown>;
                 const ref = imageObj.ref as Record<string, unknown> | undefined;
                 if (ref && typeof ref.$link === "string") {
                   blobs.push({
                     cid: ref.$link,
-                    mimeType: typeof imageObj.mimeType === "string" ? imageObj.mimeType : undefined,
+                    mimeType:
+                      typeof imageObj.mimeType === "string"
+                        ? imageObj.mimeType
+                        : undefined,
                   });
                 }
               }
             }
           }
         }
+
+        if (
+          embedObj.media &&
+          typeof embedObj.media === "object" &&
+          embedObj.media !== null
+        ) {
+          const media = embedObj.media as Record<string, unknown>;
+          if (Array.isArray(media.images)) {
+            for (const img of media.images) {
+              if (typeof img === "object" && img !== null) {
+                const image = img as Record<string, unknown>;
+                if (
+                  image.image &&
+                  typeof image.image === "object" &&
+                  image.image !== null
+                ) {
+                  const imageObj = image.image as Record<string, unknown>;
+                  const ref = imageObj.ref as
+                    | Record<string, unknown>
+                    | undefined;
+                  if (ref && typeof ref.$link === "string") {
+                    blobs.push({
+                      cid: ref.$link,
+                      mimeType:
+                        typeof imageObj.mimeType === "string"
+                          ? imageObj.mimeType
+                          : undefined,
+                    });
+                  }
+                }
+              }
+            }
+          }
+        }
       }
-    }

-    if (blobs.length === 0) {
-      return;
-    }
+      if (blobs.length === 0) {
+        return;
+      }

-    const postUri = `at://${event.did}/${event.commit.collection}/${event.commit.rkey}`;
+      const postUri = `at://${event.did}/${event.commit.collection}/${event.commit.rkey}`;

-    logger.debug({ uri: postUri, blobCount: blobs.length }, "Post with blobs detected");
+      logger.debug(
+        { uri: postUri, blobCount: blobs.length },
+        "Post with blobs detected",
+      );

-    const job: ImageJob = {
-      postUri,
-      postCid: event.commit.cid,
-      postDid: event.did,
-      blobs,
-      timestamp: Date.now(),
-      attempts: 0,
-    };
+      const job: ImageJob = {
+        postUri,
+        postCid: event.commit.cid,
+        postDid: event.did,
+        blobs,
+        timestamp: Date.now(),
+        attempts: 0,
+      };

-    await queue.enqueue(job);
-  } catch (error) {
-    logger.error({ error, event }, "Error processing jetstream event");
-  }
-});
+      await queue.enqueue(job);
+    } catch (error) {
+      logger.error({ error, event }, "Error processing jetstream event");
+    }
+  },
+);

 // Async setup
 logger.info("Authenticating labeler");
···
   logger.error({ error }, "Redis error");
 });

-cache = config.cache.enabled ? new PhashCache(redis, config.cache.ttl) : undefined;
+cache = config.cache.enabled
+  ? new PhashCache(redis, config.cache.ttl)
+  : undefined;
 claims = new ModerationClaims(redis);
 metrics = new MetricsCollector();

···
   },
   agent,
   cache,
-  metrics
+  metrics,
 );

 worker.onMatchFound(async (postUri, postCid, postDid, match) => {
···
       matchedPhash: match.matchedPhash,
       hammingDistance: match.hammingDistance,
     },
-    "Match found - executing moderation actions"
+    "Match found - executing moderation actions",
   );

   try {
     if (check.toLabel) {
-      await createPostLabel(postUri, postCid, check.label, check.comment, match.phash, claims, metrics);
+      await createPostLabel(
+        postUri,
+        postCid,
+        check.label,
+        check.comment,
+        match.phash,
+        claims,
+        metrics,
+      );
     }

     if (check.reportPost) {
-      await createPostReport(postUri, postCid, check.comment, match.phash, metrics);
+      await createPostReport(
+        postUri,
+        postCid,
+        check.comment,
+        match.phash,
+        metrics,
+      );
     }

     if (check.labelAcct) {
-      await createAccountLabel(postDid, check.label, check.comment, postUri, match.phash, claims, metrics);
+      await createAccountLabel(
+        postDid,
+        check.label,
+        check.comment,
+        postUri,
+        match.phash,
+        claims,
+        metrics,
+      );
     }

     if (check.reportAcct) {
-      await createAccountReport(postDid, check.comment, postUri, match.phash, metrics);
+      await createAccountReport(
+        postDid,
+        check.comment,
+        postUri,
+        match.phash,
+        metrics,
+      );
     }
   } catch (error) {
-    logger.error({ error, postUri, postDid }, "Failed to execute moderation actions");
+    logger.error(
+      { error, postUri, postDid },
+      "Failed to execute moderation actions",
+    );
   }
 });

-const shutdown = async () => {
-  try {
-    logger.info("Shutting down gracefully");
-    jetstream.close();
-
-    // Save cursor one last time before shutdown
-    if (jetstream.cursor) {
-      await saveCursor(jetstream.cursor);
-      logger.info({ cursor: jetstream.cursor }, "Saved final cursor position");
-    }
-
-    await worker.stop();
-    await queue.disconnect();
-    await redis.quit();
-    clearInterval(statsInterval);
-    clearInterval(cursorInterval);
-    logger.info("Service stopped");
-    process.exit(0);
-  } catch (error) {
-    logger.error({ error }, "Error shutting down gracefully");
-    process.exit(1);
-  }
-};
-
-process.on("SIGTERM", () => void shutdown());
-process.on("SIGINT", () => void shutdown());
-
 logger.info("Starting queue worker");
 await worker.start();

···

 logger.info("Service started and ready");

-// Save cursor position every 10 seconds
-cursorInterval = setInterval(async () => {
-  if (jetstream.cursor) {
-    const cursorDate = new Date(jetstream.cursor / 1000).toISOString();
-    logger.info(
-      {
-        cursor: jetstream.cursor,
-        cursorDate,
-      },
-      "Jetstream cursor position"
-    );
-    await saveCursor(jetstream.cursor);
-  }
-}, 10_000);
-
 statsInterval = setInterval(async () => {
   const workerStats = await worker.getStats();
   const cacheStats = cache ? await cache.getStats() : null;
···
       cache: cacheStats,
       metrics: metricsData,
     },
-    "Service stats"
+    "Service stats",
   );
 }, 60000);
+
+async function shutdown() {
+  try {
+    logger.info({ process: "MAIN" }, "Shutting down gracefully");
+    if (jetstream.cursor !== undefined) {
+      fs.writeFileSync("cursor.txt", jetstream.cursor.toString(), "utf8");
+    }
+    jetstream.close();
+    await worker.stop();
+    await queue.disconnect();
+    await redis.quit();
+    clearInterval(statsInterval);
+    clearInterval(cursorUpdateInterval);
+    logger.info({ process: "MAIN" }, "Service stopped");
+    process.exit(0);
+  } catch (error) {
+    logger.error({ process: "MAIN", error }, "Error shutting down gracefully");
+    process.exit(1);
+  }
+}
+
+process.on("SIGINT", () => void shutdown());
+process.on("SIGTERM", () => void shutdown());
src/processor/image-processor.ts (+3, -3)
···
       return null;
     }

-    const didDoc = await didDocResponse.json();
-    const pdsService = didDoc.service?.find((s: any) =>
+    const didDoc = await didDocResponse.json() as { service?: Array<{ id: string; type: string; serviceEndpoint?: string }> };
+    const pdsService = didDoc.service?.find((s) =>
       s.id === "#atproto_pds" && s.type === "AtprotoPersonalDataServer"
     );

···
     );

     if (!response.ok) {
-      const data = await response.json().catch(() => ({}));
+      const data = await response.json().catch(() => ({})) as { error?: string; message?: string };
       if (data.error === "RepoTakendown") {
         logger.info({ did, message: data.message }, "Repo has been taken down, skipping");
         this.repoTakendownCache.set(did, true);
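Both changes here replace the implicit any from response.json() with inline as casts, which only narrow the type at compile time. If stricter handling is ever wanted, a shallow runtime guard is one option; this is a sketch only, not what the PR does, and the DID-document shape is taken from the cast above:

// Sketch: narrow an unknown JSON payload to the DID-document shape used above,
// returning undefined instead of trusting a compile-time cast. Element shapes
// are not validated; this only checks that `service` is an array when present.
interface PdsService { id: string; type: string; serviceEndpoint?: string; }

function asDidDoc(value: unknown): { service?: PdsService[] } | undefined {
  if (typeof value !== "object" || value === null) return undefined;
  const service = (value as { service?: unknown }).service;
  if (service !== undefined && !Array.isArray(service)) return undefined;
  return value as { service?: PdsService[] };
}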
tests/unit/image-processor.test.ts (+85, -66)
···
   return ab;
 }

+// Mock PDS resolution and blob fetch
+function createMockFetch(blobResponse: Response | ((callCount: number) => Response)) {
+  let callCount = 0;
+  return mock((url: string) => {
+    // Mock PLC directory lookup
+    if (url.includes('plc.directory') || url.includes('plc.wtf')) {
+      return Promise.resolve({
+        ok: true,
+        json: async () => ({
+          service: [{
+            id: '#atproto_pds',
+            type: 'AtprotoPersonalDataServer',
+            serviceEndpoint: 'https://test.pds'
+          }]
+        })
+      } as Response);
+    }
+
+    // Mock describeRepo for takedown check
+    if (url.includes('describeRepo')) {
+      return Promise.resolve({
+        ok: true,
+        json: async () => ({})
+      } as Response);
+    }
+
+    // Mock blob fetch
+    callCount++;
+    if (typeof blobResponse === 'function') {
+      return Promise.resolve(blobResponse(callCount));
+    }
+    return Promise.resolve(blobResponse);
+  });
+}
+
 describe("ImageProcessor", () => {
   let processor: ImageProcessor;
   const testChecks: BlobCheck[] = [
···
     },
   ];

+  const mockAgent = {} as any; // Mock AtpAgent
+
   beforeEach(() => {
-    processor = new ImageProcessor(testChecks, "https://test.pds");
+    processor = new ImageProcessor(testChecks, mockAgent);
   });

   test("should skip non-image blobs", async () => {
-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
   });

   test("should skip SVG images", async () => {
-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
   });

   test("should return null when blob fetch fails", async () => {
-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: false,
-        status: 404,
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: false,
+      status: 404,
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       },
     ];

-    const matchingProcessor = new ImageProcessor(checksWithActualHash, "https://test.pds");
+    const matchingProcessor = new ImageProcessor(checksWithActualHash, mockAgent);

     const result = await matchingProcessor.processBlob("did:test:123", blob);

···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       .png()
       .toBuffer();

-    let callCount = 0;
-    const mockFetch = mock(() => {
-      callCount++;
+    const mockFetch = createMockFetch((callCount: number) => {
+      // First blob fetch fails (after PLC/describeRepo calls)
       if (callCount === 1) {
-        return Promise.resolve({
+        return {
           ok: false,
           status: 404,
-        } as Response);
+        } as Response;
       }
-      return Promise.resolve({
+      // Second blob fetch succeeds
+      return {
         ok: true,
         arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response);
+      } as Response;
     });

     globalThis.fetch = mockFetch as unknown as typeof fetch;
···
       },
     ];

-    const processorWithIgnore = new ImageProcessor(checksWithIgnore, "https://test.pds");
+    const processorWithIgnore = new ImageProcessor(checksWithIgnore, mockAgent);

     const testImage = await sharp({
       create: {
···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       },
     ];

-    const processorWithIgnore = new ImageProcessor(checksWithIgnore, "https://test.pds");
+    const processorWithIgnore = new ImageProcessor(checksWithIgnore, mockAgent);

     const testImage = await sharp({
       create: {
···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

···
       },
     ];

-    const processorMixed = new ImageProcessor(mixedChecks, "https://test.pds");
+    const processorMixed = new ImageProcessor(mixedChecks, mockAgent);

     const testImage = await sharp({
       create: {
···
       .png()
       .toBuffer();

-    const mockFetch = mock(() =>
-      Promise.resolve({
-        ok: true,
-        arrayBuffer: async () => bufferToArrayBuffer(testImage),
-      } as Response)
-    );
+    const mockFetch = createMockFetch({
+      ok: true,
+      arrayBuffer: async () => bufferToArrayBuffer(testImage),
+    } as Response);

     globalThis.fetch = mockFetch as unknown as typeof fetch;

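Each test in this file installs its mock via globalThis.fetch = mockFetch, and the hunks shown here do not restore the original. If cross-test leakage ever becomes a problem, one conventional pattern is to restore the real fetch after every test; a sketch, assuming bun:test's afterEach and not part of this PR:

import { afterEach } from "bun:test";

// Sketch: remember the real fetch and restore it after every test so a mock
// installed by one test cannot leak into the next.
const realFetch = globalThis.fetch;

afterEach(() => {
  globalThis.fetch = realFetch;
});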