import { Client } from "@atcute/client";

import * as CAR from "@atcute/car";
import { CarReader } from "@atcute/car/v4";
import * as CBOR from "@atcute/cbor";
import * as CID from "@atcute/cid";
import { type FoundPublicKey, getPublicKeyFromDidController, verifySig } from "@atcute/crypto";
import { type DidDocument, getAtprotoVerificationMaterial } from "@atcute/identity";
import { Did } from "@atcute/lexicons";
import { toSha256 } from "@atcute/uint8array";

import { type AddressedAtUri, parseAddressedAtUri } from "./types/at-uri";

export interface VerifyError {
	message: string;
	detail?: unknown;
}

export interface VerifyResult {
	errors: VerifyError[];
}

export interface VerifyOptions {
	rpc: Client;
	uri: string;
	cid: string;
	record: unknown;
	didDoc: DidDocument;
}

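/**
 * Verifies that a record matches its claimed CID and is provably included in the
 * repository's signed merkle search tree, using blocks fetched from the owning PDS.
 *
 * Usage sketch (the service URL, at-uri, CID, record and DID document below are
 * placeholder values; `simpleFetchHandler` is assumed from `@atcute/client`):
 *
 * ```ts
 * const rpc = new Client({ handler: simpleFetchHandler({ service: "https://pds.example.com" }) });
 *
 * const { errors } = await verifyRecord({
 *   rpc,
 *   uri: "at://did:plc:example/app.bsky.feed.post/3kexamplerkey",
 *   cid: "bafyreib...",
 *   record, // record value as returned by the appview
 *   didDoc, // resolved did document for the repo
 * });
 *
 * if (errors.length === 0) {
 *   console.log("record verified");
 * }
 * ```
 */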
export const verifyRecord = async (opts: VerifyOptions): Promise<VerifyResult> => {
	const errors: VerifyError[] = [];

	// verify cid can be parsed
	try {
		CID.fromString(opts.cid);
	} catch (e) {
		errors.push({ message: `provided cid is invalid`, detail: e });
	}

	// verify record content matches cid
	let cbor: Uint8Array;
	{
		cbor = CBOR.encode(opts.record);

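		// recompute the cid over the dag-cbor encoding and compare it with the claimed cid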
		const cid = await CID.create(CID.CODEC_DCBOR, cbor);
		const cidString = CID.toString(cid);

		if (cidString !== opts.cid) {
			errors.push({ message: `record content does not match cid` });
		}
	}

	// verify at-uri is valid
	let uri: AddressedAtUri;
	try {
		uri = parseAddressedAtUri(opts.uri);

		if (uri.repo !== opts.didDoc.id) {
			errors.push({ message: `repo in at-uri does not match did document` });
		}
	} catch (err) {
		errors.push({ message: `provided at-uri is invalid`, detail: err });
		return { errors };
	}

	// grab public key from did document
	let publicKey: FoundPublicKey;
	try {
		const controller = getAtprotoVerificationMaterial(opts.didDoc);
		if (!controller) {
			errors.push({
				message: `did document does not contain verification material`,
			});
			return { errors };
		}

		publicKey = getPublicKeyFromDidController(controller);
	} catch (err) {
		errors.push({
			message: `failed to get public key from did document`,
			detail: err,
		});
		return { errors };
	}

	// grab the raw record blocks from the pds
	let car: Uint8Array;
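	// com.atproto.sync.getRecord returns a car file containing the signed commit
	// along with the mst blocks needed to prove the record's inclusion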
	const { ok, data } = await opts.rpc.get("com.atproto.sync.getRecord", {
		params: {
			did: opts.didDoc.id as Did,
			collection: uri.collection,
			rkey: uri.rkey,
		},
		as: "bytes",
	});
	if (!ok) {
		errors.push({ message: `failed to fetch car from pds`, detail: data.error });
		return { errors };
	} else {
		car = data;
	}

	// read the car
	let blockmap: CAR.BlockMap;
	let commit: CAR.Commit;

	try {
		const reader = CarReader.fromUint8Array(car);
		if (reader.header.data.roots.length !== 1) {
			errors.push({ message: `car must have exactly one root` });
			return { errors };
		}

		blockmap = new Map();
		for (const entry of reader) {
			const cidString = CID.toString(entry.cid);

			// Verify that `bytes` matches its associated CID
			const expectedCid = CID.toString(await CID.create(entry.cid.codec as 85 | 113, entry.bytes));
			if (cidString !== expectedCid) {
				errors.push({
					message: `cid does not match bytes`,
					detail: { cid: cidString, expectedCid },
				});
			}

			blockmap.set(cidString, entry);
		}

		if (blockmap.size === 0) {
			errors.push({ message: `car must have at least one block` });
			return { errors };
		}

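		// the car's single root must point at the signed commit object for the repo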
		commit = CAR.readBlock(blockmap, reader.header.data.roots[0], CAR.isCommit);
	} catch (err) {
		errors.push({ message: `failed to read car`, detail: err });
		return { errors };
	}

	// verify did in commit matches the did document
	if (commit.did !== opts.didDoc.id) {
		errors.push({ message: `did in commit does not match did document` });
	}

	// verify signature contained in commit is valid
	{
		const { sig, ...unsigned } = commit;

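		// the signature covers the dag-cbor encoding of the commit with the sig field removed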
		const data = CBOR.encode(unsigned);
		const valid = await verifySig(publicKey, CBOR.fromBytes(sig), data);

		if (!valid) {
			errors.push({ message: `signature verification failed` });
		}
	}

	// verify the record is included in the mst referenced by the commit
	try {
		const result = await dfs(blockmap, commit.data.$link, opts.cid);
		if (!result.found) {
			errors.push({ message: `could not find record in car` });
		}
	} catch (err) {
		errors.push({ message: `failed to iterate over car`, detail: err });
	}

	return { errors };
};

interface DfsResult {
	found: boolean;
	min?: string;
	max?: string;
	depth?: number;
}

const encoder = new TextEncoder();
const decoder = new TextDecoder();

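/**
 * Walks the merkle search tree depth-first starting from `from`, checking whether
 * `target` appears as a leaf value while validating key ordering and layer
 * invariants along the way.
 */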
const dfs = async (
	blockmap: CAR.BlockMap,
	from: string | undefined,
	target: string,
	visited = new Set<string>(),
): Promise<DfsResult> => {
	// If there's no starting point, return empty state
	if (from == null) {
		return { found: false };
	}

	// Check for cycles
	{
		if (visited.has(from)) {
			throw new Error(`cycle detected; cid=${from}`);
		}

		visited.add(from);
	}

	// Get the block data
	let node: CAR.MstNode;
	{
		const entry = blockmap.get(from);
		if (!entry) {
			return { found: false };
		}

		const decoded = CBOR.decode(entry.bytes);
		if (!CAR.isMstNode(decoded)) {
			throw new Error(`invalid mst node; cid=${from}`);
		}

		node = decoded;
	}

	// Recursively process the left child
	const left = await dfs(blockmap, node.l?.$link, target, visited);

	let key = "";
	let found = left.found;
	let depth: number | undefined;
	let firstKey: string | undefined;
	let lastKey: string | undefined;

	// Process all entries in this node
	for (const entry of node.e) {
		if (entry.v.$link === target) {
			found = true;
		}

		// Construct the key: the first `entry.p` characters are shared with the previous key (prefix compression)
		key = key.substring(0, entry.p) + decoder.decode(CBOR.fromBytes(entry.k));

		// Calculate depth based on leading zeros in the hash
		const keyDigest = await toSha256(encoder.encode(key));
		let zeroCount = 0;

		outerLoop: for (const byte of keyDigest) {
			for (let bit = 7; bit >= 0; bit--) {
				if (((byte >> bit) & 1) !== 0) {
					break outerLoop;
				}
				zeroCount++;
			}
		}

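		// The atproto MST uses a fanout of 4, so each layer corresponds to 2 leading zero bits of the key hash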
		const thisDepth = Math.floor(zeroCount / 2);

		// Ensure consistent depth
		if (depth === undefined) {
			depth = thisDepth;
		} else if (depth !== thisDepth) {
			throw new Error(`node has entries with different depths; cid=${from}`);
		}

		// Track first and last keys
		if (lastKey === undefined) {
			firstKey = key;
			lastKey = key;
		}

		// Check key ordering
		if (lastKey > key) {
			throw new Error(`entries are out of order; cid=${from}`);
		}

		// Process right child
		const right = await dfs(blockmap, entry.t?.$link, target, visited);

		// Check ordering with right subtree
		if (right.min && right.min < lastKey) {
			throw new Error(`entries are out of order; cid=${from}`);
		}

		found ||= right.found;

		// Check depth ordering
		if (left.depth !== undefined && left.depth >= thisDepth) {
			throw new Error(`depths are out of order; cid=${from}`);
		}

		if (right.depth !== undefined && right.depth >= thisDepth) {
			throw new Error(`depths are out of order; cid=${from}`);
		}

		// Update last key based on right subtree
		lastKey = right.max ?? key;
	}

	// Check ordering with left subtree
	if (left.max && firstKey && left.max > firstKey) {
		throw new Error(`entries are out of order; cid=${from}`);
	}

	return {
		found,
		min: firstKey,
		max: lastKey,
		depth,
	};
};