Cloudflare Durable Objects PDS Implementation Plan#
For Claude: REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
Goal: Build a minimal AT Protocol PDS on Cloudflare Durable Objects with zero dependencies.
Architecture: Each user gets their own Durable Object with SQLite storage. A router Worker maps DIDs to Objects. All crypto uses Web Crypto API (P-256 for signing, SHA-256 for hashing).
Tech Stack: Cloudflare Workers, Durable Objects, SQLite, Web Crypto API, no npm dependencies.
Task 1: Project Setup#
Files:
- Create: package.json
- Create: wrangler.toml
- Create: src/pds.js
Step 1: Initialize package.json
{
"name": "cloudflare-pds",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "wrangler dev",
"deploy": "wrangler deploy",
"test": "node test/run.js"
}
}
Step 2: Create wrangler.toml
name = "atproto-pds"
main = "src/pds.js"
compatibility_date = "2024-01-01"
[[durable_objects.bindings]]
name = "PDS"
class_name = "PersonalDataServer"
[[migrations]]
tag = "v1"
new_sqlite_classes = ["PersonalDataServer"]
Step 3: Create minimal src/pds.js skeleton
export class PersonalDataServer {
constructor(state, env) {
this.state = state
this.sql = state.storage.sql
}
async fetch(request) {
return new Response('pds running', { status: 200 })
}
}
export default {
async fetch(request, env) {
const url = new URL(request.url)
const did = url.searchParams.get('did')
if (!did) {
return new Response('missing did param', { status: 400 })
}
const id = env.PDS.idFromName(did)
const pds = env.PDS.get(id)
return pds.fetch(request)
}
}
Step 4: Verify it runs
Run: npx wrangler dev
Test: curl "http://localhost:8787/?did=did:plc:test"
Expected: pds running
Step 5: Commit
git init
git add -A
git commit -m "feat: initial project setup with Durable Object skeleton"
Task 2: CBOR Encoding#
Files:
- Modify: src/pds.js
Implement minimal deterministic CBOR encoding. Only the types AT Protocol uses: maps, arrays, strings, bytes, integers, null, booleans.
Step 1: Add CBOR encoding function
Add to top of src/pds.js:
// === CBOR ENCODING ===
// Minimal deterministic CBOR (DAG-CBOR subset) - length-first sorted keys, minimal integers
function cborEncode(value) {
const parts = []
function encode(val) {
if (val === null) {
parts.push(0xf6) // null
} else if (val === true) {
parts.push(0xf5) // true
} else if (val === false) {
parts.push(0xf4) // false
} else if (typeof val === 'number') {
encodeInteger(val)
} else if (typeof val === 'string') {
const bytes = new TextEncoder().encode(val)
encodeHead(3, bytes.length) // major type 3 = text string
parts.push(...bytes)
} else if (val instanceof Uint8Array) {
encodeHead(2, val.length) // major type 2 = byte string
parts.push(...val)
} else if (Array.isArray(val)) {
encodeHead(4, val.length) // major type 4 = array
for (const item of val) encode(item)
} else if (typeof val === 'object') {
      // DAG-CBOR deterministic order: shorter keys sort first, ties broken
      // bytewise (UTF-16 order, which matches bytewise for ASCII keys)
      const keys = Object.keys(val).sort((a, b) => a.length - b.length || (a < b ? -1 : a > b ? 1 : 0))
encodeHead(5, keys.length) // major type 5 = map
for (const key of keys) {
encode(key)
encode(val[key])
}
}
}
function encodeHead(majorType, length) {
const mt = majorType << 5
if (length < 24) {
parts.push(mt | length)
} else if (length < 256) {
parts.push(mt | 24, length)
} else if (length < 65536) {
parts.push(mt | 25, length >> 8, length & 0xff)
} else if (length < 4294967296) {
parts.push(mt | 26, (length >> 24) & 0xff, (length >> 16) & 0xff, (length >> 8) & 0xff, length & 0xff)
}
}
function encodeInteger(n) {
if (n >= 0) {
encodeHead(0, n) // major type 0 = unsigned int
} else {
encodeHead(1, -n - 1) // major type 1 = negative int
}
}
encode(value)
return new Uint8Array(parts)
}
Step 2: Add simple test endpoint
Modify the fetch handler temporarily:
async fetch(request) {
const url = new URL(request.url)
if (url.pathname === '/test/cbor') {
const encoded = cborEncode({ hello: 'world', num: 42 })
return new Response(encoded, {
headers: { 'content-type': 'application/cbor' }
})
}
return new Response('pds running', { status: 200 })
}
Step 3: Verify CBOR output
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/cbor?did=did:plc:test" | xxd
Expected: Valid CBOR bytes (a2 63 6e 75 6d 18 2a 65 68 65 6c 6c 6f 65 77 6f 72 6c 64)
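Those bytes decode as follows (length-first ordering puts the 3-byte key "num" before the 5-byte "hello"):
a2                  map, 2 entries
63 6e 75 6d         text(3) "num"
18 2a               unsigned int 42
65 68 65 6c 6c 6f   text(5) "hello"
65 77 6f 72 6c 64   text(5) "world"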
Step 4: Commit
git add src/pds.js
git commit -m "feat: add deterministic CBOR encoding"
Task 3: CID Generation#
Files:
- Modify: src/pds.js
Generate CIDs (Content Identifiers) using SHA-256 + multiformat encoding.
Step 1: Add CID utilities
Add after CBOR section:
// === CID GENERATION ===
// dag-cbor (0x71) + sha-256 (0x12) + 32 bytes
async function createCid(bytes) {
const hash = await crypto.subtle.digest('SHA-256', bytes)
const hashBytes = new Uint8Array(hash)
// CIDv1: version(1) + codec(dag-cbor=0x71) + multihash(sha256)
// Multihash: hash-type(0x12) + length(0x20=32) + digest
const cid = new Uint8Array(2 + 2 + 32)
cid[0] = 0x01 // CIDv1
cid[1] = 0x71 // dag-cbor codec
cid[2] = 0x12 // sha-256
cid[3] = 0x20 // 32 bytes
cid.set(hashBytes, 4)
return cid
}
function cidToString(cid) {
// base32lower encoding for CIDv1
return 'b' + base32Encode(cid)
}
function base32Encode(bytes) {
const alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
let result = ''
let bits = 0
let value = 0
for (const byte of bytes) {
value = (value << 8) | byte
bits += 8
while (bits >= 5) {
bits -= 5
result += alphabet[(value >> bits) & 31]
}
}
if (bits > 0) {
result += alphabet[(value << (5 - bits)) & 31]
}
return result
}
Step 2: Add test endpoint
if (url.pathname === '/test/cid') {
const data = cborEncode({ test: 'data' })
const cid = await createCid(data)
return Response.json({ cid: cidToString(cid) })
}
Step 3: Verify CID generation
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/cid?did=did:plc:test"
Expected: JSON with CID string starting with 'b'
Step 4: Commit
git add src/pds.js
git commit -m "feat: add CID generation with SHA-256"
Task 4: TID Generation#
Files:
- Modify: src/pds.js
Generate TIDs (Timestamp IDs) for record keys and revisions.
Step 1: Add TID utilities
Add after CID section:
// === TID GENERATION ===
// Timestamp-based IDs: base32-sort encoded microseconds + clock ID
const TID_CHARS = '234567abcdefghijklmnopqrstuvwxyz'
let lastTimestamp = 0
let clockId = Math.floor(Math.random() * 1024)
function createTid() {
let timestamp = Date.now() * 1000 // microseconds
// Ensure monotonic
if (timestamp <= lastTimestamp) {
timestamp = lastTimestamp + 1
}
lastTimestamp = timestamp
// 13 chars: 11 for the timestamp (55 bits, ~53 used) + 2 for the clock ID (10 bits)
let tid = ''
// Encode timestamp (high bits first for sortability)
let ts = timestamp
for (let i = 0; i < 11; i++) {
tid = TID_CHARS[ts & 31] + tid
ts = Math.floor(ts / 32)
}
// Append clock ID (2 chars)
tid += TID_CHARS[(clockId >> 5) & 31]
tid += TID_CHARS[clockId & 31]
return tid
}
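The two-part encoding above is equivalent to base32-sort-encoding the single 64-bit value (timestamp << 10) | clockId that the TID spec describes. A quick BigInt sanity check (illustrative only, not part of the PDS):
// Illustrative: encode (ts << 10 | clockId) as one 13-char base32-sort value
function tidFromU64(ts, clockId) {
  let n = (BigInt(ts) << 10n) | BigInt(clockId)
  let out = ''
  for (let i = 0; i < 13; i++) {
    out = TID_CHARS[Number(n & 31n)] + out
    n >>= 5n
  }
  return out
}
// tidFromU64(1700000000000000, 123) produces the same layout createTid()
// would for that timestamp and clock ID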
Step 2: Add test endpoint
if (url.pathname === '/test/tid') {
const tids = [createTid(), createTid(), createTid()]
return Response.json({ tids })
}
Step 3: Verify TIDs are monotonic
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/tid?did=did:plc:test"
Expected: Three 13-char TIDs, each greater than the previous
Step 4: Commit
git add src/pds.js
git commit -m "feat: add TID generation for record keys"
Task 5: SQLite Schema#
Files:
- Modify: src/pds.js
Initialize the database schema when the Durable Object starts.
Step 1: Add schema initialization
Modify the constructor:
export class PersonalDataServer {
constructor(state, env) {
this.state = state
this.sql = state.storage.sql
this.env = env
// Initialize schema
this.sql.exec(`
CREATE TABLE IF NOT EXISTS blocks (
cid TEXT PRIMARY KEY,
data BLOB NOT NULL
);
CREATE TABLE IF NOT EXISTS records (
uri TEXT PRIMARY KEY,
cid TEXT NOT NULL,
collection TEXT NOT NULL,
rkey TEXT NOT NULL,
value BLOB NOT NULL
);
CREATE TABLE IF NOT EXISTS commits (
seq INTEGER PRIMARY KEY AUTOINCREMENT,
cid TEXT NOT NULL,
rev TEXT NOT NULL,
prev TEXT
);
CREATE TABLE IF NOT EXISTS seq_events (
seq INTEGER PRIMARY KEY AUTOINCREMENT,
did TEXT NOT NULL,
commit_cid TEXT NOT NULL,
evt BLOB NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_records_collection ON records(collection, rkey);
`)
}
// ... rest of class
}
Step 2: Add test endpoint to verify schema
if (url.pathname === '/test/schema') {
// Filter out sqlite_sequence (created by AUTOINCREMENT) and Cloudflare's internal _cf_* tables
const tables = this.sql.exec(`
SELECT name FROM sqlite_master
WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%'
ORDER BY name
`).toArray()
return Response.json({ tables: tables.map(t => t.name) })
}
Step 3: Verify schema creates
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/schema?did=did:plc:test"
Expected: {"tables":["blocks","commits","records","seq_events"]}
Step 4: Commit
git add src/pds.js
git commit -m "feat: add SQLite schema for PDS storage"
Task 6: P-256 Signing#
Files:
- Modify: src/pds.js
Add P-256 ECDSA signing using Web Crypto API.
Step 1: Add signing utilities
Add after TID section:
// === P-256 SIGNING ===
// Web Crypto ECDSA with P-256 curve
async function importPrivateKey(privateKeyBytes) {
// PKCS#8 wrapper for raw P-256 private key
const pkcs8Prefix = new Uint8Array([
0x30, 0x41, 0x02, 0x01, 0x00, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48,
0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03,
0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20
])
const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32)
pkcs8.set(pkcs8Prefix)
pkcs8.set(privateKeyBytes, pkcs8Prefix.length)
return crypto.subtle.importKey(
'pkcs8',
pkcs8,
{ name: 'ECDSA', namedCurve: 'P-256' },
false,
['sign']
)
}
async function sign(privateKey, data) {
const signature = await crypto.subtle.sign(
{ name: 'ECDSA', hash: 'SHA-256' },
privateKey,
data
)
return new Uint8Array(signature)
}
async function generateKeyPair() {
const keyPair = await crypto.subtle.generateKey(
{ name: 'ECDSA', namedCurve: 'P-256' },
true,
['sign', 'verify']
)
// Export private key as raw bytes
const privateJwk = await crypto.subtle.exportKey('jwk', keyPair.privateKey)
const privateBytes = base64UrlDecode(privateJwk.d)
// Export public key as compressed point
const publicRaw = await crypto.subtle.exportKey('raw', keyPair.publicKey)
const publicBytes = new Uint8Array(publicRaw)
const compressed = compressPublicKey(publicBytes)
return { privateKey: privateBytes, publicKey: compressed }
}
function compressPublicKey(uncompressed) {
// uncompressed is 65 bytes: 0x04 + x(32) + y(32)
// compressed is 33 bytes: prefix(02 or 03) + x(32)
const x = uncompressed.slice(1, 33)
const y = uncompressed.slice(33, 65)
const prefix = (y[31] & 1) === 0 ? 0x02 : 0x03
const compressed = new Uint8Array(33)
compressed[0] = prefix
compressed.set(x, 1)
return compressed
}
function base64UrlDecode(str) {
const base64 = str.replace(/-/g, '+').replace(/_/g, '/')
const binary = atob(base64)
const bytes = new Uint8Array(binary.length)
for (let i = 0; i < binary.length; i++) {
bytes[i] = binary.charCodeAt(i)
}
return bytes
}
function bytesToHex(bytes) {
return Array.from(bytes).map(b => b.toString(16).padStart(2, '0')).join('')
}
function hexToBytes(hex) {
const bytes = new Uint8Array(hex.length / 2)
for (let i = 0; i < hex.length; i += 2) {
bytes[i / 2] = parseInt(hex.substr(i, 2), 16)
}
return bytes
}
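To sanity-check signatures end to end, a small self-test (a sketch, not required by the PDS itself) can round-trip through crypto.subtle.verify using the CryptoKey pair directly, which avoids point decompression:
// Sketch: confirm sign/verify agree for a fresh P-256 key pair
async function selfTestSigning() {
  const keyPair = await crypto.subtle.generateKey(
    { name: 'ECDSA', namedCurve: 'P-256' }, true, ['sign', 'verify'])
  const data = new TextEncoder().encode('test message')
  const sig = await crypto.subtle.sign(
    { name: 'ECDSA', hash: 'SHA-256' }, keyPair.privateKey, data)
  // resolves true when the signature checks out
  return crypto.subtle.verify(
    { name: 'ECDSA', hash: 'SHA-256' }, keyPair.publicKey, sig, data)
}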
Step 2: Add test endpoint
if (url.pathname === '/test/sign') {
const kp = await generateKeyPair()
const data = new TextEncoder().encode('test message')
const key = await importPrivateKey(kp.privateKey)
const sig = await sign(key, data)
return Response.json({
publicKey: bytesToHex(kp.publicKey),
signature: bytesToHex(sig)
})
}
Step 3: Verify signing works
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/sign?did=did:plc:test"
Expected: JSON with 66-char public key hex and 128-char signature hex
Step 4: Commit
git add src/pds.js
git commit -m "feat: add P-256 ECDSA signing via Web Crypto"
Task 7: Identity Storage#
Files:
- Modify: src/pds.js
Store DID and signing key in Durable Object storage.
Step 1: Add identity methods to class
Add to PersonalDataServer class:
async initIdentity(did, privateKeyHex) {
await this.state.storage.put('did', did)
await this.state.storage.put('privateKey', privateKeyHex)
}
async getDid() {
if (!this._did) {
this._did = await this.state.storage.get('did')
}
return this._did
}
async getSigningKey() {
const hex = await this.state.storage.get('privateKey')
if (!hex) return null
return importPrivateKey(hexToBytes(hex))
}
Step 2: Add init endpoint
if (url.pathname === '/init') {
const body = await request.json()
if (!body.did || !body.privateKey) {
return Response.json({ error: 'missing did or privateKey' }, { status: 400 })
}
await this.initIdentity(body.did, body.privateKey)
return Response.json({ ok: true, did: body.did })
}
Step 3: Add status endpoint
if (url.pathname === '/status') {
const did = await this.getDid()
return Response.json({
initialized: !!did,
did: did || null
})
}
Step 4: Verify identity storage
Run: npx wrangler dev
# Check uninitialized
curl "http://localhost:8787/status?did=did:plc:test"
# Expected: {"initialized":false,"did":null}
# Initialize
curl -X POST "http://localhost:8787/init?did=did:plc:test" \
-H "Content-Type: application/json" \
-d '{"did":"did:plc:test","privateKey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
# Expected: {"ok":true,"did":"did:plc:test"}
# Check initialized
curl "http://localhost:8787/status?did=did:plc:test"
# Expected: {"initialized":true,"did":"did:plc:test"}
Step 5: Commit
git add src/pds.js
git commit -m "feat: add identity storage and init endpoint"
Task 8: MST (Merkle Search Tree)#
Files:
- Modify: src/pds.js
Implement a simple MST that is rebuilt from scratch on each write.
Step 1: Add MST utilities
Add after signing section:
// === MERKLE SEARCH TREE ===
// Simple rebuild-on-write implementation
async function sha256(data) {
const hash = await crypto.subtle.digest('SHA-256', data)
return new Uint8Array(hash)
}
async function getKeyDepth(key) {
  // atproto MST: depth = leading zero bits of sha256(key), counted in 2-bit chunks (fanout 4)
  const hash = await sha256(new TextEncoder().encode(key))
  let zeros = 0
  for (const byte of hash) {
    if (byte === 0) { zeros += 8; continue }
    for (let i = 7; i >= 0; i--) {
      if ((byte >> i) & 1) break
      zeros++
    }
    break
  }
  return Math.floor(zeros / 2)
}
class MST {
constructor(sql) {
this.sql = sql
}
async computeRoot() {
const records = this.sql.exec(`
SELECT collection, rkey, cid FROM records ORDER BY collection, rkey
`).toArray()
if (records.length === 0) {
return null
}
const entries = records.map(r => ({
key: `${r.collection}/${r.rkey}`,
cid: r.cid
}))
return this.buildTree(entries, 0)
}
async buildTree(entries, depth) {
if (entries.length === 0) return null
const node = { l: null, e: [] }
let leftEntries = []
for (const entry of entries) {
const keyDepth = await getKeyDepth(entry.key)
if (keyDepth > depth) {
leftEntries.push(entry)
} else {
// Store accumulated left entries
if (leftEntries.length > 0) {
const leftCid = await this.buildTree(leftEntries, depth + 1)
if (node.e.length === 0) {
node.l = leftCid
} else {
node.e[node.e.length - 1].t = leftCid
}
leftEntries = []
}
node.e.push({ k: entry.key, v: entry.cid, t: null })
}
}
// Handle remaining left entries
if (leftEntries.length > 0) {
const leftCid = await this.buildTree(leftEntries, depth + 1)
if (node.e.length > 0) {
node.e[node.e.length - 1].t = leftCid
} else {
node.l = leftCid
}
}
// Encode and store node
const nodeBytes = cborEncode(node)
const nodeCid = await createCid(nodeBytes)
const cidStr = cidToString(nodeCid)
this.sql.exec(
`INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
cidStr,
nodeBytes
)
return cidStr
}
}
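For intuition, the root node for two keys that both land at depth 0 has no left subtree and two entries (the CID strings below are placeholders, not real hashes):
// Hypothetical node shape; v values are placeholder CIDs
{
  l: null, // CID of a subtree holding deeper keys, or null
  e: [
    { k: 'app.bsky.feed.post/abc', v: 'bafy...cid1', t: null },
    { k: 'app.bsky.feed.post/def', v: 'bafy...cid2', t: null }
  ]
}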
Step 2: Add MST test endpoint
if (url.pathname === '/test/mst') {
// Insert some test records
this.sql.exec(`INSERT OR REPLACE INTO records VALUES (?, ?, ?, ?, ?)`,
'at://did:plc:test/app.bsky.feed.post/abc', 'cid1', 'app.bsky.feed.post', 'abc', new Uint8Array([1]))
this.sql.exec(`INSERT OR REPLACE INTO records VALUES (?, ?, ?, ?, ?)`,
'at://did:plc:test/app.bsky.feed.post/def', 'cid2', 'app.bsky.feed.post', 'def', new Uint8Array([2]))
const mst = new MST(this.sql)
const root = await mst.computeRoot()
return Response.json({ root })
}
Step 3: Verify MST builds
Run: npx wrangler dev
Test: curl "http://localhost:8787/test/mst?did=did:plc:test"
Expected: JSON with root CID string
Step 4: Commit
git add src/pds.js
git commit -m "feat: add Merkle Search Tree implementation"
Task 9: createRecord Endpoint#
Files:
- Modify: src/pds.js
Implement the core write path.
Step 1: Add createRecord method
Add to PersonalDataServer class:
async createRecord(collection, record, rkey = null) {
const did = await this.getDid()
if (!did) throw new Error('PDS not initialized')
rkey = rkey || createTid()
const uri = `at://${did}/${collection}/${rkey}`
// Encode and hash record
const recordBytes = cborEncode(record)
const recordCid = await createCid(recordBytes)
const recordCidStr = cidToString(recordCid)
// Store block
this.sql.exec(
`INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
recordCidStr, recordBytes
)
// Store record index
this.sql.exec(
`INSERT OR REPLACE INTO records (uri, cid, collection, rkey, value) VALUES (?, ?, ?, ?, ?)`,
uri, recordCidStr, collection, rkey, recordBytes
)
// Rebuild MST
const mst = new MST(this.sql)
const dataRoot = await mst.computeRoot()
// Get previous commit (.one() throws on an empty result, so use toArray)
const prevCommit = this.sql.exec(
`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
).toArray()[0]
// Create commit
const rev = createTid()
const commit = {
did,
version: 3,
data: dataRoot,
rev,
prev: prevCommit?.cid || null
}
// Sign commit
const commitBytes = cborEncode(commit)
const signingKey = await this.getSigningKey()
const sig = await sign(signingKey, commitBytes)
const signedCommit = { ...commit, sig }
const signedBytes = cborEncode(signedCommit)
const commitCid = await createCid(signedBytes)
const commitCidStr = cidToString(commitCid)
// Store commit block
this.sql.exec(
`INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
commitCidStr, signedBytes
)
// Store commit reference
this.sql.exec(
`INSERT INTO commits (cid, rev, prev) VALUES (?, ?, ?)`,
commitCidStr, rev, prevCommit?.cid || null
)
// Sequence event
const evt = cborEncode({
ops: [{ action: 'create', path: `${collection}/${rkey}`, cid: recordCidStr }]
})
this.sql.exec(
`INSERT INTO seq_events (did, commit_cid, evt) VALUES (?, ?, ?)`,
did, commitCidStr, evt
)
return { uri, cid: recordCidStr, commit: commitCidStr }
}
Step 2: Add XRPC endpoint
if (url.pathname === '/xrpc/com.atproto.repo.createRecord') {
if (request.method !== 'POST') {
return Response.json({ error: 'method not allowed' }, { status: 405 })
}
const body = await request.json()
if (!body.collection || !body.record) {
return Response.json({ error: 'missing collection or record' }, { status: 400 })
}
try {
const result = await this.createRecord(body.collection, body.record, body.rkey)
return Response.json(result)
} catch (err) {
return Response.json({ error: err.message }, { status: 500 })
}
}
Step 3: Verify createRecord works
Run: npx wrangler dev
# First initialize
curl -X POST "http://localhost:8787/init?did=did:plc:test123" \
-H "Content-Type: application/json" \
-d '{"did":"did:plc:test123","privateKey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
# Create a post
curl -X POST "http://localhost:8787/xrpc/com.atproto.repo.createRecord?did=did:plc:test123" \
-H "Content-Type: application/json" \
-d '{"collection":"app.bsky.feed.post","record":{"text":"Hello world!","createdAt":"2026-01-04T00:00:00Z"}}'
Expected: JSON with uri, cid, and commit fields
Step 4: Commit
git add src/pds.js
git commit -m "feat: add createRecord endpoint"
Task 10: getRecord Endpoint#
Files:
- Modify: src/pds.js
Step 1: Add XRPC endpoint
if (url.pathname === '/xrpc/com.atproto.repo.getRecord') {
const collection = url.searchParams.get('collection')
const rkey = url.searchParams.get('rkey')
if (!collection || !rkey) {
return Response.json({ error: 'missing collection or rkey' }, { status: 400 })
}
const did = await this.getDid()
const uri = `at://${did}/${collection}/${rkey}`
const row = this.sql.exec(
`SELECT cid, value FROM records WHERE uri = ?`, uri
).toArray()[0] // .one() throws when no row matches
if (!row) {
return Response.json({ error: 'record not found' }, { status: 404 })
}
// Decode CBOR for response (BLOB columns come back as ArrayBuffer)
const value = cborDecode(new Uint8Array(row.value))
return Response.json({ uri, cid: row.cid, value })
}
Step 2: Add minimal CBOR decoder
Add after cborEncode:
function cborDecode(bytes) {
let offset = 0
function read() {
const initial = bytes[offset++]
const major = initial >> 5
const info = initial & 0x1f
let length = info
if (info === 24) length = bytes[offset++]
else if (info === 25) { length = (bytes[offset++] << 8) | bytes[offset++] }
else if (info === 26) {
length = (bytes[offset++] << 24) | (bytes[offset++] << 16) | (bytes[offset++] << 8) | bytes[offset++]
}
switch (major) {
case 0: return length // unsigned int
case 1: return -1 - length // negative int
case 2: { // byte string
const data = bytes.slice(offset, offset + length)
offset += length
return data
}
case 3: { // text string
const data = new TextDecoder().decode(bytes.slice(offset, offset + length))
offset += length
return data
}
case 4: { // array
const arr = []
for (let i = 0; i < length; i++) arr.push(read())
return arr
}
case 5: { // map
const obj = {}
for (let i = 0; i < length; i++) {
const key = read()
obj[key] = read()
}
return obj
}
case 7: { // special
if (info === 20) return false
if (info === 21) return true
if (info === 22) return null
return undefined
}
}
}
return read()
}
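A quick round-trip check (a sketch that could live behind a temporary /test/ endpoint) confirms the encoder and decoder agree:
// Sketch: encode/decode round trip; compare field by field, since
// deterministic encoding reorders map keys
const original = { text: 'hi', tags: ['a', 'b'], n: -5, ok: true, nul: null }
const decoded = cborDecode(cborEncode(original))
console.assert(decoded.text === 'hi' && decoded.n === -5 &&
  decoded.ok === true && decoded.nul === null && decoded.tags[1] === 'b')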
Step 3: Verify getRecord works
Run: npx wrangler dev
# Create a record first, then get it
curl "http://localhost:8787/xrpc/com.atproto.repo.getRecord?did=did:plc:test123&collection=app.bsky.feed.post&rkey=<rkey_from_create>"
Expected: JSON with uri, cid, and value (the original record)
Step 4: Commit
git add src/pds.js
git commit -m "feat: add getRecord endpoint with CBOR decoder"
Task 11: CAR File Builder#
Files:
- Modify: src/pds.js
Build CAR (Content Addressable aRchive) files for repo export.
Step 1: Add CAR builder
Add after MST section:
// === CAR FILE BUILDER ===
function varint(n) {
const bytes = []
while (n >= 0x80) {
bytes.push((n & 0x7f) | 0x80)
n >>>= 7
}
bytes.push(n)
return new Uint8Array(bytes)
}
function cidToBytes(cidStr) {
// Decode base32lower CID string to bytes
if (!cidStr.startsWith('b')) throw new Error('expected base32lower CID')
return base32Decode(cidStr.slice(1))
}
function base32Decode(str) {
const alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
let bits = 0
let value = 0
const output = []
for (const char of str) {
const idx = alphabet.indexOf(char)
if (idx === -1) continue
value = (value << 5) | idx
bits += 5
if (bits >= 8) {
bits -= 8
output.push((value >> bits) & 0xff)
}
}
return new Uint8Array(output)
}
function buildCarFile(rootCid, blocks) {
const parts = []
// Header: { version: 1, roots: [rootCid] }
// Caveat: a spec-compliant CAR header encodes roots as CID links (CBOR tag 42
// with a 0x00 multibase prefix); this minimal encoder emits a plain byte
// string, which strict parsers may reject
const rootCidBytes = cidToBytes(rootCid)
const header = cborEncode({ version: 1, roots: [rootCidBytes] })
parts.push(varint(header.length))
parts.push(header)
// Blocks: varint(len) + cid + data
for (const block of blocks) {
const cidBytes = cidToBytes(block.cid)
const blockLen = cidBytes.length + block.data.length
parts.push(varint(blockLen))
parts.push(cidBytes)
parts.push(block.data)
}
// Concatenate all parts
const totalLen = parts.reduce((sum, p) => sum + p.length, 0)
const car = new Uint8Array(totalLen)
let offset = 0
for (const part of parts) {
car.set(part, offset)
offset += part.length
}
return car
}
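For verifying Task 12's output, a matching reader (a sketch that only understands the exact framing buildCarFile emits, with fixed 36-byte CIDs) splits a CAR back into blocks:
// Sketch: parse the varint-framed CAR layout produced by buildCarFile
function parseCarFile(bytes) {
  let offset = 0
  function readVarint() {
    let n = 0, shift = 0
    while (true) {
      const b = bytes[offset++]
      n |= (b & 0x7f) << shift
      if ((b & 0x80) === 0) return n
      shift += 7
    }
  }
  const headerLen = readVarint()
  const header = cborDecode(bytes.slice(offset, offset + headerLen))
  offset += headerLen
  const blocks = []
  while (offset < bytes.length) {
    const len = readVarint()
    // CIDs from createCid are always 36 bytes: version + codec + multihash(34)
    const cid = bytes.slice(offset, offset + 36)
    const data = bytes.slice(offset + 36, offset + len)
    blocks.push({ cid: cidToString(cid), data })
    offset += len
  }
  return { header, blocks }
}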
Step 2: Commit
git add src/pds.js
git commit -m "feat: add CAR file builder"
Task 12: getRepo Endpoint#
Files:
- Modify: src/pds.js
Step 1: Add XRPC endpoint
if (url.pathname === '/xrpc/com.atproto.sync.getRepo') {
const commit = this.sql.exec(
`SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`
).toArray()[0] // .one() throws when the repo has no commits yet
if (!commit) {
return Response.json({ error: 'repo not found' }, { status: 404 })
}
// BLOB columns come back as ArrayBuffer; wrap them for buildCarFile
const blocks = this.sql.exec(`SELECT cid, data FROM blocks`).toArray()
.map(b => ({ cid: b.cid, data: new Uint8Array(b.data) }))
const car = buildCarFile(commit.cid, blocks)
return new Response(car, {
headers: { 'content-type': 'application/vnd.ipld.car' }
})
}
Step 2: Verify getRepo works
Run: npx wrangler dev
curl "http://localhost:8787/xrpc/com.atproto.sync.getRepo?did=did:plc:test123" -o repo.car
xxd repo.car | head -20
Expected: Binary CAR file starting with CBOR header
Step 3: Commit
git add src/pds.js
git commit -m "feat: add getRepo endpoint returning CAR file"
Task 13: subscribeRepos WebSocket#
Files:
- Modify: src/pds.js
Step 1: Add WebSocket endpoint
if (url.pathname === '/xrpc/com.atproto.sync.subscribeRepos') {
const upgradeHeader = request.headers.get('Upgrade')
if (upgradeHeader !== 'websocket') {
return new Response('expected websocket', { status: 426 })
}
const { 0: client, 1: server } = new WebSocketPair()
this.state.acceptWebSocket(server)
// Send backlog if cursor provided
const cursor = url.searchParams.get('cursor')
if (cursor) {
const events = this.sql.exec(
`SELECT * FROM seq_events WHERE seq > ? ORDER BY seq`,
parseInt(cursor)
).toArray()
for (const evt of events) {
server.send(this.formatEvent(evt))
}
}
return new Response(null, { status: 101, webSocket: client })
}
Step 2: Add event formatting and WebSocket handlers
Add to PersonalDataServer class:
formatEvent(evt) {
  // AT Protocol frame format: header + body
  // (the seq_events row already carries the DID, so no extra lookup is needed)
  const header = cborEncode({ op: 1, t: '#commit' })
  const body = cborEncode({
    seq: evt.seq,
    rebase: false,
    tooBig: false,
    repo: evt.did,
    commit: cidToBytes(evt.commit_cid),
    rev: createTid(), // placeholder; a real impl would reuse the commit's stored rev
    since: null,
    blocks: new Uint8Array(0), // Simplified - real impl includes CAR slice
    ops: cborDecode(new Uint8Array(evt.evt)).ops, // BLOBs arrive as ArrayBuffer
blobs: [],
time: new Date().toISOString()
})
// Concatenate header + body
const frame = new Uint8Array(header.length + body.length)
frame.set(header)
frame.set(body, header.length)
return frame
}
async webSocketMessage(ws, message) {
// Handle ping
if (message === 'ping') ws.send('pong')
}
async webSocketClose(ws, code, reason) {
// Durable Object will hibernate when no connections remain
}
broadcastEvent(evt) {
const frame = this.formatEvent(evt)
for (const ws of this.state.getWebSockets()) {
try {
ws.send(frame)
} catch (e) {
// Client disconnected
}
}
}
Step 3: Update createRecord to broadcast
Add at end of createRecord method, before return:
// Broadcast to subscribers
const evtRow = this.sql.exec(
`SELECT * FROM seq_events ORDER BY seq DESC LIMIT 1`
).one()
if (evtRow) {
this.broadcastEvent(evtRow)
}
Step 4: Verify WebSocket works
Run: npx wrangler dev
Use websocat or similar:
websocat "ws://localhost:8787/xrpc/com.atproto.sync.subscribeRepos?did=did:plc:test123"
In another terminal, create a record — you should see bytes appear in the WebSocket connection.
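Alternatively, a minimal Node client (a sketch assuming Node 22+, where a global WebSocket is available) confirms frames arrive:
// Sketch: log incoming firehose frame sizes
const ws = new WebSocket('ws://localhost:8787/xrpc/com.atproto.sync.subscribeRepos?did=did:plc:test123')
ws.binaryType = 'arraybuffer'
ws.onmessage = (e) => {
  const bytes = new Uint8Array(e.data)
  console.log(`frame: ${bytes.length} bytes`)
}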
Step 5: Commit
git add src/pds.js
git commit -m "feat: add subscribeRepos WebSocket endpoint"
Task 14: Clean Up Test Endpoints#
Files:
- Modify: src/pds.js
Step 1: Remove test endpoints
Remove all /test/* endpoint handlers from the fetch method. Keep only:
- /init
- /status
- /xrpc/com.atproto.repo.createRecord
- /xrpc/com.atproto.repo.getRecord
- /xrpc/com.atproto.sync.getRepo
- /xrpc/com.atproto.sync.subscribeRepos
Step 2: Add proper 404 handler
return Response.json({ error: 'not found' }, { status: 404 })
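The cleaned-up fetch method then reduces to a straight if-chain; a sketch of the final shape (the handleX method names are illustrative; the inline handler blocks from earlier tasks work just as well):
async fetch(request) {
  const url = new URL(request.url)
  if (url.pathname === '/init') return this.handleInit(request)
  if (url.pathname === '/status') return this.handleStatus()
  if (url.pathname === '/xrpc/com.atproto.repo.createRecord') return this.handleCreateRecord(request)
  if (url.pathname === '/xrpc/com.atproto.repo.getRecord') return this.handleGetRecord(url)
  if (url.pathname === '/xrpc/com.atproto.sync.getRepo') return this.handleGetRepo()
  if (url.pathname === '/xrpc/com.atproto.sync.subscribeRepos') return this.handleSubscribe(request, url)
  return Response.json({ error: 'not found' }, { status: 404 })
}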
Step 3: Commit
git add src/pds.js
git commit -m "chore: remove test endpoints, clean up routing"
Task 15: Deploy and Test#
Step 1: Deploy to Cloudflare
npx wrangler deploy
Step 2: Initialize with a real DID
Generate a P-256 keypair and create a did:plc (or use existing).
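For the keypair, a throwaway Node script works (a sketch assuming Node 19+, where webcrypto is exposed as the global crypto; keygen.mjs is a hypothetical filename):
// keygen.mjs - print a fresh P-256 private key as 64-char hex
const keyPair = await crypto.subtle.generateKey(
  { name: 'ECDSA', namedCurve: 'P-256' }, true, ['sign', 'verify'])
const jwk = await crypto.subtle.exportKey('jwk', keyPair.privateKey)
console.log('privateKey:', Buffer.from(jwk.d, 'base64url').toString('hex'))
Run: node keygen.mjs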
# Example initialization
curl -X POST "https://atproto-pds.<your-subdomain>.workers.dev/init?did=did:plc:yourActualDid" \
-H "Content-Type: application/json" \
-d '{"did":"did:plc:yourActualDid","privateKey":"your64CharHexPrivateKey"}'
Step 3: Create a test post
curl -X POST "https://atproto-pds.<your-subdomain>.workers.dev/xrpc/com.atproto.repo.createRecord?did=did:plc:yourActualDid" \
-H "Content-Type: application/json" \
-d '{"collection":"app.bsky.feed.post","record":{"$type":"app.bsky.feed.post","text":"Hello from Cloudflare PDS!","createdAt":"2026-01-04T12:00:00.000Z"}}'
Step 4: Verify repo is accessible
curl "https://atproto-pds.<your-subdomain>.workers.dev/xrpc/com.atproto.sync.getRepo?did=did:plc:yourActualDid" -o test.car
Step 5: Commit deployment config if needed
git add -A
git commit -m "chore: ready for deployment"
Summary#
Total lines: ~400 in a single file
Dependencies: zero
Endpoints: 4 XRPC + 2 internal
What works:
- Create records with proper CIDs
- MST for repo structure
- P-256 signed commits
- CAR file export for relays
- WebSocket streaming for real-time sync
What's next (future tasks):
- Incremental MST updates
- OAuth/JWT authentication
- Blob storage (R2)
- Handle resolution
- DID:PLC registration helper