// at://Press
1import Database from "better-sqlite3";
2import { nanoid } from "nanoid";
3import type { BlogEntry, AtBlob } from "./pds";
4import { createPdsSession } from "./api";
5import { PDS_URL, DID, BLOG_COLLECTION } from "./constants";
6
7let db: Database.Database | null = null;
8
9function getDb(): Database.Database {
10 if (!db) {
11 const dbPath = process.env.DRAFTS_DB_PATH || "/data/drafts.db";
12 db = new Database(dbPath);
13 db.pragma("journal_mode = WAL");
14 db.exec(`
15 CREATE TABLE IF NOT EXISTS drafts (
16 rkey TEXT PRIMARY KEY,
17 title TEXT NOT NULL,
18 content TEXT NOT NULL,
19 created_at TEXT NOT NULL,
20 updated_at TEXT NOT NULL DEFAULT (datetime('now')),
21 blobs TEXT
22 )
23 `);
24 db.exec(`
25 CREATE TABLE IF NOT EXISTS migrations_applied (
26 name TEXT PRIMARY KEY,
27 applied_at TEXT NOT NULL DEFAULT (datetime('now'))
28 )
29 `);
30 }
31 return db;
32}
33
34export function generateRkey(): string {
35 return nanoid(13);
36}
37
/** Shape of a row in the `drafts` table (snake_case column names). */
interface DraftRow {
  rkey: string;
  title: string;
  content: string;
  // ISO-8601 timestamps, stored as TEXT.
  created_at: string;
  updated_at: string;
  // JSON-serialized AtBlob[] when the draft has attachments, else null.
  blobs: string | null;
}
46
47function rowToEntry(row: DraftRow): BlogEntry {
48 return {
49 uri: "",
50 cid: "",
51 rkey: row.rkey,
52 title: row.title,
53 content: row.content,
54 createdAt: row.created_at,
55 visibility: "author",
56 blobs: row.blobs ? JSON.parse(row.blobs) : undefined,
57 };
58}
59
60export function listDrafts(): BlogEntry[] {
61 const rows = getDb()
62 .prepare("SELECT * FROM drafts ORDER BY created_at DESC")
63 .all() as DraftRow[];
64 return rows.map(rowToEntry);
65}
66
67export function getDraft(rkey: string): BlogEntry | null {
68 const row = getDb()
69 .prepare("SELECT * FROM drafts WHERE rkey = ?")
70 .get(rkey) as DraftRow | undefined;
71 return row ? rowToEntry(row) : null;
72}
73
74export function saveDraft(draft: {
75 rkey?: string;
76 title: string;
77 content: string;
78 createdAt?: string;
79 blobs?: AtBlob[];
80}): string {
81 const rkey = draft.rkey || generateRkey();
82 const now = new Date().toISOString();
83 const blobsJson = draft.blobs?.length ? JSON.stringify(draft.blobs) : null;
84
85 getDb()
86 .prepare(`
87 INSERT INTO drafts (rkey, title, content, created_at, updated_at, blobs)
88 VALUES (?, ?, ?, ?, ?, ?)
89 ON CONFLICT(rkey) DO UPDATE SET
90 title = excluded.title,
91 content = excluded.content,
92 updated_at = excluded.updated_at,
93 blobs = excluded.blobs
94 `)
95 .run(rkey, draft.title, draft.content, draft.createdAt || now, now, blobsJson);
96
97 return rkey;
98}
99
100export function deleteDraft(rkey: string): boolean {
101 const result = getDb()
102 .prepare("DELETE FROM drafts WHERE rkey = ?")
103 .run(rkey);
104 return result.changes > 0;
105}
106
107// --- Migration ---
108
109function hasMigrationRun(name: string): boolean {
110 const row = getDb()
111 .prepare("SELECT 1 FROM migrations_applied WHERE name = ?")
112 .get(name);
113 return !!row;
114}
115
116function markMigrationRun(name: string): void {
117 getDb()
118 .prepare("INSERT OR IGNORE INTO migrations_applied (name) VALUES (?)")
119 .run(name);
120}
121
122export async function migratePdsDraftsToSqlite(): Promise<void> {
123 if (hasMigrationRun("pds-drafts-to-sqlite")) return;
124
125 console.log("[migration] Checking for PDS drafts to migrate...");
126
127 // Fetch drafts directly from PDS (not via getDraftEntries which now reads SQLite)
128 const pdsDrafts: Array<{ rkey: string; title: string; content: string; createdAt: string; blobs?: AtBlob[] }> = [];
129 let cursor: string | undefined;
130
131 try {
132 do {
133 const params = new URLSearchParams({
134 repo: DID,
135 collection: BLOG_COLLECTION,
136 limit: "100",
137 });
138 if (cursor) params.set("cursor", cursor);
139
140 const res = await fetch(
141 `${PDS_URL}/xrpc/com.atproto.repo.listRecords?${params}`
142 );
143 if (!res.ok) break;
144
145 const data = await res.json() as {
146 records: Array<{ uri: string; value: Record<string, unknown> }>;
147 cursor?: string;
148 };
149
150 for (const record of data.records) {
151 const val = record.value;
152 if (val.visibility !== "author") continue;
153
154 pdsDrafts.push({
155 rkey: record.uri.split("/").pop() || "",
156 title: (val.title as string) || "Untitled",
157 content: val.content as string,
158 createdAt: val.createdAt as string,
159 blobs: Array.isArray(val.blobs) ? val.blobs as AtBlob[] : undefined,
160 });
161 }
162
163 cursor = data.cursor;
164 } while (cursor);
165 } catch (err) {
166 console.error("[migration] Failed to fetch PDS drafts:", err);
167 }
168
169 if (pdsDrafts.length === 0) {
170 console.log("[migration] No PDS drafts found.");
171 markMigrationRun("pds-drafts-to-sqlite");
172 return;
173 }
174
175 console.log(`[migration] Found ${pdsDrafts.length} PDS drafts, migrating...`);
176
177 for (const draft of pdsDrafts) {
178 saveDraft({
179 rkey: draft.rkey,
180 title: draft.title,
181 content: draft.content,
182 createdAt: draft.createdAt,
183 blobs: draft.blobs,
184 });
185 }
186
187 // Delete migrated drafts from PDS
188 try {
189 const [accessJwt] = await createPdsSession();
190 if (accessJwt) {
191 for (const draft of pdsDrafts) {
192 const res = await fetch(
193 `${PDS_URL}/xrpc/com.atproto.repo.deleteRecord`,
194 {
195 method: "POST",
196 headers: {
197 "Content-Type": "application/json",
198 Authorization: `Bearer ${accessJwt}`,
199 },
200 body: JSON.stringify({
201 repo: DID,
202 collection: BLOG_COLLECTION,
203 rkey: draft.rkey,
204 }),
205 }
206 );
207 if (res.ok) {
208 console.log(`[migration] Deleted PDS draft: ${draft.rkey}`);
209 } else {
210 console.warn(`[migration] Failed to delete PDS draft ${draft.rkey}: ${res.status}`);
211 }
212 }
213 }
214 } catch (err) {
215 console.warn("[migration] Failed to delete PDS drafts (copied to SQLite):", err);
216 }
217
218 markMigrationRun("pds-drafts-to-sqlite");
219 console.log("[migration] PDS draft migration complete.");
220}
221
222// For testing: close and reset the database connection
223export function closeDb(): void {
224 if (db) {
225 db.close();
226 db = null;
227 }
228}