A CLI for publishing standard.site documents to ATProto sequoia.pub
standard site lexicon cli publishing

Compare changes

Choose any two refs to compare.

+703 -126
+69
CHANGELOG.md
··· 1 + ## [0.2.0] - 2026-02-01 2 + 3 + ### 🚀 Features 4 + 5 + - Added bskyPostRef 6 + - Added draft field to frontmatter config 7 + 8 + ### ⚙️ Miscellaneous Tasks 9 + 10 + - Update blog post 11 + - Fix blog build error 12 + - Adjust blog post 13 + - Updated docs 14 + - Version bump 15 + ## [0.1.1] - 2026-01-31 16 + 17 + ### 🐛 Bug Fixes 18 + 19 + - Fix tangled url to repo 20 + 21 + ### ⚙️ Miscellaneous Tasks 22 + 23 + - Merge branch 'main' into feat/blog-post 24 + - Updated blog post 25 + - Updated date 26 + - Added publishing 27 + - Spelling and grammar 28 + - Updated package scripts 29 + - Refactored codebase to use node and fs instead of bun 30 + - Version bump 31 + ## [0.1.0] - 2026-01-30 32 + 33 + ### 🚀 Features 34 + 35 + - Init 36 + - Added blog post 37 + 38 + ### ⚙️ Miscellaneous Tasks 39 + 40 + - Updated package.json 41 + - Cleaned up commands and libs 42 + - Updated init commands 43 + - Updated greeting 44 + - Updated readme 45 + - Link updates 46 + - Version bump 47 + - Added hugo support through frontmatter parsing 48 + - Version bump 49 + - Updated docs 50 + - Adapted inject.ts pattern 51 + - Updated docs 52 + - Version bump 53 + - Updated package scripts 54 + - Updated scripts 55 + - Added ignore field to config 56 + - Update docs 57 + - Version bump 58 + - Added tags to flow 59 + - Added ability to exit during init flow 60 + - Version bump 61 + - Updated docs 62 + - Updated links 63 + - Updated docs 64 + - Initial refactor 65 + - Checkpoint 66 + - Refactored mapping 67 + - Docs updates 68 + - Docs updates 69 + - Version bump
+38 -10
bun.lock
··· 24 24 }, 25 25 "packages/cli": { 26 26 "name": "sequoia-cli", 27 - "version": "0.0.6", 27 + "version": "0.1.0", 28 28 "bin": { 29 - "sequoia": "dist/sequoia", 29 + "sequoia": "dist/index.js", 30 30 }, 31 31 "dependencies": { 32 32 "@atproto/api": "^0.18.17", 33 33 "@clack/prompts": "^1.0.0", 34 34 "cmd-ts": "^0.14.3", 35 + "glob": "^13.0.0", 36 + "mime-types": "^2.1.35", 37 + "minimatch": "^10.1.1", 35 38 }, 36 39 "devDependencies": { 37 - "@types/bun": "latest", 40 + "@types/mime-types": "^3.0.1", 41 + "@types/node": "^20", 38 42 }, 39 43 "peerDependencies": { 40 44 "typescript": "^5", ··· 188 192 189 193 "@iconify/utils": ["@iconify/utils@3.1.0", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "mlly": "^1.8.0" } }, "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw=="], 190 194 195 + "@isaacs/balanced-match": ["@isaacs/balanced-match@4.0.1", "", {}, "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ=="], 196 + 197 + "@isaacs/brace-expansion": ["@isaacs/brace-expansion@5.0.0", "", { "dependencies": { "@isaacs/balanced-match": "^4.0.1" } }, "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA=="], 198 + 191 199 "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="], 192 200 193 201 "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="], ··· 524 532 525 533 "@types/mdx": ["@types/mdx@2.0.13", "", {}, 
"sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw=="], 526 534 535 + "@types/mime-types": ["@types/mime-types@3.0.1", "", {}, "sha512-xRMsfuQbnRq1Ef+C+RKaENOxXX87Ygl38W1vDfPHRku02TgQr+Qd8iivLtAMcR0KF5/29xlnFihkTlbqFrGOVQ=="], 536 + 527 537 "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], 528 538 529 - "@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 539 + "@types/node": ["@types/node@20.19.30", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-WJtwWJu7UdlvzEAUm484QNg5eAoq5QR08KDNx7g45Usrs2NtOPiX8ugDqmKdXkyL03rBqU5dYNYVQetEpBHq2g=="], 530 540 531 541 "@types/react": ["@types/react@19.2.10", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-WPigyYuGhgZ/cTPRXB2EwUw+XvsRA3GqHlsP4qteqrnnjDrApbS7MxcGr/hke5iUoeB7E/gQtrs9I37zAJ0Vjw=="], 532 542 ··· 834 844 835 845 "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], 836 846 847 + "glob": ["glob@13.0.0", "", { "dependencies": { "minimatch": "^10.1.1", "minipass": "^7.1.2", "path-scurry": "^2.0.0" } }, "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA=="], 848 + 837 849 "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], 838 850 839 851 "hachure-fill": ["hachure-fill@0.5.2", "", {}, "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg=="], ··· 1094 1106 1095 1107 "mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], 1096 1108 1097 - "mime-db": 
["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], 1109 + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], 1110 + 1111 + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], 1098 1112 1099 1113 "mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="], 1100 1114 1101 1115 "mini-svg-data-uri": ["mini-svg-data-uri@1.4.4", "", { "bin": { "mini-svg-data-uri": "cli.js" } }, "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg=="], 1102 1116 1117 + "minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="], 1118 + 1119 + "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], 1120 + 1103 1121 "minisearch": ["minisearch@7.2.0", "", {}, "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg=="], 1104 1122 1105 1123 "mlly": ["mlly@1.8.0", "", { "dependencies": { "acorn": "^8.15.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "ufo": "^1.6.1" } }, "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g=="], ··· 1149 1167 "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], 1150 1168 1151 1169 "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], 1170 + 1171 + "path-scurry": ["path-scurry@2.0.1", 
"", { "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" } }, "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA=="], 1152 1172 1153 1173 "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], 1154 1174 ··· 1336 1356 1337 1357 "uint8arrays": ["uint8arrays@3.0.0", "", { "dependencies": { "multiformats": "^9.4.2" } }, "sha512-HRCx0q6O9Bfbp+HHSfQQKD7wU70+lydKVt4EghkdOvlK/NlrF90z+eXV34mUd48rNvVJXwkrMSPpCATkct8fJA=="], 1338 1358 1339 - "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1359 + "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], 1340 1360 1341 1361 "unicode-segmenter": ["unicode-segmenter@0.14.5", "", {}, "sha512-jHGmj2LUuqDcX3hqY12Ql+uhUTn8huuxNZGq7GvtF6bSybzH3aFgedYu/KTzQStEgt1Ra2F3HxadNXsNjb3m3g=="], 1342 1362 ··· 1442 1462 1443 1463 "@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], 1444 1464 1465 + "bun-types/@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 1466 + 1445 1467 "chevrotain/lodash-es": ["lodash-es@4.17.21", "", {}, "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw=="], 1468 + 1469 + "compressible/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], 1446 1470 1447 1471 "compression/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], 1448 1472 ··· 1456 1480 1457 1481 "d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="], 1458 1482 1483 + "eval/@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 1484 + 1459 1485 "hast-util-from-dom/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w=="], 1460 1486 1461 1487 "hast-util-from-parse5/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w=="], ··· 1474 1500 1475 1501 "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], 1476 1502 1503 + "path-scurry/lru-cache": ["lru-cache@11.2.5", "", {}, "sha512-vFrFJkWtJvJnD5hg+hJvVE8Lh/TcMzKnTgCWmtBipwI5yLX/iX+5UB2tfuyODF5E7k9xEzMdYgGqaSb1c0c5Yw=="], 1504 + 1477 1505 "radix-ui/@radix-ui/react-label": ["@radix-ui/react-label@2.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": 
["@types/react", "@types/react-dom"] }, "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ=="], 1478 1506 1479 1507 "rollup/fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], 1480 1508 1481 1509 "send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], 1482 - 1483 - "sequoia-cli/@types/bun": ["@types/bun@1.3.8", "", { "dependencies": { "bun-types": "1.3.8" } }, "sha512-3LvWJ2q5GerAXYxO2mffLTqOzEu5qnhEAlh48Vnu8WQfnmSwbgagjGZV6BoHKJztENYEDn6QmVd949W4uESRJA=="], 1484 1510 1485 1511 "vite/fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], 1486 1512 ··· 1488 1514 1489 1515 "@shikijs/twoslash/twoslash/twoslash-protocol": ["twoslash-protocol@0.2.12", "", {}, "sha512-5qZLXVYfZ9ABdjqbvPc4RWMr7PrpPaaDSeaYY55vl/w1j6H6kzsWK/urAEIXlzYlyrFmyz1UbwIt+AA0ck+wbg=="], 1490 1516 1517 + "bun-types/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1518 + 1491 1519 "compression/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 1492 1520 1493 1521 "create-vocs/@clack/prompts/@clack/core": ["@clack/core@0.3.5", "", { "dependencies": { "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-5cfhQNH+1VQ2xLQlmzXMqUoiaH0lRBq9/CLW9lTyMbuKLC3+xEK01tHVvyut++mLOn5urSHmkm6I0Lg9MaJSTQ=="], ··· 1498 1526 1499 1527 "d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="], 1500 1528 1529 + "eval/@types/node/undici-types": ["undici-types@7.16.0", "", {}, 
"sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1530 + 1501 1531 "hast-util-from-dom/hastscript/property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], 1502 1532 1503 1533 "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], 1504 1534 1505 1535 "send/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 1506 - 1507 - "sequoia-cli/@types/bun/bun-types": ["bun-types@1.3.8", "", { "dependencies": { "@types/node": "*" } }, "sha512-fL99nxdOWvV4LqjmC+8Q9kW3M4QTtTR1eePs94v5ctGqU8OeceWrSUaRw3JYb7tU3FkMIAjkueehrHPPPGKi5Q=="], 1508 1536 } 1509 1537 }
+2
docs/.gitignore
··· 1 1 .wrangler 2 + 3 + .sequoia-state.json
+7
docs/docs/pages/blog/index.mdx
··· 1 + --- 2 + layout: minimal 3 + --- 4 + 5 + # Blog 6 + 7 + ::blog-posts
+54
docs/docs/pages/blog/introducing-sequoia.mdx
··· 1 + --- 2 + layout: minimal 3 + title: "Introducing Sequoia: Publishing for the Open Web" 4 + date: 2026-01-30 5 + atUri: "at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.document/3mdnztyhoem2v" 6 + --- 7 + 8 + # Introducing Sequoia: Publishing for the Open Web 9 + 10 + ![hero](/hero.png) 11 + 12 + Today I'm excited to release a new tool for the [AT Protocol](https://atproto.com): Sequoia. This is a CLI tool that can take your existing self-hosted blog and publish it to the ATmosphere using [Standard.site](https://standard.site) lexicons. 13 + 14 + If you haven't explored ATProto you can find a primer [here](https://stevedylan.dev/posts/atproto-starter/), but in short, it's a new way to publish content to the web that puts ownership and control back in the hands of users. Blogs in some ways have already been doing this, but they've been missing a key piece: distribution. One of the unique features of ATProto is [lexicons](https://atproto.com/guides/lexicon), which are schemas that apps build to create folders of content on a user's personal data server. The domain verified nature lets them be indexed and aggregated with ease. Outside of apps, lexicons can be extended by community members to build a common standard. That's exactly how [Standard.site](https://standard.site) was brought about, pushing a new way for standardizing publications and documents on ATProto. 15 + 16 + The founders and platforms behind the standard, [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), all serve to make creating and sharing blogs easy. If you are not a technical person and don't have a blog already, I would highly recommend checking all of them out! However, for those of us who already have blogs, there was a need for a tool that could make it easy to publish existing and new content with this new standard. Thus Sequoia was born. 
17 + 18 + Sequoia is a relatively simple CLI that can do the following: 19 + - Authenticate with your ATProto handle 20 + - Configure your blog through an interactive setup process 21 + - Create publication and document records on your PDS 22 + - Add necessary verification pieces to your site 23 + - Sync with existing records on your PDS 24 + 25 + It's designed to be run inside your existing repo, build a one-time config, and then be part of your regular workflow by publishing content or updating existing content, all following the Standard.site lexicons. The best part? It's designed to be fully interoperable. It doesn't matter if you're using Astro, 11ty, Hugo, Svelte, Next, Gatsby, Zola, you name it. If it's a static blog with markdown, Sequoia will work (and if for some reason it doesn't, [open an issue!](https://tangled.org/stevedylan.dev/sequoia/issues/new)). Here's a quick demo of Sequoia in action: 26 + 27 + <iframe 28 + class="w-full" 29 + style={{aspectRatio: "16/9"}} 30 + src="https://www.youtube.com/embed/sxursUHq5kw" 31 + title="YouTube video player" 32 + frameborder="0" 33 + allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" 34 + referrerpolicy="strict-origin-when-cross-origin" 35 + allowfullscreen 36 + ></iframe> 37 + 38 + ATProto has proven to be one of the more exciting pieces of technology that has surfaced in the past few years, and it gives some of us hope for a web that is open once more. No more walled gardens, full control of our data, and connected through lexicons. 39 + 40 + Install Sequoia today and check out the [quickstart guide](/quickstart) to publish your content into the ATmosphere ๐ŸŒณ 41 + 42 + :::code-group 43 + ```bash [npm] 44 + npm i -g sequoia-cli 45 + ``` 46 + 47 + ```bash [pnpm] 48 + pnpm i -g sequoia-cli 49 + ``` 50 + 51 + ```bash [bun] 52 + bun i -g sequoia-cli 53 + ``` 54 + :::
+12 -2
docs/docs/pages/config.mdx
··· 15 15 | `identity` | `string` | No | - | Which stored identity to use | 16 16 | `frontmatter` | `object` | No | - | Custom frontmatter field mappings | 17 17 | `ignore` | `string[]` | No | - | Glob patterns for files to ignore | 18 + | `bluesky` | `object` | No | - | Bluesky posting configuration | 19 + | `bluesky.enabled` | `boolean` | No | `false` | Post to Bluesky when publishing documents | 20 + | `bluesky.maxAgeDays` | `number` | No | `30` | Only post documents published within this many days | 18 21 19 22 ### Example 20 23 ··· 31 34 "frontmatter": { 32 35 "publishDate": "date" 33 36 }, 34 - "ignore": ["_index.md"] 37 + "ignore": ["_index.md"], 38 + "bluesky": { 39 + "enabled": true, 40 + "maxAgeDays": 30 41 + } 35 42 } 36 43 ``` 37 44 ··· 44 51 | `publishDate` | `string` | Yes | `"publishDate"`, `"pubDate"`, `"date"`, `"createdAt"`, `"created_at"` | Publication date | 45 52 | `coverImage` | `string` | No | `"ogImage"` | Cover image filename | 46 53 | `tags` | `string[]` | No | `"tags"` | Post tags/categories | 54 + | `draft` | `boolean` | No | `"draft"` | If `true`, post is skipped during publish | 47 55 48 56 ### Example 49 57 ··· 54 62 publishDate: 2024-01-15 55 63 ogImage: cover.jpg 56 64 tags: [welcome, intro] 65 + draft: false 57 66 --- 58 67 ``` 59 68 ··· 65 74 { 66 75 "frontmatter": { 67 76 "publishDate": "date", 68 - "coverImage": "thumbnail" 77 + "coverImage": "thumbnail", 78 + "draft": "private" 69 79 } 70 80 } 71 81 ```
+40 -2
docs/docs/pages/publishing.mdx
··· 10 10 sequoia publish --dry-run 11 11 ``` 12 12 13 - This will print out the posts that it has discovered, what will be published, and how many. Once everything looks good, send it! 13 + This will print out the posts that it has discovered, what will be published, and how many. If Bluesky posting is enabled, it will also show which posts will be shared to Bluesky. Once everything looks good, send it! 14 14 15 15 ```bash [Terminal] 16 16 sequoia publish ··· 23 23 If you happen to lose the state file or if you want to pull down records you already have published, you can use the `sync` command. 24 24 25 25 ```bash [Terminal] 26 - seuqoia sync 26 + sequoia sync 27 27 28 28 29 29 Sync will use your ATProto handle to look through all of the `standard.site.document` records on your PDS, and pull down the records that are for the publication in the config. 30 + 31 + ## Bluesky Posting 32 + 33 + Sequoia can automatically post to Bluesky when new documents are published. Enable this in your config: 34 + 35 + ```json 36 + { 37 + "bluesky": { 38 + "enabled": true, 39 + "maxAgeDays": 30 40 + } 41 + } 42 + ``` 43 + 44 + When enabled, each new document will create a Bluesky post with the title, description, and canonical URL. If a cover image exists, it will be embedded in the post. The combined content is limited to 300 characters. 45 + 46 + The `maxAgeDays` setting prevents flooding your feed when first setting up Sequoia. For example, if you have 40 existing blog posts, only those published within the last 30 days will be posted to Bluesky. 47 + 48 + ## Draft Posts 49 + 50 + Posts with `draft: true` in their frontmatter are automatically skipped during publishing. This lets you work on content without accidentally publishing it. 
51 + 52 + ```yaml 53 + --- 54 + title: Work in Progress 55 + draft: true 56 + --- 57 + ``` 58 + 59 + If your framework uses a different field name (like `private` or `hidden`), configure it in `sequoia.json`: 60 + 61 + ```json 62 + { 63 + "frontmatter": { 64 + "draft": "private" 65 + } 66 + } 67 + ``` 30 68 31 69 ## Troubleshooting 32 70
+2 -2
docs/docs/pages/quickstart.mdx
··· 33 33 34 34 ### Authorize 35 35 36 - In order for Sequoia to publish or update records on your PDS, you need to authoize it with your ATProto handle and an app password. 36 + In order for Sequoia to publish or update records on your PDS, you need to authorize it with your ATProto handle and an app password. 37 37 38 38 :::tip 39 39 You can create an app password [here](https://bsky.app/settings/app-passwords) ··· 59 59 - **Public/static directory** - The path for the folder where your public items go, e.g. `./public`. Generally used for opengraph images or icons, but in this case we need it to store a `.well-known` verification for your blog, [read more here](/verifying). 60 60 - **Build output directory** - Where you published html css and js lives, e.g. `./dist` 61 61 - **URL path prefix for posts** - The path that goes before a post slug, e.g. the prefix for `https://sequoia.pub/blog/hello` would be `/blog`. 62 - - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with infomation like `title`, `description`, and `publishedDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 62 + - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with information like `title`, `description`, and `publishDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 63 63 - **Publication setup** - Here you can choose to `Create a new publication` which will create a `site.standard.publication` record on your PDS, or you can `Use an existing publication AT URI`. If you haven't done this before, select `Create a new publication`. 64 64 - **Publication name** - The name of your blog 65 65 - **Publication description** - A description for your blog
+2 -2
docs/docs/pages/setup.mdx
··· 28 28 29 29 ## Authorize 30 30 31 - In order for Sequoia to publish or update records on your PDS, you need to authoize it with your ATProto handle and an app password. 31 + In order for Sequoia to publish or update records on your PDS, you need to authorize it with your ATProto handle and an app password. 32 32 33 33 :::tip 34 34 You can create an app password [here](https://bsky.app/settings/app-passwords) ··· 56 56 - **Public/static directory** - The path for the folder where your public items go, e.g. `./public`. Generally used for opengraph images or icons, but in this case we need it to store a `.well-known` verification for your blog, [read more here](/verifying). 57 57 - **Build output directory** - Where you published html css and js lives, e.g. `./dist` 58 58 - **URL path prefix for posts** - The path that goes before a post slug, e.g. the prefix for `https://sequoia.pub/blog/hello` would be `/blog`. 59 - - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with infomation like `title`, `description`, and `publishedDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 59 + - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with information like `title`, `description`, and `publishDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 60 60 - **Publication setup** - Here you can choose to `Create a new publication` which will create a `site.standard.publication` record on your PDS, or you can `Use an existing publication AT URI`. If you haven't done this before, select `Create a new publication`. 61 61 - **Publication name** - The name of your blog 62 62 - **Publication description** - A description for your blog
+2 -2
docs/docs/pages/verifying.mdx
··· 3 3 In order for your posts to show up on indexers you need to make sure your publication and your documents are verified. 4 4 5 5 :::tip 6 - You an learn more about Standard.site verification [here](https://standard.site/) 6 + You can learn more about Standard.site verification [here](https://standard.site/) 7 7 ::: 8 8 9 9 ## Publication Verification ··· 22 22 23 23 ### pds.ls 24 24 25 - Visit [pds.ls](https://pds.ls) and in the search bar paste in a `arUri` for either your publication or document, click the info tab, and then click the "info" tab. This will have a schema verification that will make sure the fields are accurate, however this will not cover Standard.site verification as perscribed on their website. 25 + Visit [pds.ls](https://pds.ls) and in the search bar paste in a `arUri` for either your publication or document, click the info tab, and then click the "info" tab. This will have a schema verification that will make sure the fields are accurate, however this will not cover Standard.site verification as prescribed on their website. 26 26 27 27 ### Standard.site Validator 28 28
+2 -2
docs/docs/pages/what-is-sequoia.mdx
··· 3 3 Sequoia is a simple CLI that can be used to publish Standard.site lexicons to the AT Protocol. Yeah that's a mouthful; let's break it down. 4 4 5 5 - [AT Protocol](https://atproto.com) - As the site says, "The AT Protocol is an open, decentralized network for building social applications." In reality it's a bit more than that. It's a new way to publish content to the web that puts control back in the hands of users without sacrificing distrubtion. There's a lot to unpack, but you can find a primer [here](https://stevedylan.dev/posts/atproto-starter/). 6 - - [Lexicons](https://atproto.com/guides/lexicon) - Lexicons are schemas used inside the AT Protocol. If you were to "like" a post, what would that consist of? Probably _who_ liked it, _what_ post was liked, and the _author_ of the post. The unique property to lexicons is that anyone can publish them and have them verified under a domain. Then these lexicons can be used to build apps by pulling a users records, aggregating them using an indexer, and a whole lot more! 7 - - [Standard.site](https://standard.site) - Standard.site is a set of lexicons specailly designed for publishing content. It was started by the founders of [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), with the mission of finding a schema that can be used for blog posts and blog sites themselves (if you don't have a self-hosted blog, definitely check those platforms out!). So far it has proven to be the lexicon of choice for publishing content to ATProto with multiple tools and lexicons revolving around the standard. 6 + - [Lexicons](https://atproto.com/guides/lexicon) - Lexicons are schemas used inside the AT Protocol. If you were to "like" a post, what would that consist of? Probably _who_ liked it, _what_ post was liked, and the _author_ of the post. A unique property of lexicons is that anyone can publish them and have them verified under a domain. 
Then these lexicons can be used to build apps by pulling a user's records, aggregating them using an indexer, and a whole lot more! 7 + - [Standard.site](https://standard.site) - Standard.site is a set of lexicons specially designed for publishing content. It was started by the founders of [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), with the mission of finding a schema that can be used for blog posts and blog sites themselves (if you don't have a self-hosted blog, definitely check those platforms out!). So far it has proven to be the lexicon of choice for publishing content to ATProto with multiple tools and lexicons revolving around the standard. 8 8 9 9 The goal of Sequoia is to make it easier for those with existing self-hosted blogs to publish their content to the ATmosphere, no matter what SSG or framework you might be using. As of right now the focus will be static sites, but if there is enough traction there might be a future package that can be used for SSR frameworks too. 10 10
docs/docs/public/.well-known/.gitkeep

This is a binary file and will not be displayed.

+1
docs/docs/public/.well-known/site.standard.publication
··· 1 + at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.publication/3mdnzt4rqr42v
docs/docs/public/hero.png

This is a binary file and will not be displayed.

+1 -1
docs/package.json
··· 6 6 "scripts": { 7 7 "dev": "vocs dev", 8 8 "build": "vocs build && bun inject-og-tags.ts", 9 - "deploy": "bun run build && bunx wrangler pages deploy docs/dist", 9 + "deploy": "bun run build && sequoia inject && bunx wrangler pages deploy docs/dist", 10 10 "preview": "vocs preview" 11 11 }, 12 12 "dependencies": {
+14
docs/sequoia.json
··· 1 + { 2 + "siteUrl": "https://sequoia.pub", 3 + "contentDir": "docs/pages/blog", 4 + "imagesDir": "docs/public", 5 + "publicDir": "docs/public", 6 + "outputDir": "docs/dist", 7 + "pathPrefix": "/blog", 8 + "publicationUri": "at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.publication/3mdnzt4rqr42v", 9 + "pdsUrl": "https://andromeda.social", 10 + "frontmatter": { 11 + "publishDate": "date" 12 + }, 13 + "ignore": ["index.mdx"] 14 + }
+1 -1
docs/vocs.config.ts
··· 17 17 topNav: [ 18 18 { text: "Docs", link: "/quickstart", match: "/" }, 19 19 { text: "Blog", link: "/blog" }, 20 - { text: "Tanlged", link: "https://tangled.org/stevedylan.dev/sequoia" }, 20 + { text: "Tangled", link: "https://tangled.org/stevedylan.dev/sequoia" }, 21 21 { text: "GitHub", link: "https://github.com/stevedylandev/sequoia" }, 22 22 ], 23 23 sidebar: [
+11 -8
packages/cli/package.json
··· 1 1 { 2 2 "name": "sequoia-cli", 3 - "version": "0.1.0", 4 - "module": "dist/index.js", 3 + "version": "0.2.0", 5 4 "type": "module", 6 5 "bin": { 7 - "sequoia": "dist/sequoia" 6 + "sequoia": "dist/index.js" 8 7 }, 9 8 "files": [ 10 9 "dist", 11 10 "README.md" 12 11 ], 13 - "main": "./dist/sequoia", 12 + "main": "./dist/index.js", 14 13 "exports": { 15 - ".": "./dist/sequoia" 14 + ".": "./dist/index.js" 16 15 }, 17 16 "scripts": { 18 - "build": "bun build src/index.ts --compile --outfile dist/sequoia", 17 + "build": "bun build src/index.ts --target node --outdir dist", 19 18 "dev": "bun run build && bun link", 20 19 "deploy": "bun run build && bun publish" 21 20 }, 22 21 "devDependencies": { 23 - "@types/bun": "latest" 22 + "@types/mime-types": "^3.0.1", 23 + "@types/node": "^20" 24 24 }, 25 25 "peerDependencies": { 26 26 "typescript": "^5" 27 27 }, 28 28 "dependencies": { 29 29 "@atproto/api": "^0.18.17", 30 + "@clack/prompts": "^1.0.0", 30 31 "cmd-ts": "^0.14.3", 31 - "@clack/prompts": "^1.0.0" 32 + "glob": "^13.0.0", 33 + "mime-types": "^2.1.35", 34 + "minimatch": "^10.1.1" 32 35 } 33 36 }
+62 -9
packages/cli/src/commands/init.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import { command } from "cmd-ts"; 2 3 import { 3 4 intro, ··· 14 15 import { findConfig, generateConfigTemplate } from "../lib/config"; 15 16 import { loadCredentials } from "../lib/credentials"; 16 17 import { createAgent, createPublication } from "../lib/atproto"; 17 - import type { FrontmatterMapping } from "../lib/types"; 18 + import type { FrontmatterMapping, BlueskyConfig } from "../lib/types"; 19 + 20 + async function fileExists(filePath: string): Promise<boolean> { 21 + try { 22 + await fs.access(filePath); 23 + return true; 24 + } catch { 25 + return false; 26 + } 27 + } 18 28 19 29 const onCancel = () => { 20 30 outro("Setup cancelled"); ··· 127 137 message: "Field name for tags:", 128 138 defaultValue: "tags", 129 139 placeholder: "tags, categories, keywords, etc.", 140 + }), 141 + draftField: () => 142 + text({ 143 + message: "Field name for draft status:", 144 + defaultValue: "draft", 145 + placeholder: "draft, private, hidden, etc.", 130 146 }), 131 147 }, 132 148 { onCancel }, ··· 139 155 ["publishDate", frontmatterConfig.dateField, "publishDate"], 140 156 ["coverImage", frontmatterConfig.coverField, "ogImage"], 141 157 ["tags", frontmatterConfig.tagsField, "tags"], 158 + ["draft", frontmatterConfig.draftField, "draft"], 142 159 ]; 143 160 144 161 const builtMapping = fieldMappings.reduce<FrontmatterMapping>( ··· 253 270 publicationUri = uri as string; 254 271 } 255 272 273 + // Bluesky posting configuration 274 + const enableBluesky = await confirm({ 275 + message: "Enable automatic Bluesky posting when publishing?", 276 + initialValue: false, 277 + }); 278 + 279 + if (enableBluesky === Symbol.for("cancel")) { 280 + onCancel(); 281 + } 282 + 283 + let blueskyConfig: BlueskyConfig | undefined; 284 + if (enableBluesky) { 285 + const maxAgeDaysInput = await text({ 286 + message: "Maximum age (in days) for posts to be shared on Bluesky:", 287 + defaultValue: "7", 288 + placeholder: "7", 289 + validate: 
(value) => { 290 + const num = parseInt(value, 10); 291 + if (isNaN(num) || num < 1) { 292 + return "Please enter a positive number"; 293 + } 294 + }, 295 + }); 296 + 297 + if (maxAgeDaysInput === Symbol.for("cancel")) { 298 + onCancel(); 299 + } 300 + 301 + const maxAgeDays = parseInt(maxAgeDaysInput as string, 10); 302 + blueskyConfig = { 303 + enabled: true, 304 + ...(maxAgeDays !== 7 && { maxAgeDays }), 305 + }; 306 + } 307 + 256 308 // Get PDS URL from credentials (already loaded earlier) 257 309 const pdsUrl = credentials?.pdsUrl; 258 310 ··· 267 319 publicationUri, 268 320 pdsUrl, 269 321 frontmatter: frontmatterMapping, 322 + bluesky: blueskyConfig, 270 323 }); 271 324 272 325 const configPath = path.join(process.cwd(), "sequoia.json"); 273 - await Bun.write(configPath, configContent); 326 + await fs.writeFile(configPath, configContent); 274 327 275 328 log.success(`Configuration saved to ${configPath}`); 276 329 ··· 283 336 const wellKnownPath = path.join(wellKnownDir, "site.standard.publication"); 284 337 285 338 // Ensure .well-known directory exists 286 - await Bun.write(path.join(wellKnownDir, ".gitkeep"), ""); 287 - await Bun.write(wellKnownPath, publicationUri); 339 + await fs.mkdir(wellKnownDir, { recursive: true }); 340 + await fs.writeFile(path.join(wellKnownDir, ".gitkeep"), ""); 341 + await fs.writeFile(wellKnownPath, publicationUri); 288 342 289 343 log.success(`Created ${wellKnownPath}`); 290 344 291 345 // Update .gitignore 292 346 const gitignorePath = path.join(process.cwd(), ".gitignore"); 293 - const gitignoreFile = Bun.file(gitignorePath); 294 347 const stateFilename = ".sequoia-state.json"; 295 348 296 - if (await gitignoreFile.exists()) { 297 - const gitignoreContent = await gitignoreFile.text(); 349 + if (await fileExists(gitignorePath)) { 350 + const gitignoreContent = await fs.readFile(gitignorePath, "utf-8"); 298 351 if (!gitignoreContent.includes(stateFilename)) { 299 - await Bun.write( 352 + await fs.writeFile( 300 353 
gitignorePath, 301 354 gitignoreContent + `\n${stateFilename}\n`, 302 355 ); 303 356 log.info(`Added ${stateFilename} to .gitignore`); 304 357 } 305 358 } else { 306 - await Bun.write(gitignorePath, `${stateFilename}\n`); 359 + await fs.writeFile(gitignorePath, `${stateFilename}\n`); 307 360 log.info(`Created .gitignore with ${stateFilename}`); 308 361 } 309 362
+11 -12
packages/cli/src/commands/inject.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import { command, flag, option, optional, string } from "cmd-ts"; 2 3 import { log } from "@clack/prompts"; 3 4 import * as path from "path"; 4 - import { Glob } from "bun"; 5 + import { glob } from "glob"; 5 6 import { loadConfig, loadState, findConfig } from "../lib/config"; 6 7 7 8 export const injectCommand = command({ ··· 97 98 log.info(`Found ${pathToAtUri.size} published posts in state`); 98 99 99 100 // Scan for HTML files 100 - const glob = new Glob("**/*.html"); 101 - const htmlFiles: string[] = []; 102 - 103 - for await (const file of glob.scan(resolvedOutputDir)) { 104 - htmlFiles.push(path.join(resolvedOutputDir, file)); 105 - } 101 + const htmlFiles = await glob("**/*.html", { 102 + cwd: resolvedOutputDir, 103 + absolute: false, 104 + }); 106 105 107 106 if (htmlFiles.length === 0) { 108 107 log.warn(`No HTML files found in ${resolvedOutputDir}`); ··· 115 114 let skippedCount = 0; 116 115 let alreadyHasCount = 0; 117 116 118 - for (const htmlPath of htmlFiles) { 117 + for (const file of htmlFiles) { 118 + const htmlPath = path.join(resolvedOutputDir, file); 119 119 // Try to match this HTML file to a published post 120 - const relativePath = path.relative(resolvedOutputDir, htmlPath); 120 + const relativePath = file; 121 121 const htmlDir = path.dirname(relativePath); 122 122 const htmlBasename = path.basename(relativePath, ".html"); 123 123 ··· 152 152 } 153 153 154 154 // Read the HTML file 155 - const file = Bun.file(htmlPath); 156 - let content = await file.text(); 155 + let content = await fs.readFile(htmlPath, "utf-8"); 157 156 158 157 // Check if link tag already exists 159 158 const linkTag = `<link rel="site.standard.document" href="${atUri}">`; ··· 184 183 `${indent}${linkTag}\n${indent}` + 185 184 content.slice(headCloseIndex); 186 185 187 - await Bun.write(htmlPath, content); 186 + await fs.writeFile(htmlPath, content); 188 187 log.success(` Injected into: ${relativePath}`); 189 188 
injectedCount++; 190 189 }
+91 -7
packages/cli/src/commands/publish.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import { command, flag } from "cmd-ts"; 2 3 import { select, spinner, log } from "@clack/prompts"; 3 4 import * as path from "path"; 4 5 import { loadConfig, loadState, saveState, findConfig } from "../lib/config"; 5 6 import { loadCredentials, listCredentials, getCredentials } from "../lib/credentials"; 6 - import { createAgent, createDocument, updateDocument, uploadImage, resolveImagePath } from "../lib/atproto"; 7 + import { createAgent, createDocument, updateDocument, uploadImage, resolveImagePath, createBlueskyPost, addBskyPostRefToDocument } from "../lib/atproto"; 7 8 import { 8 9 scanContentDirectory, 9 10 getContentHash, 10 11 updateFrontmatterWithAtUri, 11 12 } from "../lib/markdown"; 12 - import type { BlogPost, BlobObject } from "../lib/types"; 13 + import type { BlogPost, BlobObject, StrongRef } from "../lib/types"; 13 14 import { exitOnCancel } from "../lib/prompts"; 14 15 15 16 export const publishCommand = command({ ··· 95 96 action: "create" | "update"; 96 97 reason: string; 97 98 }> = []; 99 + const draftPosts: BlogPost[] = []; 98 100 99 101 for (const post of posts) { 102 + // Skip draft posts 103 + if (post.frontmatter.draft) { 104 + draftPosts.push(post); 105 + continue; 106 + } 107 + 100 108 const contentHash = await getContentHash(post.rawContent); 101 109 const relativeFilePath = path.relative(configDir, post.filePath); 102 110 const postState = state.posts[relativeFilePath]; ··· 124 132 } 125 133 } 126 134 135 + if (draftPosts.length > 0) { 136 + log.info(`Skipping ${draftPosts.length} draft post${draftPosts.length === 1 ? "" : "s"}`); 137 + } 138 + 127 139 if (postsToPublish.length === 0) { 128 140 log.success("All posts are up to date. Nothing to publish."); 129 141 return; 130 142 } 131 143 132 144 log.info(`\n${postsToPublish.length} posts to publish:\n`); 145 + 146 + // Bluesky posting configuration 147 + const blueskyEnabled = config.bluesky?.enabled ?? 
false; 148 + const maxAgeDays = config.bluesky?.maxAgeDays ?? 7; 149 + const cutoffDate = new Date(); 150 + cutoffDate.setDate(cutoffDate.getDate() - maxAgeDays); 151 + 133 152 for (const { post, action, reason } of postsToPublish) { 134 153 const icon = action === "create" ? "+" : "~"; 135 - log.message(` ${icon} ${post.frontmatter.title} (${reason})`); 154 + const relativeFilePath = path.relative(configDir, post.filePath); 155 + const existingBskyPostRef = state.posts[relativeFilePath]?.bskyPostRef; 156 + 157 + let bskyNote = ""; 158 + if (blueskyEnabled) { 159 + if (existingBskyPostRef) { 160 + bskyNote = " [bsky: exists]"; 161 + } else { 162 + const publishDate = new Date(post.frontmatter.publishDate); 163 + if (publishDate < cutoffDate) { 164 + bskyNote = ` [bsky: skipped, older than ${maxAgeDays} days]`; 165 + } else { 166 + bskyNote = " [bsky: will post]"; 167 + } 168 + } 169 + } 170 + 171 + log.message(` ${icon} ${post.frontmatter.title} (${reason})${bskyNote}`); 136 172 } 137 173 138 174 if (dryRun) { 175 + if (blueskyEnabled) { 176 + log.info(`\nBluesky posting: enabled (max age: ${maxAgeDays} days)`); 177 + } 139 178 log.info("\nDry run complete. 
No changes made."); 140 179 return; 141 180 } ··· 156 195 let publishedCount = 0; 157 196 let updatedCount = 0; 158 197 let errorCount = 0; 198 + let bskyPostCount = 0; 159 199 160 200 for (const { post, action } of postsToPublish) { 161 201 s.start(`Publishing: ${post.frontmatter.title}`); ··· 164 204 // Handle cover image upload 165 205 let coverImage: BlobObject | undefined; 166 206 if (post.frontmatter.ogImage) { 167 - const imagePath = resolveImagePath( 207 + const imagePath = await resolveImagePath( 168 208 post.frontmatter.ogImage, 169 209 imagesDir, 170 210 contentDir ··· 181 221 } 182 222 } 183 223 184 - // Track atUri and content for state saving 224 + // Track atUri, content for state saving, and bskyPostRef 185 225 let atUri: string; 186 226 let contentForHash: string; 227 + let bskyPostRef: StrongRef | undefined; 228 + const relativeFilePath = path.relative(configDir, post.filePath); 229 + 230 + // Check if bskyPostRef already exists in state 231 + const existingBskyPostRef = state.posts[relativeFilePath]?.bskyPostRef; 187 232 188 233 if (action === "create") { 189 234 atUri = await createDocument(agent, post, config, coverImage); ··· 191 236 192 237 // Update frontmatter with atUri 193 238 const updatedContent = updateFrontmatterWithAtUri(post.rawContent, atUri); 194 - await Bun.write(post.filePath, updatedContent); 239 + await fs.writeFile(post.filePath, updatedContent); 195 240 log.info(` Updated frontmatter in ${path.basename(post.filePath)}`); 196 241 197 242 // Use updated content (with atUri) for hash so next run sees matching hash ··· 207 252 updatedCount++; 208 253 } 209 254 255 + // Create Bluesky post if enabled and conditions are met 256 + if (blueskyEnabled) { 257 + if (existingBskyPostRef) { 258 + log.info(` Bluesky post already exists, skipping`); 259 + bskyPostRef = existingBskyPostRef; 260 + } else { 261 + const publishDate = new Date(post.frontmatter.publishDate); 262 + 263 + if (publishDate < cutoffDate) { 264 + log.info(` Post is 
older than ${maxAgeDays} days, skipping Bluesky post`); 265 + } else { 266 + // Create Bluesky post 267 + try { 268 + const pathPrefix = config.pathPrefix || "/posts"; 269 + const canonicalUrl = `${config.siteUrl}${pathPrefix}/${post.slug}`; 270 + 271 + bskyPostRef = await createBlueskyPost(agent, { 272 + title: post.frontmatter.title, 273 + description: post.frontmatter.description, 274 + canonicalUrl, 275 + coverImage, 276 + publishedAt: post.frontmatter.publishDate, 277 + }); 278 + 279 + // Update document record with bskyPostRef 280 + await addBskyPostRefToDocument(agent, atUri, bskyPostRef); 281 + log.info(` Created Bluesky post: ${bskyPostRef.uri}`); 282 + bskyPostCount++; 283 + } catch (bskyError) { 284 + const errorMsg = bskyError instanceof Error ? bskyError.message : String(bskyError); 285 + log.warn(` Failed to create Bluesky post: ${errorMsg}`); 286 + } 287 + } 288 + } 289 + } 290 + 210 291 // Update state (use relative path from config directory) 211 292 const contentHash = await getContentHash(contentForHash); 212 - const relativeFilePath = path.relative(configDir, post.filePath); 213 293 state.posts[relativeFilePath] = { 214 294 contentHash, 215 295 atUri, 216 296 lastPublished: new Date().toISOString(), 297 + bskyPostRef, 217 298 }; 218 299 } catch (error) { 219 300 const errorMessage = error instanceof Error ? error.message : String(error); ··· 230 311 log.message("\n---"); 231 312 log.info(`Published: ${publishedCount}`); 232 313 log.info(`Updated: ${updatedCount}`); 314 + if (bskyPostCount > 0) { 315 + log.info(`Bluesky posts: ${bskyPostCount}`); 316 + } 233 317 if (errorCount > 0) { 234 318 log.warn(`Errors: ${errorCount}`); 235 319 }
+3 -3
packages/cli/src/commands/sync.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import { command, flag } from "cmd-ts"; 2 3 import { select, spinner, log } from "@clack/prompts"; 3 4 import * as path from "path"; ··· 169 170 if (frontmatterUpdates.length > 0) { 170 171 s.start(`Updating frontmatter in ${frontmatterUpdates.length} files...`); 171 172 for (const { filePath, atUri } of frontmatterUpdates) { 172 - const file = Bun.file(filePath); 173 - const content = await file.text(); 173 + const content = await fs.readFile(filePath, "utf-8"); 174 174 const updated = updateFrontmatterWithAtUri(content, atUri); 175 - await Bun.write(filePath, updated); 175 + await fs.writeFile(filePath, updated); 176 176 log.message(` Updated: ${path.basename(filePath)}`); 177 177 } 178 178 s.stop("Frontmatter updated");
+3 -3
packages/cli/src/index.ts
··· 1 - #!/usr/bin/env bun 1 + #!/usr/bin/env node 2 2 3 3 import { run, subcommands } from "cmd-ts"; 4 4 import { authCommand } from "./commands/auth"; ··· 31 31 32 32 Publish evergreen content to the ATmosphere 33 33 34 - > https://tanlged.org/stevedylan.dev/sequoia 34 + > https://tangled.org/stevedylan.dev/sequoia 35 35 `, 36 - version: "0.1.0", 36 + version: "0.2.0", 37 37 cmds: { 38 38 auth: authCommand, 39 39 init: initCommand,
+196 -16
packages/cli/src/lib/atproto.ts
··· 1 1 import { AtpAgent } from "@atproto/api"; 2 + import * as fs from "fs/promises"; 2 3 import * as path from "path"; 3 - import type { Credentials, BlogPost, BlobObject, PublisherConfig } from "./types"; 4 + import * as mimeTypes from "mime-types"; 5 + import type { Credentials, BlogPost, BlobObject, PublisherConfig, StrongRef } from "./types"; 4 6 import { stripMarkdownForText } from "./markdown"; 7 + 8 + async function fileExists(filePath: string): Promise<boolean> { 9 + try { 10 + await fs.access(filePath); 11 + return true; 12 + } catch { 13 + return false; 14 + } 15 + } 5 16 6 17 export async function resolveHandleToPDS(handle: string): Promise<string> { 7 18 // First, resolve the handle to a DID ··· 87 98 agent: AtpAgent, 88 99 imagePath: string 89 100 ): Promise<BlobObject | undefined> { 90 - const file = Bun.file(imagePath); 91 - 92 - if (!(await file.exists())) { 101 + if (!(await fileExists(imagePath))) { 93 102 return undefined; 94 103 } 95 104 96 105 try { 97 - const imageBuffer = await file.arrayBuffer(); 98 - const mimeType = file.type || "application/octet-stream"; 106 + const imageBuffer = await fs.readFile(imagePath); 107 + const mimeType = mimeTypes.lookup(imagePath) || "application/octet-stream"; 99 108 100 109 const response = await agent.com.atproto.repo.uploadBlob( 101 110 new Uint8Array(imageBuffer), ··· 118 127 } 119 128 } 120 129 121 - export function resolveImagePath( 130 + export async function resolveImagePath( 122 131 ogImage: string, 123 132 imagesDir: string | undefined, 124 133 contentDir: string 125 - ): string | null { 134 + ): Promise<string | null> { 126 135 // Try multiple resolution strategies 127 136 const filename = path.basename(ogImage); 128 137 129 138 // 1. 
If imagesDir is specified, look there 130 139 if (imagesDir) { 131 140 const imagePath = path.join(imagesDir, filename); 132 - try { 133 - const stat = Bun.file(imagePath); 141 + if (await fileExists(imagePath)) { 142 + const stat = await fs.stat(imagePath); 134 143 if (stat.size > 0) { 135 144 return imagePath; 136 145 } 137 - } catch { 138 - // File doesn't exist, continue 139 146 } 140 147 } 141 148 ··· 146 153 147 154 // 3. Try relative to content directory 148 155 const contentRelative = path.join(contentDir, ogImage); 149 - try { 150 - const stat = Bun.file(contentRelative); 156 + if (await fileExists(contentRelative)) { 157 + const stat = await fs.stat(contentRelative); 151 158 if (stat.size > 0) { 152 159 return contentRelative; 153 160 } 154 - } catch { 155 - // File doesn't exist 156 161 } 157 162 158 163 return null; ··· 347 352 348 353 return response.data.uri; 349 354 } 355 + 356 + // --- Bluesky Post Creation --- 357 + 358 + export interface CreateBlueskyPostOptions { 359 + title: string; 360 + description?: string; 361 + canonicalUrl: string; 362 + coverImage?: BlobObject; 363 + publishedAt: string; // Used as createdAt for the post 364 + } 365 + 366 + /** 367 + * Count graphemes in a string (for Bluesky's 300 grapheme limit) 368 + */ 369 + function countGraphemes(str: string): number { 370 + // Use Intl.Segmenter if available, otherwise fallback to spread operator 371 + if (typeof Intl !== "undefined" && Intl.Segmenter) { 372 + const segmenter = new Intl.Segmenter("en", { granularity: "grapheme" }); 373 + return [...segmenter.segment(str)].length; 374 + } 375 + return [...str].length; 376 + } 377 + 378 + /** 379 + * Truncate a string to a maximum number of graphemes 380 + */ 381 + function truncateToGraphemes(str: string, maxGraphemes: number): string { 382 + if (typeof Intl !== "undefined" && Intl.Segmenter) { 383 + const segmenter = new Intl.Segmenter("en", { granularity: "grapheme" }); 384 + const segments = [...segmenter.segment(str)]; 385 + if 
(segments.length <= maxGraphemes) return str; 386 + return segments.slice(0, maxGraphemes - 3).map(s => s.segment).join("") + "..."; 387 + } 388 + // Fallback 389 + const chars = [...str]; 390 + if (chars.length <= maxGraphemes) return str; 391 + return chars.slice(0, maxGraphemes - 3).join("") + "..."; 392 + } 393 + 394 + /** 395 + * Create a Bluesky post with external link embed 396 + */ 397 + export async function createBlueskyPost( 398 + agent: AtpAgent, 399 + options: CreateBlueskyPostOptions 400 + ): Promise<StrongRef> { 401 + const { title, description, canonicalUrl, coverImage, publishedAt } = options; 402 + 403 + // Build post text: title + description + URL 404 + // Max 300 graphemes for Bluesky posts 405 + const MAX_GRAPHEMES = 300; 406 + 407 + let postText: string; 408 + const urlPart = `\n\n${canonicalUrl}`; 409 + const urlGraphemes = countGraphemes(urlPart); 410 + 411 + if (description) { 412 + // Try: title + description + URL 413 + const fullText = `${title}\n\n${description}${urlPart}`; 414 + if (countGraphemes(fullText) <= MAX_GRAPHEMES) { 415 + postText = fullText; 416 + } else { 417 + // Truncate description to fit 418 + const availableForDesc = MAX_GRAPHEMES - countGraphemes(title) - countGraphemes("\n\n") - urlGraphemes - countGraphemes("\n\n"); 419 + if (availableForDesc > 10) { 420 + const truncatedDesc = truncateToGraphemes(description, availableForDesc); 421 + postText = `${title}\n\n${truncatedDesc}${urlPart}`; 422 + } else { 423 + // Just title + URL 424 + postText = `${title}${urlPart}`; 425 + } 426 + } 427 + } else { 428 + // Just title + URL 429 + postText = `${title}${urlPart}`; 430 + } 431 + 432 + // Final truncation if still too long (shouldn't happen but safety check) 433 + if (countGraphemes(postText) > MAX_GRAPHEMES) { 434 + postText = truncateToGraphemes(postText, MAX_GRAPHEMES); 435 + } 436 + 437 + // Calculate byte indices for the URL facet 438 + const encoder = new TextEncoder(); 439 + const urlStartInText = 
postText.lastIndexOf(canonicalUrl); 440 + const beforeUrl = postText.substring(0, urlStartInText); 441 + const byteStart = encoder.encode(beforeUrl).length; 442 + const byteEnd = byteStart + encoder.encode(canonicalUrl).length; 443 + 444 + // Build facets for the URL link 445 + const facets = [ 446 + { 447 + index: { 448 + byteStart, 449 + byteEnd, 450 + }, 451 + features: [ 452 + { 453 + $type: "app.bsky.richtext.facet#link", 454 + uri: canonicalUrl, 455 + }, 456 + ], 457 + }, 458 + ]; 459 + 460 + // Build external embed 461 + const embed: Record<string, unknown> = { 462 + $type: "app.bsky.embed.external", 463 + external: { 464 + uri: canonicalUrl, 465 + title: title.substring(0, 500), // Max 500 chars for title 466 + description: (description || "").substring(0, 1000), // Max 1000 chars for description 467 + }, 468 + }; 469 + 470 + // Add thumbnail if coverImage is available 471 + if (coverImage) { 472 + (embed.external as Record<string, unknown>).thumb = coverImage; 473 + } 474 + 475 + // Create the post record 476 + const record: Record<string, unknown> = { 477 + $type: "app.bsky.feed.post", 478 + text: postText, 479 + facets, 480 + embed, 481 + createdAt: new Date(publishedAt).toISOString(), 482 + }; 483 + 484 + const response = await agent.com.atproto.repo.createRecord({ 485 + repo: agent.session!.did, 486 + collection: "app.bsky.feed.post", 487 + record, 488 + }); 489 + 490 + return { 491 + uri: response.data.uri, 492 + cid: response.data.cid, 493 + }; 494 + } 495 + 496 + /** 497 + * Add bskyPostRef to an existing document record 498 + */ 499 + export async function addBskyPostRefToDocument( 500 + agent: AtpAgent, 501 + documentAtUri: string, 502 + bskyPostRef: StrongRef 503 + ): Promise<void> { 504 + const parsed = parseAtUri(documentAtUri); 505 + if (!parsed) { 506 + throw new Error(`Invalid document URI: ${documentAtUri}`); 507 + } 508 + 509 + // Fetch existing record 510 + const existingRecord = await agent.com.atproto.repo.getRecord({ 511 + repo: 
parsed.did, 512 + collection: parsed.collection, 513 + rkey: parsed.rkey, 514 + }); 515 + 516 + // Add bskyPostRef to the record 517 + const updatedRecord = { 518 + ...(existingRecord.data.value as Record<string, unknown>), 519 + bskyPostRef, 520 + }; 521 + 522 + // Update the record 523 + await agent.com.atproto.repo.putRecord({ 524 + repo: parsed.did, 525 + collection: parsed.collection, 526 + rkey: parsed.rkey, 527 + record: updatedRecord, 528 + }); 529 + }
+21 -9
packages/cli/src/lib/config.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import * as path from "path"; 2 - import type { PublisherConfig, PublisherState, FrontmatterMapping } from "./types"; 3 + import type { PublisherConfig, PublisherState, FrontmatterMapping, BlueskyConfig } from "./types"; 3 4 4 5 const CONFIG_FILENAME = "sequoia.json"; 5 6 const STATE_FILENAME = ".sequoia-state.json"; 6 7 8 + async function fileExists(filePath: string): Promise<boolean> { 9 + try { 10 + await fs.access(filePath); 11 + return true; 12 + } catch { 13 + return false; 14 + } 15 + } 16 + 7 17 export async function findConfig( 8 18 startDir: string = process.cwd(), 9 19 ): Promise<string | null> { ··· 11 21 12 22 while (true) { 13 23 const configPath = path.join(currentDir, CONFIG_FILENAME); 14 - const file = Bun.file(configPath); 15 24 16 - if (await file.exists()) { 25 + if (await fileExists(configPath)) { 17 26 return configPath; 18 27 } 19 28 ··· 38 47 } 39 48 40 49 try { 41 - const file = Bun.file(resolvedPath); 42 - const content = await file.text(); 50 + const content = await fs.readFile(resolvedPath, "utf-8"); 43 51 const config = JSON.parse(content) as PublisherConfig; 44 52 45 53 // Validate required fields ··· 68 76 pdsUrl?: string; 69 77 frontmatter?: FrontmatterMapping; 70 78 ignore?: string[]; 79 + bluesky?: BlueskyConfig; 71 80 }): string { 72 81 const config: Record<string, unknown> = { 73 82 siteUrl: options.siteUrl, ··· 102 111 103 112 if (options.ignore && options.ignore.length > 0) { 104 113 config.ignore = options.ignore; 114 + } 115 + 116 + if (options.bluesky) { 117 + config.bluesky = options.bluesky; 105 118 } 106 119 107 120 return JSON.stringify(config, null, 2); ··· 109 122 110 123 export async function loadState(configDir: string): Promise<PublisherState> { 111 124 const statePath = path.join(configDir, STATE_FILENAME); 112 - const file = Bun.file(statePath); 113 125 114 - if (!(await file.exists())) { 126 + if (!(await fileExists(statePath))) { 115 127 return { posts: {} }; 116 
128 } 117 129 118 130 try { 119 - const content = await file.text(); 131 + const content = await fs.readFile(statePath, "utf-8"); 120 132 return JSON.parse(content) as PublisherState; 121 133 } catch { 122 134 return { posts: {} }; ··· 128 140 state: PublisherState, 129 141 ): Promise<void> { 130 142 const statePath = path.join(configDir, STATE_FILENAME); 131 - await Bun.write(statePath, JSON.stringify(state, null, 2)); 143 + await fs.writeFile(statePath, JSON.stringify(state, null, 2)); 132 144 } 133 145 134 146 export function getStatePath(configDir: string): string {
+15 -6
packages/cli/src/lib/credentials.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import * as path from "path"; 2 3 import * as os from "os"; 3 4 import type { Credentials } from "./types"; ··· 8 9 // Stored credentials keyed by identifier 9 10 type CredentialsStore = Record<string, Credentials>; 10 11 12 + async function fileExists(filePath: string): Promise<boolean> { 13 + try { 14 + await fs.access(filePath); 15 + return true; 16 + } catch { 17 + return false; 18 + } 19 + } 20 + 11 21 /** 12 22 * Load all stored credentials 13 23 */ 14 24 async function loadCredentialsStore(): Promise<CredentialsStore> { 15 - const file = Bun.file(CREDENTIALS_FILE); 16 - if (!(await file.exists())) { 25 + if (!(await fileExists(CREDENTIALS_FILE))) { 17 26 return {}; 18 27 } 19 28 20 29 try { 21 - const content = await file.text(); 30 + const content = await fs.readFile(CREDENTIALS_FILE, "utf-8"); 22 31 const parsed = JSON.parse(content); 23 32 24 33 // Handle legacy single-credential format (migrate on read) ··· 37 46 * Save the entire credentials store 38 47 */ 39 48 async function saveCredentialsStore(store: CredentialsStore): Promise<void> { 40 - await Bun.$`mkdir -p ${CONFIG_DIR}`; 41 - await Bun.write(CREDENTIALS_FILE, JSON.stringify(store, null, 2)); 42 - await Bun.$`chmod 600 ${CREDENTIALS_FILE}`; 49 + await fs.mkdir(CONFIG_DIR, { recursive: true }); 50 + await fs.writeFile(CREDENTIALS_FILE, JSON.stringify(store, null, 2)); 51 + await fs.chmod(CREDENTIALS_FILE, 0o600); 43 52 } 44 53 45 54 /**
+16 -9
packages/cli/src/lib/markdown.ts
··· 1 + import * as fs from "fs/promises"; 1 2 import * as path from "path"; 2 - import { Glob } from "bun"; 3 + import { glob } from "glob"; 4 + import { minimatch } from "minimatch"; 3 5 import type { PostFrontmatter, BlogPost, FrontmatterMapping } from "./types"; 4 6 5 7 export function parseFrontmatter(content: string, mapping?: FrontmatterMapping): { ··· 97 99 const tagsField = mapping?.tags || "tags"; 98 100 frontmatter.tags = raw[tagsField] || raw.tags; 99 101 102 + // Draft mapping 103 + const draftField = mapping?.draft || "draft"; 104 + const draftValue = raw[draftField] ?? raw.draft; 105 + if (draftValue !== undefined) { 106 + frontmatter.draft = draftValue === true || draftValue === "true"; 107 + } 108 + 100 109 // Always preserve atUri (internal field) 101 110 frontmatter.atUri = raw.atUri; 102 111 ··· 120 129 121 130 function shouldIgnore(relativePath: string, ignorePatterns: string[]): boolean { 122 131 for (const pattern of ignorePatterns) { 123 - const glob = new Glob(pattern); 124 - if (glob.match(relativePath)) { 132 + if (minimatch(relativePath, pattern)) { 125 133 return true; 126 134 } 127 135 } ··· 137 145 const posts: BlogPost[] = []; 138 146 139 147 for (const pattern of patterns) { 140 - const glob = new Glob(pattern); 141 - 142 - for await (const relativePath of glob.scan({ 148 + const files = await glob(pattern, { 143 149 cwd: contentDir, 144 150 absolute: false, 145 - })) { 151 + }); 152 + 153 + for (const relativePath of files) { 146 154 // Skip files matching ignore patterns 147 155 if (shouldIgnore(relativePath, ignorePatterns)) { 148 156 continue; 149 157 } 150 158 151 159 const filePath = path.join(contentDir, relativePath); 152 - const file = Bun.file(filePath); 153 - const rawContent = await file.text(); 160 + const rawContent = await fs.readFile(filePath, "utf-8"); 154 161 155 162 try { 156 163 const { frontmatter, body } = parseFrontmatter(rawContent, frontmatterMapping);
+16
packages/cli/src/lib/types.ts
··· 4 4 publishDate?: string; // Field name for publish date (default: "publishDate", also checks "pubDate", "date", "createdAt", "created_at") 5 5 coverImage?: string; // Field name for cover image (default: "ogImage") 6 6 tags?: string; // Field name for tags (default: "tags") 7 + draft?: string; // Field name for draft status (default: "draft") 8 + } 9 + 10 + // Strong reference for Bluesky post (com.atproto.repo.strongRef) 11 + export interface StrongRef { 12 + uri: string; // at:// URI format 13 + cid: string; // Content ID 14 + } 15 + 16 + // Bluesky posting configuration 17 + export interface BlueskyConfig { 18 + enabled: boolean; 19 + maxAgeDays?: number; // Only post if published within N days (default: 7) 7 20 } 8 21 9 22 export interface PublisherConfig { ··· 18 31 identity?: string; // Which stored identity to use (matches identifier) 19 32 frontmatter?: FrontmatterMapping; // Custom frontmatter field mappings 20 33 ignore?: string[]; // Glob patterns for files to ignore (e.g., ["_index.md", "**/drafts/**"]) 34 + bluesky?: BlueskyConfig; // Optional Bluesky posting configuration 21 35 } 22 36 23 37 export interface Credentials { ··· 33 47 tags?: string[]; 34 48 ogImage?: string; 35 49 atUri?: string; 50 + draft?: boolean; 36 51 } 37 52 38 53 export interface BlogPost { ··· 62 77 contentHash: string; 63 78 atUri?: string; 64 79 lastPublished?: string; 80 + bskyPostRef?: StrongRef; // Reference to corresponding Bluesky post 65 81 } 66 82 67 83 export interface PublicationRecord {
+11 -20
packages/cli/tsconfig.json
··· 1 1 { 2 2 "compilerOptions": { 3 - // Environment setup & latest features 4 - "lib": ["ESNext"], 5 - "target": "ESNext", 6 - "module": "Preserve", 7 - "moduleDetection": "force", 8 - "jsx": "react-jsx", 9 - "allowJs": true, 10 - 11 - // Bundler mode 3 + "lib": ["ES2022"], 4 + "target": "ES2022", 5 + "module": "ESNext", 12 6 "moduleResolution": "bundler", 13 - "allowImportingTsExtensions": true, 14 - "verbatimModuleSyntax": true, 15 - "noEmit": true, 16 - 17 - // Best practices 7 + "outDir": "./dist", 8 + "rootDir": "./src", 9 + "declaration": true, 10 + "sourceMap": true, 18 11 "strict": true, 19 12 "skipLibCheck": true, 13 + "esModuleInterop": true, 14 + "resolveJsonModule": true, 15 + "forceConsistentCasingInFileNames": true, 20 16 "noFallthroughCasesInSwitch": true, 21 17 "noUncheckedIndexedAccess": true, 22 - "noImplicitOverride": true, 23 - 24 - // Some stricter flags (disabled by default) 25 18 "noUnusedLocals": false, 26 - "noUnusedParameters": false, 27 - "noPropertyAccessFromIndexSignature": false, 28 - "composite": true 19 + "noUnusedParameters": false 29 20 }, 30 21 "include": ["src"] 31 22 }