A CLI for publishing standard.site documents to ATProto sequoia.pub
standard site lexicon cli publishing

Compare changes

Choose any two refs to compare.

+2118 -1168
+22
.tangled/workflows/lint.yml
··· 1 + # Biome lint and format checks 2 + 3 + when: 4 + - event: ["push", "manual"] 5 + branch: ["main"] 6 + - event: ["pull_request"] 7 + branch: ["main"] 8 + 9 + engine: "nixery" 10 + 11 + dependencies: 12 + nixpkgs: 13 + - bun 14 + - biome 15 + 16 + steps: 17 + - name: "Install dependencies" 18 + command: "bun install" 19 + - name: "Lint check" 20 + command: "cd packages/cli && biome lint ." 21 + - name: "Format check" 22 + command: "cd packages/cli && biome format ."
+83
CHANGELOG.md
··· 1 + ## [0.2.1] - 2026-02-02 2 + 3 + ### โš™๏ธ Miscellaneous Tasks 4 + 5 + - Added CHANGELOG 6 + - Merge main into chore/fronmatter-config-updates 7 + - Added linting and formatting 8 + - Linting updates 9 + - Refactored to use fallback approach if frontmatter.slugField is provided or not 10 + - Version bump 11 + ## [0.2.0] - 2026-02-01 12 + 13 + ### ๐Ÿš€ Features 14 + 15 + - Added bskyPostRef 16 + - Added draft field to frontmatter config 17 + 18 + ### โš™๏ธ Miscellaneous Tasks 19 + 20 + - Resolved action items from issue #3 21 + - Adjusted tags to accept yaml multiline arrays for tags 22 + - Updated inject to handle new slug options 23 + - Updated comments 24 + - Update blog post 25 + - Fix blog build error 26 + - Adjust blog post 27 + - Updated docs 28 + - Version bump 29 + ## [0.1.1] - 2026-01-31 30 + 31 + ### ๐Ÿ› Bug Fixes 32 + 33 + - Fix tangled url to repo 34 + 35 + ### โš™๏ธ Miscellaneous Tasks 36 + 37 + - Merge branch 'main' into feat/blog-post 38 + - Updated blog post 39 + - Updated date 40 + - Added publishing 41 + - Spelling and grammar 42 + - Updated package scripts 43 + - Refactored codebase to use node and fs instead of bun 44 + - Version bump 45 + ## [0.1.0] - 2026-01-30 46 + 47 + ### ๐Ÿš€ Features 48 + 49 + - Init 50 + - Added blog post 51 + 52 + ### โš™๏ธ Miscellaneous Tasks 53 + 54 + - Updated package.json 55 + - Cleaned up commands and libs 56 + - Updated init commands 57 + - Updated greeting 58 + - Updated readme 59 + - Link updates 60 + - Version bump 61 + - Added hugo support through frontmatter parsing 62 + - Version bump 63 + - Updated docs 64 + - Adapted inject.ts pattern 65 + - Updated docs 66 + - Version bump" 67 + - Updated package scripts 68 + - Updated scripts 69 + - Added ignore field to config 70 + - Udpate docs 71 + - Version bump 72 + - Added tags to flow 73 + - Added ability to exit during init flow 74 + - Version bump 75 + - Updated docs 76 + - Updated links 77 + - Updated docs 78 + - Initial refactor 79 + - Checkpoint 
80 + - Refactored mapping 81 + - Docs updates 82 + - Docs updates 83 + - Version bump
+57 -10
bun.lock
··· 24 24 }, 25 25 "packages/cli": { 26 26 "name": "sequoia-cli", 27 - "version": "0.0.6", 27 + "version": "0.2.0", 28 28 "bin": { 29 - "sequoia": "dist/sequoia", 29 + "sequoia": "dist/index.js", 30 30 }, 31 31 "dependencies": { 32 32 "@atproto/api": "^0.18.17", 33 33 "@clack/prompts": "^1.0.0", 34 34 "cmd-ts": "^0.14.3", 35 + "glob": "^13.0.0", 36 + "mime-types": "^2.1.35", 37 + "minimatch": "^10.1.1", 35 38 }, 36 39 "devDependencies": { 37 - "@types/bun": "latest", 40 + "@biomejs/biome": "^2.3.13", 41 + "@types/mime-types": "^3.0.1", 42 + "@types/node": "^20", 38 43 }, 39 44 "peerDependencies": { 40 45 "typescript": "^5", ··· 100 105 101 106 "@babel/types": ["@babel/types@7.28.6", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg=="], 102 107 108 + "@biomejs/biome": ["@biomejs/biome@2.3.13", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.13", "@biomejs/cli-darwin-x64": "2.3.13", "@biomejs/cli-linux-arm64": "2.3.13", "@biomejs/cli-linux-arm64-musl": "2.3.13", "@biomejs/cli-linux-x64": "2.3.13", "@biomejs/cli-linux-x64-musl": "2.3.13", "@biomejs/cli-win32-arm64": "2.3.13", "@biomejs/cli-win32-x64": "2.3.13" }, "bin": { "biome": "bin/biome" } }, "sha512-Fw7UsV0UAtWIBIm0M7g5CRerpu1eKyKAXIazzxhbXYUyMkwNrkX/KLkGI7b+uVDQ5cLUMfOC9vR60q9IDYDstA=="], 109 + 110 + "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.13", "", { "os": "darwin", "cpu": "arm64" }, "sha512-0OCwP0/BoKzyJHnFdaTk/i7hIP9JHH9oJJq6hrSCPmJPo8JWcJhprK4gQlhFzrwdTBAW4Bjt/RmCf3ZZe59gwQ=="], 111 + 112 + "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.13", "", { "os": "darwin", "cpu": "x64" }, "sha512-AGr8OoemT/ejynbIu56qeil2+F2WLkIjn2d8jGK1JkchxnMUhYOfnqc9sVzcRxpG9Ycvw4weQ5sprRvtb7Yhcw=="], 113 + 114 + "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.13", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-xvOiFkrDNu607MPMBUQ6huHmBG1PZLOrqhtK6pXJW3GjfVqJg0Z/qpTdhXfcqWdSZHcT+Nct2fOgewZvytESkw=="], 115 + 116 + "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.13", "", { "os": "linux", "cpu": "arm64" }, "sha512-TUdDCSY+Eo/EHjhJz7P2GnWwfqet+lFxBZzGHldrvULr59AgahamLs/N85SC4+bdF86EhqDuuw9rYLvLFWWlXA=="], 117 + 118 + "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-s+YsZlgiXNq8XkgHs6xdvKDFOj/bwTEevqEY6rC2I3cBHbxXYU1LOZstH3Ffw9hE5tE1sqT7U23C00MzkXztMw=="], 119 + 120 + "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-0bdwFVSbbM//Sds6OjtnmQGp4eUjOTt6kHvR/1P0ieR9GcTUAlPNvPC3DiavTqq302W34Ae2T6u5VVNGuQtGlQ=="], 121 + 122 + "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.13", "", { "os": "win32", "cpu": "arm64" }, "sha512-QweDxY89fq0VvrxME+wS/BXKmqMrOTZlN9SqQ79kQSIc3FrEwvW/PvUegQF6XIVaekncDykB5dzPqjbwSKs9DA=="], 123 + 124 + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.13", "", { "os": "win32", "cpu": "x64" }, "sha512-trDw2ogdM2lyav9WFQsdsfdVy1dvZALymRpgmWsvSez0BJzBjulhOT/t+wyKeh3pZWvwP3VMs1SoOKwO3wecMQ=="], 125 + 103 126 "@braintree/sanitize-url": ["@braintree/sanitize-url@7.1.1", "", {}, "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw=="], 104 127 105 128 "@chevrotain/cst-dts-gen": ["@chevrotain/cst-dts-gen@11.0.3", "", { "dependencies": { "@chevrotain/gast": "11.0.3", "@chevrotain/types": "11.0.3", "lodash-es": "4.17.21" } }, "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ=="], ··· 187 210 "@iconify/types": ["@iconify/types@2.0.0", "", {}, "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg=="], 188 211 189 212 "@iconify/utils": ["@iconify/utils@3.1.0", "", { "dependencies": { "@antfu/install-pkg": "^1.1.0", "@iconify/types": "^2.0.0", "mlly": "^1.8.0" } }, 
"sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw=="], 213 + 214 + "@isaacs/balanced-match": ["@isaacs/balanced-match@4.0.1", "", {}, "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ=="], 215 + 216 + "@isaacs/brace-expansion": ["@isaacs/brace-expansion@5.0.0", "", { "dependencies": { "@isaacs/balanced-match": "^4.0.1" } }, "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA=="], 190 217 191 218 "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="], 192 219 ··· 524 551 525 552 "@types/mdx": ["@types/mdx@2.0.13", "", {}, "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw=="], 526 553 554 + "@types/mime-types": ["@types/mime-types@3.0.1", "", {}, "sha512-xRMsfuQbnRq1Ef+C+RKaENOxXX87Ygl38W1vDfPHRku02TgQr+Qd8iivLtAMcR0KF5/29xlnFihkTlbqFrGOVQ=="], 555 + 527 556 "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], 528 557 529 - "@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 558 + "@types/node": ["@types/node@20.19.30", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-WJtwWJu7UdlvzEAUm484QNg5eAoq5QR08KDNx7g45Usrs2NtOPiX8ugDqmKdXkyL03rBqU5dYNYVQetEpBHq2g=="], 530 559 531 560 "@types/react": ["@types/react@19.2.10", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-WPigyYuGhgZ/cTPRXB2EwUw+XvsRA3GqHlsP4qteqrnnjDrApbS7MxcGr/hke5iUoeB7E/gQtrs9I37zAJ0Vjw=="], 532 561 ··· 834 863 835 864 "github-slugger": ["github-slugger@2.0.0", "", {}, 
"sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], 836 865 866 + "glob": ["glob@13.0.0", "", { "dependencies": { "minimatch": "^10.1.1", "minipass": "^7.1.2", "path-scurry": "^2.0.0" } }, "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA=="], 867 + 837 868 "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], 838 869 839 870 "hachure-fill": ["hachure-fill@0.5.2", "", {}, "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg=="], ··· 1094 1125 1095 1126 "mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], 1096 1127 1097 - "mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], 1128 + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], 1129 + 1130 + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], 1098 1131 1099 1132 "mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="], 1100 1133 1101 1134 "mini-svg-data-uri": ["mini-svg-data-uri@1.4.4", "", { "bin": { "mini-svg-data-uri": "cli.js" } }, "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg=="], 1102 1135 1136 + "minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="], 1137 + 1138 + "minipass": ["minipass@7.1.2", "", {}, 
"sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], 1139 + 1103 1140 "minisearch": ["minisearch@7.2.0", "", {}, "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg=="], 1104 1141 1105 1142 "mlly": ["mlly@1.8.0", "", { "dependencies": { "acorn": "^8.15.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "ufo": "^1.6.1" } }, "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g=="], ··· 1149 1186 "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], 1150 1187 1151 1188 "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], 1189 + 1190 + "path-scurry": ["path-scurry@2.0.1", "", { "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" } }, "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA=="], 1152 1191 1153 1192 "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], 1154 1193 ··· 1336 1375 1337 1376 "uint8arrays": ["uint8arrays@3.0.0", "", { "dependencies": { "multiformats": "^9.4.2" } }, "sha512-HRCx0q6O9Bfbp+HHSfQQKD7wU70+lydKVt4EghkdOvlK/NlrF90z+eXV34mUd48rNvVJXwkrMSPpCATkct8fJA=="], 1338 1377 1339 - "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1378 + "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], 1340 1379 1341 1380 "unicode-segmenter": ["unicode-segmenter@0.14.5", "", {}, "sha512-jHGmj2LUuqDcX3hqY12Ql+uhUTn8huuxNZGq7GvtF6bSybzH3aFgedYu/KTzQStEgt1Ra2F3HxadNXsNjb3m3g=="], 1342 1381 ··· 1442 1481 1443 1482 
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], 1444 1483 1484 + "bun-types/@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 1485 + 1445 1486 "chevrotain/lodash-es": ["lodash-es@4.17.21", "", {}, "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw=="], 1487 + 1488 + "compressible/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], 1446 1489 1447 1490 "compression/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], 1448 1491 ··· 1456 1499 1457 1500 "d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="], 1458 1501 1502 + "eval/@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], 1503 + 1459 1504 "hast-util-from-dom/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w=="], 1460 1505 1461 1506 "hast-util-from-parse5/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", 
"space-separated-tokens": "^2.0.0" } }, "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w=="], ··· 1473 1518 "p-locate/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], 1474 1519 1475 1520 "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], 1521 + 1522 + "path-scurry/lru-cache": ["lru-cache@11.2.5", "", {}, "sha512-vFrFJkWtJvJnD5hg+hJvVE8Lh/TcMzKnTgCWmtBipwI5yLX/iX+5UB2tfuyODF5E7k9xEzMdYgGqaSb1c0c5Yw=="], 1476 1523 1477 1524 "radix-ui/@radix-ui/react-label": ["@radix-ui/react-label@2.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ=="], 1478 1525 ··· 1480 1527 1481 1528 "send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], 1482 1529 1483 - "sequoia-cli/@types/bun": ["@types/bun@1.3.8", "", { "dependencies": { "bun-types": "1.3.8" } }, "sha512-3LvWJ2q5GerAXYxO2mffLTqOzEu5qnhEAlh48Vnu8WQfnmSwbgagjGZV6BoHKJztENYEDn6QmVd949W4uESRJA=="], 1484 - 1485 1530 "vite/fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], 1486 1531 1487 1532 "@radix-ui/react-label/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, 
"peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="], 1488 1533 1489 1534 "@shikijs/twoslash/twoslash/twoslash-protocol": ["twoslash-protocol@0.2.12", "", {}, "sha512-5qZLXVYfZ9ABdjqbvPc4RWMr7PrpPaaDSeaYY55vl/w1j6H6kzsWK/urAEIXlzYlyrFmyz1UbwIt+AA0ck+wbg=="], 1535 + 1536 + "bun-types/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1490 1537 1491 1538 "compression/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 1492 1539 ··· 1498 1545 1499 1546 "d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="], 1500 1547 1548 + "eval/@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 1549 + 1501 1550 "hast-util-from-dom/hastscript/property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], 1502 1551 1503 1552 "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], 1504 1553 1505 1554 "send/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], 1506 - 1507 - "sequoia-cli/@types/bun/bun-types": ["bun-types@1.3.8", "", { "dependencies": { "@types/node": "*" } }, "sha512-fL99nxdOWvV4LqjmC+8Q9kW3M4QTtTR1eePs94v5ctGqU8OeceWrSUaRw3JYb7tU3FkMIAjkueehrHPPPGKi5Q=="], 1508 1555 } 1509 1556 }
+2
docs/.gitignore
··· 1 1 .wrangler 2 + 3 + .sequoia-state.json
+7
docs/docs/pages/blog/index.mdx
··· 1 + --- 2 + layout: minimal 3 + --- 4 + 5 + # Blog 6 + 7 + ::blog-posts
+54
docs/docs/pages/blog/introducing-sequoia.mdx
··· 1 + --- 2 + layout: minimal 3 + title: "Introducing Sequoia: Publishing for the Open Web" 4 + date: 2026-01-30 5 + atUri: "at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.document/3mdnztyhoem2v" 6 + --- 7 + 8 + # Introducing Sequoia: Publishing for the Open Web 9 + 10 + ![hero](/hero.png) 11 + 12 + Today I'm excited to release a new tool for the [AT Protocol](https://atproto.com): Sequoia. This is a CLI tool that can take your existing self-hosted blog and publish it to the ATmosphere using [Standard.site](https://standard.site) lexicons. 13 + 14 + If you haven't explored ATProto you can find a primer [here](https://stevedylan.dev/posts/atproto-starter/), but in short, it's a new way to publish content to the web that puts ownership and control back in the hands of users. Blogs in some ways have already been doing this, but they've been missing a key piece: distribution. One of the unique features of ATProto is [lexicons](https://atproto.com/guides/lexicon), which are schemas that apps build to create folders of content on a user's personal data server. The domain verified nature lets them be indexed and aggregated with ease. Outside of apps, lexicons can be extended by community members to build a common standard. That's exactly how [Standard.site](https://standard.site) was brought about, pushing a new way for standardizing publications and documents on ATProto. 15 + 16 + The founders and platforms behind the standard, [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), all serve to make creating and sharing blogs easy. If you are not a technical person and don't have a blog already, I would highly recommend checking all of them out! However, for those of us who already have blogs, there was a need for a tool that could make it easy to publish existing and new content with this new standard. Thus Sequoia was born. 
17 + 18 + Sequoia is a relatively simple CLI that can do the following: 19 + - Authenticate with your ATProto handle 20 + - Configure your blog through an interactive setup process 21 + - Create publication and document records on your PDS 22 + - Add necessary verification pieces to your site 23 + - Sync with existing records on your PDS 24 + 25 + It's designed to be run inside your existing repo, build a one-time config, and then be part of your regular workflow by publishing content or updating existing content, all following the Standard.site lexicons. The best part? It's designed to be fully interoperable. It doesn't matter if you're using Astro, 11ty, Hugo, Svelte, Next, Gatsby, Zola, you name it. If it's a static blog with markdown, Sequoia will work (and if for some reason it doesn't, [open an issue!](https://tangled.org/stevedylan.dev/sequoia/issues/new)). Here's a quick demo of Sequoia in action: 26 + 27 + <iframe 28 + className="w-full" 29 + style={{aspectRatio: "16/9"}} 30 + src="https://www.youtube.com/embed/sxursUHq5kw" 31 + title="YouTube video player" 32 + frameBorder="0" 33 + allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" 34 + referrerPolicy="strict-origin-when-cross-origin" 35 + allowFullScreen 36 + ></iframe> 37 + 38 + ATProto has proven to be one of the more exciting pieces of technology that has surfaced in the past few years, and it gives some of us hope for a web that is open once more. No more walled gardens, full control of our data, and connected through lexicons. 39 + 40 + Install Sequoia today and check out the [quickstart guide](/quickstart) to publish your content into the ATmosphere 🌳 41 + 42 + :::code-group 43 + ```bash [npm] 44 + npm i -g sequoia-cli 45 + ``` 46 + 47 + ```bash [pnpm] 48 + pnpm i -g sequoia-cli 49 + ``` 50 + 51 + ```bash [bun] 52 + bun i -g sequoia-cli 53 + ``` 54 + :::
+28 -2
docs/docs/pages/config.mdx
··· 14 14 | `pdsUrl` | `string` | No | `"https://bsky.social"` | PDS server URL, generated automatically | 15 15 | `identity` | `string` | No | - | Which stored identity to use | 16 16 | `frontmatter` | `object` | No | - | Custom frontmatter field mappings | 17 + | `frontmatter.slugField` | `string` | No | - | Frontmatter field to use for slug (defaults to filepath) | 17 18 | `ignore` | `string[]` | No | - | Glob patterns for files to ignore | 19 + | `removeIndexFromSlug` | `boolean` | No | `false` | Remove `/index` or `/_index` suffix from slugs | 20 + | `bluesky` | `object` | No | - | Bluesky posting configuration | 21 + | `bluesky.enabled` | `boolean` | No | `false` | Post to Bluesky when publishing documents | 22 + | `bluesky.maxAgeDays` | `number` | No | `30` | Only post documents published within this many days | 18 23 19 24 ### Example 20 25 ··· 31 36 "frontmatter": { 32 37 "publishDate": "date" 33 38 }, 34 - "ignore": ["_index.md"] 39 + "ignore": ["_index.md"], 40 + "bluesky": { 41 + "enabled": true, 42 + "maxAgeDays": 30 43 + } 35 44 } 36 45 ``` 37 46 ··· 44 53 | `publishDate` | `string` | Yes | `"publishDate"`, `"pubDate"`, `"date"`, `"createdAt"`, `"created_at"` | Publication date | 45 54 | `coverImage` | `string` | No | `"ogImage"` | Cover image filename | 46 55 | `tags` | `string[]` | No | `"tags"` | Post tags/categories | 56 + | `draft` | `boolean` | No | `"draft"` | If `true`, post is skipped during publish | 47 57 48 58 ### Example 49 59 ··· 54 64 publishDate: 2024-01-15 55 65 ogImage: cover.jpg 56 66 tags: [welcome, intro] 67 + draft: false 57 68 --- 58 69 ``` 59 70 ··· 65 76 { 66 77 "frontmatter": { 67 78 "publishDate": "date", 68 - "coverImage": "thumbnail" 79 + "coverImage": "thumbnail", 80 + "draft": "private" 69 81 } 70 82 } 71 83 ``` 84 + 85 + ### Slug Configuration 86 + 87 + By default, slugs are generated from the filepath (e.g., `posts/my-post.md` becomes `posts/my-post`). 
To use a frontmatter field instead: 88 + 89 + ```json 90 + { 91 + "frontmatter": { 92 + "slugField": "url" 93 + } 94 + } 95 + ``` 96 + 97 + If the frontmatter field is not found, it falls back to the filepath. 72 98 73 99 ### Ignoring Files 74 100
+40 -2
docs/docs/pages/publishing.mdx
··· 10 10 sequoia publish --dry-run 11 11 ``` 12 12 13 - This will print out the posts that it has discovered, what will be published, and how many. Once everything looks good, send it! 13 + This will print out the posts that it has discovered, what will be published, and how many. If Bluesky posting is enabled, it will also show which posts will be shared to Bluesky. Once everything looks good, send it! 14 14 15 15 ```bash [Terminal] 16 16 sequoia publish ··· 23 23 If you happen to lose the state file or if you want to pull down records you already have published, you can use the `sync` command. 24 24 25 25 ```bash [Terminal] 26 - seuqoia sync 26 + sequoia sync 27 27 28 28 29 29 Sync will use your ATProto handle to look through all of the `standard.site.document` records on your PDS, and pull down the records that are for the publication in the config. 30 + 31 + ## Bluesky Posting 32 + 33 + Sequoia can automatically post to Bluesky when new documents are published. Enable this in your config: 34 + 35 + ```json 36 + { 37 + "bluesky": { 38 + "enabled": true, 39 + "maxAgeDays": 30 40 + } 41 + } 42 + ``` 43 + 44 + When enabled, each new document will create a Bluesky post with the title, description, and canonical URL. If a cover image exists, it will be embedded in the post. The combined content is limited to 300 characters. 45 + 46 + The `maxAgeDays` setting prevents flooding your feed when first setting up Sequoia. For example, if you have 40 existing blog posts, only those published within the last 30 days will be posted to Bluesky. 47 + 48 + ## Draft Posts 49 + 50 + Posts with `draft: true` in their frontmatter are automatically skipped during publishing. This lets you work on content without accidentally publishing it. 
51 + 52 + ```yaml 53 + --- 54 + title: Work in Progress 55 + draft: true 56 + --- 57 + ``` 58 + 59 + If your framework uses a different field name (like `private` or `hidden`), configure it in `sequoia.json`: 60 + 61 + ```json 62 + { 63 + "frontmatter": { 64 + "draft": "private" 65 + } 66 + } 67 + ``` 30 68 31 69 ## Troubleshooting 32 70
+2 -2
docs/docs/pages/quickstart.mdx
··· 33 33 34 34 ### Authorize 35 35 36 - In order for Sequoia to publish or update records on your PDS, you need to authoize it with your ATProto handle and an app password. 36 + In order for Sequoia to publish or update records on your PDS, you need to authorize it with your ATProto handle and an app password. 37 37 38 38 :::tip 39 39 You can create an app password [here](https://bsky.app/settings/app-passwords) ··· 59 59 - **Public/static directory** - The path for the folder where your public items go, e.g. `./public`. Generally used for opengraph images or icons, but in this case we need it to store a `.well-known` verification for your blog, [read more here](/verifying). 60 60 - **Build output directory** - Where your published HTML, CSS, and JS lives, e.g. `./dist` 61 61 - **URL path prefix for posts** - The path that goes before a post slug, e.g. the prefix for `https://sequoia.pub/blog/hello` would be `/blog`. 62 - - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with infomation like `title`, `description`, and `publishedDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 62 + - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with information like `title`, `description`, and `publishDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 63 63 - **Publication setup** - Here you can choose to `Create a new publication` which will create a `site.standard.publication` record on your PDS, or you can `Use an existing publication AT URI`. If you haven't done this before, select `Create a new publication`. 64 64 - **Publication name** - The name of your blog 65 65 - **Publication description** - A description for your blog
+2 -2
docs/docs/pages/setup.mdx
··· 28 28 29 29 ## Authorize 30 30 31 - In order for Sequoia to publish or update records on your PDS, you need to authoize it with your ATProto handle and an app password. 31 + In order for Sequoia to publish or update records on your PDS, you need to authorize it with your ATProto handle and an app password. 32 32 33 33 :::tip 34 34 You can create an app password [here](https://bsky.app/settings/app-passwords) ··· 56 56 - **Public/static directory** - The path for the folder where your public items go, e.g. `./public`. Generally used for opengraph images or icons, but in this case we need it to store a `.well-known` verification for your blog, [read more here](/verifying). 57 57 - **Build output directory** - Where your published HTML, CSS, and JS lives, e.g. `./dist` 58 58 - **URL path prefix for posts** - The path that goes before a post slug, e.g. the prefix for `https://sequoia.pub/blog/hello` would be `/blog`. 59 - - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with infomation like `title`, `description`, and `publishedDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 59 + - **Configure your frontmatter field mappings** - In your markdown posts there is usually frontmatter with information like `title`, `description`, and `publishDate`. Follow the prompts and enter the names for your frontmatter fields so Sequoia can use them for creating standard.site documents. 60 60 - **Publication setup** - Here you can choose to `Create a new publication` which will create a `site.standard.publication` record on your PDS, or you can `Use an existing publication AT URI`. If you haven't done this before, select `Create a new publication`. 61 61 - **Publication name** - The name of your blog 62 62 - **Publication description** - A description for your blog
+2 -2
docs/docs/pages/verifying.mdx
··· 3 3 In order for your posts to show up on indexers you need to make sure your publication and your documents are verified. 4 4 5 5 :::tip 6 - You an learn more about Standard.site verification [here](https://standard.site/) 6 + You can learn more about Standard.site verification [here](https://standard.site/) 7 7 ::: 8 8 9 9 ## Publication Verification ··· 22 22 23 23 ### pds.ls 24 24 25 - Visit [pds.ls](https://pds.ls) and in the search bar paste in a `arUri` for either your publication or document, click the info tab, and then click the "info" tab. This will have a schema verification that will make sure the fields are accurate, however this will not cover Standard.site verification as perscribed on their website. 25 + Visit [pds.ls](https://pds.ls) and in the search bar paste in a `arUri` for either your publication or document, click the info tab, and then click the "info" tab. This will have a schema verification that will make sure the fields are accurate, however this will not cover Standard.site verification as prescribed on their website. 26 26 27 27 ### Standard.site Validator 28 28
+2 -2
docs/docs/pages/what-is-sequoia.mdx
··· 3 3 Sequoia is a simple CLI that can be used to publish Standard.site lexicons to the AT Protocol. Yeah that's a mouthful; let's break it down. 
 4 4 
 5 5 - [AT Protocol](https://atproto.com) - As the site says, "The AT Protocol is an open, decentralized network for building social applications." In reality it's a bit more than that. It's a new way to publish content to the web that puts control back in the hands of users without sacrificing distribution. There's a lot to unpack, but you can find a primer [here](https://stevedylan.dev/posts/atproto-starter/). 
 6 - [Lexicons](https://atproto.com/guides/lexicon) - Lexicons are schemas used inside the AT Protocol. If you were to "like" a post, what would that consist of? Probably _who_ liked it, _what_ post was liked, and the _author_ of the post. The unique property to lexicons is that anyone can publish them and have them verified under a domain. Then these lexicons can be used to build apps by pulling a users records, aggregating them using an indexer, and a whole lot more! 
 7 - [Standard.site](https://standard.site) - Standard.site is a set of lexicons specailly designed for publishing content. It was started by the founders of [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), with the mission of finding a schema that can be used for blog posts and blog sites themselves (if you don't have a self-hosted blog, definitely check those platforms out!). So far it has proven to be the lexicon of choice for publishing content to ATProto with multiple tools and lexicons revolving around the standard. 
 6 + - [Lexicons](https://atproto.com/guides/lexicon) - Lexicons are schemas used inside the AT Protocol. If you were to "like" a post, what would that consist of? Probably _who_ liked it, _what_ post was liked, and the _author_ of the post. A unique property of lexicons is that anyone can publish them and have them verified under a domain. 
Then these lexicons can be used to build apps by pulling a user's records, aggregating them using an indexer, and a whole lot more! 
 7 + - [Standard.site](https://standard.site) - Standard.site is a set of lexicons specially designed for publishing content. It was started by the founders of [leaflet.pub](https://leaflet.pub), [pckt.blog](https://pckt.blog), and [offprint.app](https://offprint.app), with the mission of finding a schema that can be used for blog posts and blog sites themselves (if you don't have a self-hosted blog, definitely check those platforms out!). So far it has proven to be the lexicon of choice for publishing content to ATProto with multiple tools and lexicons revolving around the standard. 
 8 8 
 9 9 The goal of Sequoia is to make it easier for those with existing self-hosted blogs to publish their content to the ATmosphere, no matter what SSG or framework you might be using. As of right now the focus will be static sites, but if there is enough traction there might be a future package that can be used for SSR frameworks too. 
 10 10
docs/docs/public/.well-known/.gitkeep

This is a binary file and will not be displayed.

+1
docs/docs/public/.well-known/site.standard.publication
··· 1 + at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.publication/3mdnzt4rqr42v
docs/docs/public/hero.png

This is a binary file and will not be displayed.

+1 -1
docs/package.json
··· 6 6 "scripts": { 7 7 "dev": "vocs dev", 8 8 "build": "vocs build && bun inject-og-tags.ts", 9 - "deploy": "bun run build && bunx wrangler pages deploy docs/dist", 9 + "deploy": "bun run build && sequoia inject && bunx wrangler pages deploy docs/dist", 10 10 "preview": "vocs preview" 11 11 }, 12 12 "dependencies": {
+14
docs/sequoia.json
··· 1 + { 2 + "siteUrl": "https://sequoia.pub", 3 + "contentDir": "docs/pages/blog", 4 + "imagesDir": "docs/public", 5 + "publicDir": "docs/public", 6 + "outputDir": "docs/dist", 7 + "pathPrefix": "/blog", 8 + "publicationUri": "at://did:plc:kq6bvkw4sxof3vdinuitehn5/site.standard.publication/3mdnzt4rqr42v", 9 + "pdsUrl": "https://andromeda.social", 10 + "frontmatter": { 11 + "publishDate": "date" 12 + }, 13 + "ignore": ["index.mdx"] 14 + }
+1 -1
docs/vocs.config.ts
··· 17 17 topNav: [ 18 18 { text: "Docs", link: "/quickstart", match: "/" }, 19 19 { text: "Blog", link: "/blog" }, 20 - { text: "Tanlged", link: "https://tangled.org/stevedylan.dev/sequoia" }, 20 + { text: "Tangled", link: "https://tangled.org/stevedylan.dev/sequoia" }, 21 21 { text: "GitHub", link: "https://github.com/stevedylandev/sequoia" }, 22 22 ], 23 23 sidebar: [
+37
packages/cli/biome.json
··· 1 + { 2 + "$schema": "https://biomejs.dev/schemas/2.3.13/schema.json", 3 + "vcs": { 4 + "enabled": true, 5 + "clientKind": "git", 6 + "useIgnoreFile": true 7 + }, 8 + "files": { 9 + "includes": ["**", "!!**/dist"] 10 + }, 11 + "formatter": { 12 + "enabled": true, 13 + "indentStyle": "tab" 14 + }, 15 + "linter": { 16 + "enabled": true, 17 + "rules": { 18 + "recommended": true, 19 + "style": { 20 + "noNonNullAssertion": "off" 21 + } 22 + } 23 + }, 24 + "javascript": { 25 + "formatter": { 26 + "quoteStyle": "double" 27 + } 28 + }, 29 + "assist": { 30 + "enabled": true, 31 + "actions": { 32 + "source": { 33 + "organizeImports": "on" 34 + } 35 + } 36 + } 37 + }
+14 -8
packages/cli/package.json
··· 1 1 { 2 2 "name": "sequoia-cli", 3 - "version": "0.1.0", 4 - "module": "dist/index.js", 3 + "version": "0.2.1", 5 4 "type": "module", 6 5 "bin": { 7 - "sequoia": "dist/sequoia" 6 + "sequoia": "dist/index.js" 8 7 }, 9 8 "files": [ 10 9 "dist", 11 10 "README.md" 12 11 ], 13 - "main": "./dist/sequoia", 12 + "main": "./dist/index.js", 14 13 "exports": { 15 - ".": "./dist/sequoia" 14 + ".": "./dist/index.js" 16 15 }, 17 16 "scripts": { 18 - "build": "bun build src/index.ts --compile --outfile dist/sequoia", 17 + "lint": "biome lint --write", 18 + "format": "biome format --write", 19 + "build": "bun build src/index.ts --target node --outdir dist", 19 20 "dev": "bun run build && bun link", 20 21 "deploy": "bun run build && bun publish" 21 22 }, 22 23 "devDependencies": { 23 - "@types/bun": "latest" 24 + "@biomejs/biome": "^2.3.13", 25 + "@types/mime-types": "^3.0.1", 26 + "@types/node": "^20" 24 27 }, 25 28 "peerDependencies": { 26 29 "typescript": "^5" 27 30 }, 28 31 "dependencies": { 29 32 "@atproto/api": "^0.18.17", 33 + "@clack/prompts": "^1.0.0", 30 34 "cmd-ts": "^0.14.3", 31 - "@clack/prompts": "^1.0.0" 35 + "glob": "^13.0.0", 36 + "mime-types": "^2.1.35", 37 + "minimatch": "^10.1.1" 32 38 } 33 39 }
+152 -135
packages/cli/src/commands/auth.ts
··· 1 - import { command, flag, option, optional, string } from "cmd-ts"; 2 - import { note, text, password, confirm, select, spinner, log } from "@clack/prompts"; 3 1 import { AtpAgent } from "@atproto/api"; 4 2 import { 5 - saveCredentials, 6 - deleteCredentials, 7 - listCredentials, 8 - getCredentials, 9 - getCredentialsPath, 10 - } from "../lib/credentials"; 3 + confirm, 4 + log, 5 + note, 6 + password, 7 + select, 8 + spinner, 9 + text, 10 + } from "@clack/prompts"; 11 + import { command, flag, option, optional, string } from "cmd-ts"; 11 12 import { resolveHandleToPDS } from "../lib/atproto"; 13 + import { 14 + deleteCredentials, 15 + getCredentials, 16 + getCredentialsPath, 17 + listCredentials, 18 + saveCredentials, 19 + } from "../lib/credentials"; 12 20 import { exitOnCancel } from "../lib/prompts"; 13 21 14 22 export const authCommand = command({ 15 - name: "auth", 16 - description: "Authenticate with your ATProto PDS", 17 - args: { 18 - logout: option({ 19 - long: "logout", 20 - description: "Remove credentials for a specific identity (or all if only one exists)", 21 - type: optional(string), 22 - }), 23 - list: flag({ 24 - long: "list", 25 - description: "List all stored identities", 26 - }), 27 - }, 28 - handler: async ({ logout, list }) => { 29 - // List identities 30 - if (list) { 31 - const identities = await listCredentials(); 32 - if (identities.length === 0) { 33 - log.info("No stored identities"); 34 - } else { 35 - log.info("Stored identities:"); 36 - for (const id of identities) { 37 - console.log(` - ${id}`); 38 - } 39 - } 40 - return; 41 - } 23 + name: "auth", 24 + description: "Authenticate with your ATProto PDS", 25 + args: { 26 + logout: option({ 27 + long: "logout", 28 + description: 29 + "Remove credentials for a specific identity (or all if only one exists)", 30 + type: optional(string), 31 + }), 32 + list: flag({ 33 + long: "list", 34 + description: "List all stored identities", 35 + }), 36 + }, 37 + handler: async ({ logout, list }) 
=> { 38 + // List identities 39 + if (list) { 40 + const identities = await listCredentials(); 41 + if (identities.length === 0) { 42 + log.info("No stored identities"); 43 + } else { 44 + log.info("Stored identities:"); 45 + for (const id of identities) { 46 + console.log(` - ${id}`); 47 + } 48 + } 49 + return; 50 + } 42 51 43 - // Logout 44 - if (logout !== undefined) { 45 - // If --logout was passed without a value, it will be an empty string 46 - const identifier = logout || undefined; 52 + // Logout 53 + if (logout !== undefined) { 54 + // If --logout was passed without a value, it will be an empty string 55 + const identifier = logout || undefined; 47 56 48 - if (!identifier) { 49 - // No identifier provided - show available and prompt 50 - const identities = await listCredentials(); 51 - if (identities.length === 0) { 52 - log.info("No saved credentials found"); 53 - return; 54 - } 55 - if (identities.length === 1) { 56 - const deleted = await deleteCredentials(identities[0]); 57 - if (deleted) { 58 - log.success(`Removed credentials for ${identities[0]}`); 59 - } 60 - return; 61 - } 62 - // Multiple identities - prompt 63 - const selected = exitOnCancel(await select({ 64 - message: "Select identity to remove:", 65 - options: identities.map(id => ({ value: id, label: id })), 66 - })); 67 - const deleted = await deleteCredentials(selected); 68 - if (deleted) { 69 - log.success(`Removed credentials for ${selected}`); 70 - } 71 - return; 72 - } 57 + if (!identifier) { 58 + // No identifier provided - show available and prompt 59 + const identities = await listCredentials(); 60 + if (identities.length === 0) { 61 + log.info("No saved credentials found"); 62 + return; 63 + } 64 + if (identities.length === 1) { 65 + const deleted = await deleteCredentials(identities[0]); 66 + if (deleted) { 67 + log.success(`Removed credentials for ${identities[0]}`); 68 + } 69 + return; 70 + } 71 + // Multiple identities - prompt 72 + const selected = exitOnCancel( 73 + await 
select({ 74 + message: "Select identity to remove:", 75 + options: identities.map((id) => ({ value: id, label: id })), 76 + }), 77 + ); 78 + const deleted = await deleteCredentials(selected); 79 + if (deleted) { 80 + log.success(`Removed credentials for ${selected}`); 81 + } 82 + return; 83 + } 73 84 74 - const deleted = await deleteCredentials(identifier); 75 - if (deleted) { 76 - log.success(`Removed credentials for ${identifier}`); 77 - } else { 78 - log.info(`No credentials found for ${identifier}`); 79 - } 80 - return; 81 - } 85 + const deleted = await deleteCredentials(identifier); 86 + if (deleted) { 87 + log.success(`Removed credentials for ${identifier}`); 88 + } else { 89 + log.info(`No credentials found for ${identifier}`); 90 + } 91 + return; 92 + } 82 93 83 - note( 84 - "To authenticate, you'll need an App Password.\n\n" + 85 - "Create one at: https://bsky.app/settings/app-passwords\n\n" + 86 - "App Passwords are safer than your main password and can be revoked.", 87 - "Authentication" 88 - ); 94 + note( 95 + "To authenticate, you'll need an App Password.\n\n" + 96 + "Create one at: https://bsky.app/settings/app-passwords\n\n" + 97 + "App Passwords are safer than your main password and can be revoked.", 98 + "Authentication", 99 + ); 89 100 90 - const identifier = exitOnCancel(await text({ 91 - message: "Handle or DID:", 92 - placeholder: "yourhandle.bsky.social", 93 - })); 101 + const identifier = exitOnCancel( 102 + await text({ 103 + message: "Handle or DID:", 104 + placeholder: "yourhandle.bsky.social", 105 + }), 106 + ); 94 107 95 - const appPassword = exitOnCancel(await password({ 96 - message: "App Password:", 97 - })); 108 + const appPassword = exitOnCancel( 109 + await password({ 110 + message: "App Password:", 111 + }), 112 + ); 98 113 99 - if (!identifier || !appPassword) { 100 - log.error("Handle and password are required"); 101 - process.exit(1); 102 - } 114 + if (!identifier || !appPassword) { 115 + log.error("Handle and password are 
required"); 116 + process.exit(1); 117 + } 103 118 104 - // Check if this identity already exists 105 - const existing = await getCredentials(identifier); 106 - if (existing) { 107 - const overwrite = exitOnCancel(await confirm({ 108 - message: `Credentials for ${identifier} already exist. Update?`, 109 - initialValue: false, 110 - })); 111 - if (!overwrite) { 112 - log.info("Keeping existing credentials"); 113 - return; 114 - } 115 - } 119 + // Check if this identity already exists 120 + const existing = await getCredentials(identifier); 121 + if (existing) { 122 + const overwrite = exitOnCancel( 123 + await confirm({ 124 + message: `Credentials for ${identifier} already exist. Update?`, 125 + initialValue: false, 126 + }), 127 + ); 128 + if (!overwrite) { 129 + log.info("Keeping existing credentials"); 130 + return; 131 + } 132 + } 116 133 117 - // Resolve PDS from handle 118 - const s = spinner(); 119 - s.start("Resolving PDS..."); 120 - let pdsUrl: string; 121 - try { 122 - pdsUrl = await resolveHandleToPDS(identifier); 123 - s.stop(`Found PDS: ${pdsUrl}`); 124 - } catch (error) { 125 - s.stop("Failed to resolve PDS"); 126 - log.error(`Failed to resolve PDS from handle: ${error}`); 127 - process.exit(1); 128 - } 134 + // Resolve PDS from handle 135 + const s = spinner(); 136 + s.start("Resolving PDS..."); 137 + let pdsUrl: string; 138 + try { 139 + pdsUrl = await resolveHandleToPDS(identifier); 140 + s.stop(`Found PDS: ${pdsUrl}`); 141 + } catch (error) { 142 + s.stop("Failed to resolve PDS"); 143 + log.error(`Failed to resolve PDS from handle: ${error}`); 144 + process.exit(1); 145 + } 129 146 130 - // Verify credentials 131 - s.start("Verifying credentials..."); 147 + // Verify credentials 148 + s.start("Verifying credentials..."); 132 149 133 - try { 134 - const agent = new AtpAgent({ service: pdsUrl }); 135 - await agent.login({ 136 - identifier: identifier, 137 - password: appPassword, 138 - }); 150 + try { 151 + const agent = new AtpAgent({ service: 
pdsUrl }); 152 + await agent.login({ 153 + identifier: identifier, 154 + password: appPassword, 155 + }); 139 156 140 - s.stop(`Logged in as ${agent.session?.handle}`); 157 + s.stop(`Logged in as ${agent.session?.handle}`); 141 158 142 - // Save credentials 143 - await saveCredentials({ 144 - pdsUrl, 145 - identifier: identifier, 146 - password: appPassword, 147 - }); 159 + // Save credentials 160 + await saveCredentials({ 161 + pdsUrl, 162 + identifier: identifier, 163 + password: appPassword, 164 + }); 148 165 149 - log.success(`Credentials saved to ${getCredentialsPath()}`); 150 - } catch (error) { 151 - s.stop("Failed to login"); 152 - log.error(`Failed to login: ${error}`); 153 - process.exit(1); 154 - } 155 - }, 166 + log.success(`Credentials saved to ${getCredentialsPath()}`); 167 + } catch (error) { 168 + s.stop("Failed to login"); 169 + log.error(`Failed to login: ${error}`); 170 + process.exit(1); 171 + } 172 + }, 156 173 });
+66 -13
packages/cli/src/commands/init.ts
··· 1 + import * as fs from "node:fs/promises"; 1 2 import { command } from "cmd-ts"; 2 3 import { 3 4 intro, ··· 10 11 log, 11 12 group, 12 13 } from "@clack/prompts"; 13 - import * as path from "path"; 14 + import * as path from "node:path"; 14 15 import { findConfig, generateConfigTemplate } from "../lib/config"; 15 16 import { loadCredentials } from "../lib/credentials"; 16 17 import { createAgent, createPublication } from "../lib/atproto"; 17 - import type { FrontmatterMapping } from "../lib/types"; 18 + import type { FrontmatterMapping, BlueskyConfig } from "../lib/types"; 19 + 20 + async function fileExists(filePath: string): Promise<boolean> { 21 + try { 22 + await fs.access(filePath); 23 + return true; 24 + } catch { 25 + return false; 26 + } 27 + } 18 28 19 29 const onCancel = () => { 20 30 outro("Setup cancelled"); ··· 127 137 message: "Field name for tags:", 128 138 defaultValue: "tags", 129 139 placeholder: "tags, categories, keywords, etc.", 140 + }), 141 + draftField: () => 142 + text({ 143 + message: "Field name for draft status:", 144 + defaultValue: "draft", 145 + placeholder: "draft, private, hidden, etc.", 130 146 }), 131 147 }, 132 148 { onCancel }, ··· 139 155 ["publishDate", frontmatterConfig.dateField, "publishDate"], 140 156 ["coverImage", frontmatterConfig.coverField, "ogImage"], 141 157 ["tags", frontmatterConfig.tagsField, "tags"], 158 + ["draft", frontmatterConfig.draftField, "draft"], 142 159 ]; 143 160 144 161 const builtMapping = fieldMappings.reduce<FrontmatterMapping>( ··· 182 199 183 200 const s = spinner(); 184 201 s.start("Connecting to ATProto..."); 185 - let agent; 202 + let agent: Awaited<ReturnType<typeof createAgent>> | undefined; 186 203 try { 187 204 agent = await createAgent(credentials); 188 205 s.stop("Connected!"); 189 - } catch (error) { 206 + } catch (_error) { 190 207 s.stop("Failed to connect"); 191 208 log.error( 192 209 "Failed to connect. 
Check your credentials with 'sequoia auth'.", ··· 253 270 publicationUri = uri as string; 254 271 } 255 272 273 + // Bluesky posting configuration 274 + const enableBluesky = await confirm({ 275 + message: "Enable automatic Bluesky posting when publishing?", 276 + initialValue: false, 277 + }); 278 + 279 + if (enableBluesky === Symbol.for("cancel")) { 280 + onCancel(); 281 + } 282 + 283 + let blueskyConfig: BlueskyConfig | undefined; 284 + if (enableBluesky) { 285 + const maxAgeDaysInput = await text({ 286 + message: "Maximum age (in days) for posts to be shared on Bluesky:", 287 + defaultValue: "7", 288 + placeholder: "7", 289 + validate: (value) => { 290 + const num = parseInt(value, 10); 291 + if (Number.isNaN(num) || num < 1) { 292 + return "Please enter a positive number"; 293 + } 294 + }, 295 + }); 296 + 297 + if (maxAgeDaysInput === Symbol.for("cancel")) { 298 + onCancel(); 299 + } 300 + 301 + const maxAgeDays = parseInt(maxAgeDaysInput as string, 10); 302 + blueskyConfig = { 303 + enabled: true, 304 + ...(maxAgeDays !== 7 && { maxAgeDays }), 305 + }; 306 + } 307 + 256 308 // Get PDS URL from credentials (already loaded earlier) 257 309 const pdsUrl = credentials?.pdsUrl; 258 310 ··· 267 319 publicationUri, 268 320 pdsUrl, 269 321 frontmatter: frontmatterMapping, 322 + bluesky: blueskyConfig, 270 323 }); 271 324 272 325 const configPath = path.join(process.cwd(), "sequoia.json"); 273 - await Bun.write(configPath, configContent); 326 + await fs.writeFile(configPath, configContent); 274 327 275 328 log.success(`Configuration saved to ${configPath}`); 276 329 ··· 283 336 const wellKnownPath = path.join(wellKnownDir, "site.standard.publication"); 284 337 285 338 // Ensure .well-known directory exists 286 - await Bun.write(path.join(wellKnownDir, ".gitkeep"), ""); 287 - await Bun.write(wellKnownPath, publicationUri); 339 + await fs.mkdir(wellKnownDir, { recursive: true }); 340 + await fs.writeFile(path.join(wellKnownDir, ".gitkeep"), ""); 341 + await 
fs.writeFile(wellKnownPath, publicationUri); 288 342 289 343 log.success(`Created ${wellKnownPath}`); 290 344 291 345 // Update .gitignore 292 346 const gitignorePath = path.join(process.cwd(), ".gitignore"); 293 - const gitignoreFile = Bun.file(gitignorePath); 294 347 const stateFilename = ".sequoia-state.json"; 295 348 296 - if (await gitignoreFile.exists()) { 297 - const gitignoreContent = await gitignoreFile.text(); 349 + if (await fileExists(gitignorePath)) { 350 + const gitignoreContent = await fs.readFile(gitignorePath, "utf-8"); 298 351 if (!gitignoreContent.includes(stateFilename)) { 299 - await Bun.write( 352 + await fs.writeFile( 300 353 gitignorePath, 301 - gitignoreContent + `\n${stateFilename}\n`, 354 + `${gitignoreContent}\n${stateFilename}\n`, 302 355 ); 303 356 log.info(`Added ${stateFilename} to .gitignore`); 304 357 } 305 358 } else { 306 - await Bun.write(gitignorePath, `${stateFilename}\n`); 359 + await fs.writeFile(gitignorePath, `${stateFilename}\n`); 307 360 log.info(`Created .gitignore with ${stateFilename}`); 308 361 } 309 362
+42 -67
packages/cli/src/commands/inject.ts
··· 1 - import { command, flag, option, optional, string } from "cmd-ts"; 2 1 import { log } from "@clack/prompts"; 3 - import * as path from "path"; 4 - import { Glob } from "bun"; 5 - import { loadConfig, loadState, findConfig } from "../lib/config"; 2 + import { command, flag, option, optional, string } from "cmd-ts"; 3 + import { glob } from "glob"; 4 + import * as fs from "node:fs/promises"; 5 + import * as path from "node:path"; 6 + import { findConfig, loadConfig, loadState } from "../lib/config"; 6 7 7 8 export const injectCommand = command({ 8 9 name: "inject", 9 - description: 10 - "Inject site.standard.document link tags into built HTML files", 10 + description: "Inject site.standard.document link tags into built HTML files", 11 11 args: { 12 12 outputDir: option({ 13 13 long: "output", ··· 43 43 // Load state to get atUri mappings 44 44 const state = await loadState(configDir); 45 45 46 - // Generic filenames where the slug is the parent directory, not the filename 47 - // Covers: SvelteKit (+page), Astro/Hugo (index), Next.js (page), etc. 
48 - const genericFilenames = new Set([ 49 - "+page", 50 - "index", 51 - "_index", 52 - "page", 53 - "readme", 54 - ]); 55 - 56 - // Build a map of slug/path to atUri from state 57 - const pathToAtUri = new Map<string, string>(); 46 + // Build a map of slug to atUri from state 47 + // The slug is stored in state by the publish command, using the configured slug options 48 + const slugToAtUri = new Map<string, string>(); 58 49 for (const [filePath, postState] of Object.entries(state.posts)) { 59 - if (postState.atUri) { 60 - // Extract slug from file path (e.g., ./content/blog/my-post.md -> my-post) 61 - let basename = path.basename(filePath, path.extname(filePath)); 50 + if (postState.atUri && postState.slug) { 51 + // Use the slug stored in state (computed by publish with config options) 52 + slugToAtUri.set(postState.slug, postState.atUri); 62 53 63 - // If the filename is a generic convention name, use the parent directory as slug 64 - if (genericFilenames.has(basename.toLowerCase())) { 65 - // Split path and filter out route groups like (blog-article) 66 - const pathParts = filePath 67 - .split(/[/\\]/) 68 - .filter((p) => p && !(p.startsWith("(") && p.endsWith(")"))); 69 - // The slug should be the second-to-last part (last is the filename) 70 - if (pathParts.length >= 2) { 71 - const slug = pathParts[pathParts.length - 2]; 72 - if (slug && slug !== "." 
&& slug !== "content" && slug !== "routes" && slug !== "src") { 73 - basename = slug; 74 - } 75 - } 54 + // Also add the last segment for simpler matching 55 + // e.g., "other/my-other-post" -> also map "my-other-post" 56 + const lastSegment = postState.slug.split("/").pop(); 57 + if (lastSegment && lastSegment !== postState.slug) { 58 + slugToAtUri.set(lastSegment, postState.atUri); 76 59 } 77 - 78 - pathToAtUri.set(basename, postState.atUri); 79 - 80 - // Also add variations that might match HTML file paths 81 - // e.g., /blog/my-post, /posts/my-post, my-post/index 82 - const dirName = path.basename(path.dirname(filePath)); 83 - // Skip route groups and common directory names 84 - if (dirName !== "." && dirName !== "content" && !(dirName.startsWith("(") && dirName.endsWith(")"))) { 85 - pathToAtUri.set(`${dirName}/${basename}`, postState.atUri); 86 - } 60 + } else if (postState.atUri) { 61 + // Fallback for older state files without slug field 62 + // Extract slug from file path (e.g., ./content/blog/my-post.md -> my-post) 63 + const basename = path.basename(filePath, path.extname(filePath)); 64 + slugToAtUri.set(basename.toLowerCase(), postState.atUri); 87 65 } 88 66 } 89 67 90 - if (pathToAtUri.size === 0) { 68 + if (slugToAtUri.size === 0) { 91 69 log.warn( 92 70 "No published posts found in state. 
Run 'sequoia publish' first.", 93 71 ); 94 72 return; 95 73 } 96 74 97 - log.info(`Found ${pathToAtUri.size} published posts in state`); 75 + log.info(`Found ${slugToAtUri.size} slug mappings from published posts`); 98 76 99 77 // Scan for HTML files 100 - const glob = new Glob("**/*.html"); 101 - const htmlFiles: string[] = []; 102 - 103 - for await (const file of glob.scan(resolvedOutputDir)) { 104 - htmlFiles.push(path.join(resolvedOutputDir, file)); 105 - } 78 + const htmlFiles = await glob("**/*.html", { 79 + cwd: resolvedOutputDir, 80 + absolute: false, 81 + }); 106 82 107 83 if (htmlFiles.length === 0) { 108 84 log.warn(`No HTML files found in ${resolvedOutputDir}`); ··· 115 91 let skippedCount = 0; 116 92 let alreadyHasCount = 0; 117 93 118 - for (const htmlPath of htmlFiles) { 94 + for (const file of htmlFiles) { 95 + const htmlPath = path.join(resolvedOutputDir, file); 119 96 // Try to match this HTML file to a published post 120 - const relativePath = path.relative(resolvedOutputDir, htmlPath); 97 + const relativePath = file; 121 98 const htmlDir = path.dirname(relativePath); 122 99 const htmlBasename = path.basename(relativePath, ".html"); 123 100 ··· 125 102 let atUri: string | undefined; 126 103 127 104 // Strategy 1: Direct basename match (e.g., my-post.html -> my-post) 128 - atUri = pathToAtUri.get(htmlBasename); 105 + atUri = slugToAtUri.get(htmlBasename); 129 106 130 - // Strategy 2: Directory name for index.html (e.g., my-post/index.html -> my-post) 107 + // Strategy 2: For index.html, try the directory path 108 + // e.g., posts/40th-puzzle-box/what-a-gift/index.html -> 40th-puzzle-box/what-a-gift 131 109 if (!atUri && htmlBasename === "index" && htmlDir !== ".") { 132 - const slug = path.basename(htmlDir); 133 - atUri = pathToAtUri.get(slug); 110 + // Try full directory path (for nested subdirectories) 111 + atUri = slugToAtUri.get(htmlDir); 134 112 135 - // Also try parent/slug pattern 113 + // Also try just the last directory segment 136 114 
if (!atUri) { 137 - const parentDir = path.dirname(htmlDir); 138 - if (parentDir !== ".") { 139 - atUri = pathToAtUri.get(`${path.basename(parentDir)}/${slug}`); 140 - } 115 + const lastDir = path.basename(htmlDir); 116 + atUri = slugToAtUri.get(lastDir); 141 117 } 142 118 } 143 119 144 120 // Strategy 3: Full path match (e.g., blog/my-post.html -> blog/my-post) 145 121 if (!atUri && htmlDir !== ".") { 146 - atUri = pathToAtUri.get(`${htmlDir}/${htmlBasename}`); 122 + atUri = slugToAtUri.get(`${htmlDir}/${htmlBasename}`); 147 123 } 148 124 149 125 if (!atUri) { ··· 152 128 } 153 129 154 130 // Read the HTML file 155 - const file = Bun.file(htmlPath); 156 - let content = await file.text(); 131 + let content = await fs.readFile(htmlPath, "utf-8"); 157 132 158 133 // Check if link tag already exists 159 134 const linkTag = `<link rel="site.standard.document" href="${atUri}">`; ··· 184 159 `${indent}${linkTag}\n${indent}` + 185 160 content.slice(headCloseIndex); 186 161 187 - await Bun.write(htmlPath, content); 162 + await fs.writeFile(htmlPath, content); 188 163 log.success(` Injected into: ${relativePath}`); 189 164 injectedCount++; 190 165 }
+317 -198
packages/cli/src/commands/publish.ts
··· 1 + import * as fs from "node:fs/promises"; 1 2 import { command, flag } from "cmd-ts"; 2 3 import { select, spinner, log } from "@clack/prompts"; 3 - import * as path from "path"; 4 + import * as path from "node:path"; 4 5 import { loadConfig, loadState, saveState, findConfig } from "../lib/config"; 5 - import { loadCredentials, listCredentials, getCredentials } from "../lib/credentials"; 6 - import { createAgent, createDocument, updateDocument, uploadImage, resolveImagePath } from "../lib/atproto"; 6 + import { 7 + loadCredentials, 8 + listCredentials, 9 + getCredentials, 10 + } from "../lib/credentials"; 11 + import { 12 + createAgent, 13 + createDocument, 14 + updateDocument, 15 + uploadImage, 16 + resolveImagePath, 17 + createBlueskyPost, 18 + addBskyPostRefToDocument, 19 + } from "../lib/atproto"; 7 20 import { 8 - scanContentDirectory, 9 - getContentHash, 10 - updateFrontmatterWithAtUri, 21 + scanContentDirectory, 22 + getContentHash, 23 + updateFrontmatterWithAtUri, 11 24 } from "../lib/markdown"; 12 - import type { BlogPost, BlobObject } from "../lib/types"; 25 + import type { BlogPost, BlobObject, StrongRef } from "../lib/types"; 13 26 import { exitOnCancel } from "../lib/prompts"; 14 27 15 28 export const publishCommand = command({ 16 - name: "publish", 17 - description: "Publish content to ATProto", 18 - args: { 19 - force: flag({ 20 - long: "force", 21 - short: "f", 22 - description: "Force publish all posts, ignoring change detection", 23 - }), 24 - dryRun: flag({ 25 - long: "dry-run", 26 - short: "n", 27 - description: "Preview what would be published without making changes", 28 - }), 29 - }, 30 - handler: async ({ force, dryRun }) => { 31 - // Load config 32 - const configPath = await findConfig(); 33 - if (!configPath) { 34 - log.error("No publisher.config.ts found. 
Run 'publisher init' first."); 35 - process.exit(1); 36 - } 29 + name: "publish", 30 + description: "Publish content to ATProto", 31 + args: { 32 + force: flag({ 33 + long: "force", 34 + short: "f", 35 + description: "Force publish all posts, ignoring change detection", 36 + }), 37 + dryRun: flag({ 38 + long: "dry-run", 39 + short: "n", 40 + description: "Preview what would be published without making changes", 41 + }), 42 + }, 43 + handler: async ({ force, dryRun }) => { 44 + // Load config 45 + const configPath = await findConfig(); 46 + if (!configPath) { 47 + log.error("No publisher.config.ts found. Run 'publisher init' first."); 48 + process.exit(1); 49 + } 37 50 38 - const config = await loadConfig(configPath); 39 - const configDir = path.dirname(configPath); 51 + const config = await loadConfig(configPath); 52 + const configDir = path.dirname(configPath); 40 53 41 - log.info(`Site: ${config.siteUrl}`); 42 - log.info(`Content directory: ${config.contentDir}`); 54 + log.info(`Site: ${config.siteUrl}`); 55 + log.info(`Content directory: ${config.contentDir}`); 43 56 44 - // Load credentials 45 - let credentials = await loadCredentials(config.identity); 57 + // Load credentials 58 + let credentials = await loadCredentials(config.identity); 46 59 47 - // If no credentials resolved, check if we need to prompt for identity selection 48 - if (!credentials) { 49 - const identities = await listCredentials(); 50 - if (identities.length === 0) { 51 - log.error("No credentials found. Run 'sequoia auth' first."); 52 - log.info("Or set ATP_IDENTIFIER and ATP_APP_PASSWORD environment variables."); 53 - process.exit(1); 54 - } 60 + // If no credentials resolved, check if we need to prompt for identity selection 61 + if (!credentials) { 62 + const identities = await listCredentials(); 63 + if (identities.length === 0) { 64 + log.error("No credentials found. 
Run 'sequoia auth' first."); 65 + log.info( 66 + "Or set ATP_IDENTIFIER and ATP_APP_PASSWORD environment variables.", 67 + ); 68 + process.exit(1); 69 + } 55 70 56 - // Multiple identities exist but none selected - prompt user 57 - log.info("Multiple identities found. Select one to use:"); 58 - const selected = exitOnCancel(await select({ 59 - message: "Identity:", 60 - options: identities.map(id => ({ value: id, label: id })), 61 - })); 71 + // Multiple identities exist but none selected - prompt user 72 + log.info("Multiple identities found. Select one to use:"); 73 + const selected = exitOnCancel( 74 + await select({ 75 + message: "Identity:", 76 + options: identities.map((id) => ({ value: id, label: id })), 77 + }), 78 + ); 62 79 63 - credentials = await getCredentials(selected); 64 - if (!credentials) { 65 - log.error("Failed to load selected credentials."); 66 - process.exit(1); 67 - } 80 + credentials = await getCredentials(selected); 81 + if (!credentials) { 82 + log.error("Failed to load selected credentials."); 83 + process.exit(1); 84 + } 68 85 69 - log.info(`Tip: Add "identity": "${selected}" to sequoia.json to use this by default.`); 70 - } 86 + log.info( 87 + `Tip: Add "identity": "${selected}" to sequoia.json to use this by default.`, 88 + ); 89 + } 71 90 72 - // Resolve content directory 73 - const contentDir = path.isAbsolute(config.contentDir) 74 - ? config.contentDir 75 - : path.join(configDir, config.contentDir); 91 + // Resolve content directory 92 + const contentDir = path.isAbsolute(config.contentDir) 93 + ? config.contentDir 94 + : path.join(configDir, config.contentDir); 95 + 96 + const imagesDir = config.imagesDir 97 + ? path.isAbsolute(config.imagesDir) 98 + ? 
config.imagesDir 99 + : path.join(configDir, config.imagesDir) 100 + : undefined; 101 + 102 + // Load state 103 + const state = await loadState(configDir); 104 + 105 + // Scan for posts 106 + const s = spinner(); 107 + s.start("Scanning for posts..."); 108 + const posts = await scanContentDirectory(contentDir, { 109 + frontmatterMapping: config.frontmatter, 110 + ignorePatterns: config.ignore, 111 + slugField: config.frontmatter?.slugField, 112 + removeIndexFromSlug: config.removeIndexFromSlug, 113 + }); 114 + s.stop(`Found ${posts.length} posts`); 115 + 116 + // Determine which posts need publishing 117 + const postsToPublish: Array<{ 118 + post: BlogPost; 119 + action: "create" | "update"; 120 + reason: string; 121 + }> = []; 122 + const draftPosts: BlogPost[] = []; 123 + 124 + for (const post of posts) { 125 + // Skip draft posts 126 + if (post.frontmatter.draft) { 127 + draftPosts.push(post); 128 + continue; 129 + } 130 + 131 + const contentHash = await getContentHash(post.rawContent); 132 + const relativeFilePath = path.relative(configDir, post.filePath); 133 + const postState = state.posts[relativeFilePath]; 134 + 135 + if (force) { 136 + postsToPublish.push({ 137 + post, 138 + action: post.frontmatter.atUri ? "update" : "create", 139 + reason: "forced", 140 + }); 141 + } else if (!postState) { 142 + // New post 143 + postsToPublish.push({ 144 + post, 145 + action: "create", 146 + reason: "new post", 147 + }); 148 + } else if (postState.contentHash !== contentHash) { 149 + // Changed post 150 + postsToPublish.push({ 151 + post, 152 + action: post.frontmatter.atUri ? "update" : "create", 153 + reason: "content changed", 154 + }); 155 + } 156 + } 76 157 77 - const imagesDir = config.imagesDir 78 - ? path.isAbsolute(config.imagesDir) 79 - ? config.imagesDir 80 - : path.join(configDir, config.imagesDir) 81 - : undefined; 158 + if (draftPosts.length > 0) { 159 + log.info( 160 + `Skipping ${draftPosts.length} draft post${draftPosts.length === 1 ? 
"" : "s"}`, 161 + ); 162 + } 82 163 83 - // Load state 84 - const state = await loadState(configDir); 164 + if (postsToPublish.length === 0) { 165 + log.success("All posts are up to date. Nothing to publish."); 166 + return; 167 + } 85 168 86 - // Scan for posts 87 - const s = spinner(); 88 - s.start("Scanning for posts..."); 89 - const posts = await scanContentDirectory(contentDir, config.frontmatter, config.ignore); 90 - s.stop(`Found ${posts.length} posts`); 169 + log.info(`\n${postsToPublish.length} posts to publish:\n`); 91 170 92 - // Determine which posts need publishing 93 - const postsToPublish: Array<{ 94 - post: BlogPost; 95 - action: "create" | "update"; 96 - reason: string; 97 - }> = []; 171 + // Bluesky posting configuration 172 + const blueskyEnabled = config.bluesky?.enabled ?? false; 173 + const maxAgeDays = config.bluesky?.maxAgeDays ?? 7; 174 + const cutoffDate = new Date(); 175 + cutoffDate.setDate(cutoffDate.getDate() - maxAgeDays); 176 + 177 + for (const { post, action, reason } of postsToPublish) { 178 + const icon = action === "create" ? "+" : "~"; 179 + const relativeFilePath = path.relative(configDir, post.filePath); 180 + const existingBskyPostRef = state.posts[relativeFilePath]?.bskyPostRef; 181 + 182 + let bskyNote = ""; 183 + if (blueskyEnabled) { 184 + if (existingBskyPostRef) { 185 + bskyNote = " [bsky: exists]"; 186 + } else { 187 + const publishDate = new Date(post.frontmatter.publishDate); 188 + if (publishDate < cutoffDate) { 189 + bskyNote = ` [bsky: skipped, older than ${maxAgeDays} days]`; 190 + } else { 191 + bskyNote = " [bsky: will post]"; 192 + } 193 + } 194 + } 195 + 196 + log.message(` ${icon} ${post.frontmatter.title} (${reason})${bskyNote}`); 197 + } 198 + 199 + if (dryRun) { 200 + if (blueskyEnabled) { 201 + log.info(`\nBluesky posting: enabled (max age: ${maxAgeDays} days)`); 202 + } 203 + log.info("\nDry run complete. 
No changes made."); 204 + return; 205 + } 98 206 99 - for (const post of posts) { 100 - const contentHash = await getContentHash(post.rawContent); 101 - const relativeFilePath = path.relative(configDir, post.filePath); 102 - const postState = state.posts[relativeFilePath]; 207 + // Create agent 208 + s.start(`Connecting to ${credentials.pdsUrl}...`); 209 + let agent: Awaited<ReturnType<typeof createAgent>> | undefined; 210 + try { 211 + agent = await createAgent(credentials); 212 + s.stop(`Logged in as ${agent.session?.handle}`); 213 + } catch (error) { 214 + s.stop("Failed to login"); 215 + log.error(`Failed to login: ${error}`); 216 + process.exit(1); 217 + } 103 218 104 - if (force) { 105 - postsToPublish.push({ 106 - post, 107 - action: post.frontmatter.atUri ? "update" : "create", 108 - reason: "forced", 109 - }); 110 - } else if (!postState) { 111 - // New post 112 - postsToPublish.push({ 113 - post, 114 - action: "create", 115 - reason: "new post", 116 - }); 117 - } else if (postState.contentHash !== contentHash) { 118 - // Changed post 119 - postsToPublish.push({ 120 - post, 121 - action: post.frontmatter.atUri ? "update" : "create", 122 - reason: "content changed", 123 - }); 124 - } 125 - } 219 + // Publish posts 220 + let publishedCount = 0; 221 + let updatedCount = 0; 222 + let errorCount = 0; 223 + let bskyPostCount = 0; 126 224 127 - if (postsToPublish.length === 0) { 128 - log.success("All posts are up to date. Nothing to publish."); 129 - return; 130 - } 225 + for (const { post, action } of postsToPublish) { 226 + s.start(`Publishing: ${post.frontmatter.title}`); 131 227 132 - log.info(`\n${postsToPublish.length} posts to publish:\n`); 133 - for (const { post, action, reason } of postsToPublish) { 134 - const icon = action === "create" ? 
"+" : "~"; 135 - log.message(` ${icon} ${post.frontmatter.title} (${reason})`); 136 - } 228 + try { 229 + // Handle cover image upload 230 + let coverImage: BlobObject | undefined; 231 + if (post.frontmatter.ogImage) { 232 + const imagePath = await resolveImagePath( 233 + post.frontmatter.ogImage, 234 + imagesDir, 235 + contentDir, 236 + ); 137 237 138 - if (dryRun) { 139 - log.info("\nDry run complete. No changes made."); 140 - return; 141 - } 238 + if (imagePath) { 239 + log.info(` Uploading cover image: ${path.basename(imagePath)}`); 240 + coverImage = await uploadImage(agent, imagePath); 241 + if (coverImage) { 242 + log.info(` Uploaded image blob: ${coverImage.ref.$link}`); 243 + } 244 + } else { 245 + log.warn(` Cover image not found: ${post.frontmatter.ogImage}`); 246 + } 247 + } 142 248 143 - // Create agent 144 - s.start(`Connecting to ${credentials.pdsUrl}...`); 145 - let agent; 146 - try { 147 - agent = await createAgent(credentials); 148 - s.stop(`Logged in as ${agent.session?.handle}`); 149 - } catch (error) { 150 - s.stop("Failed to login"); 151 - log.error(`Failed to login: ${error}`); 152 - process.exit(1); 153 - } 249 + // Track atUri, content for state saving, and bskyPostRef 250 + let atUri: string; 251 + let contentForHash: string; 252 + let bskyPostRef: StrongRef | undefined; 253 + const relativeFilePath = path.relative(configDir, post.filePath); 154 254 155 - // Publish posts 156 - let publishedCount = 0; 157 - let updatedCount = 0; 158 - let errorCount = 0; 255 + // Check if bskyPostRef already exists in state 256 + const existingBskyPostRef = state.posts[relativeFilePath]?.bskyPostRef; 159 257 160 - for (const { post, action } of postsToPublish) { 161 - s.start(`Publishing: ${post.frontmatter.title}`); 258 + if (action === "create") { 259 + atUri = await createDocument(agent, post, config, coverImage); 260 + s.stop(`Created: ${atUri}`); 162 261 163 - try { 164 - // Handle cover image upload 165 - let coverImage: BlobObject | undefined; 166 - 
if (post.frontmatter.ogImage) { 167 - const imagePath = resolveImagePath( 168 - post.frontmatter.ogImage, 169 - imagesDir, 170 - contentDir 171 - ); 262 + // Update frontmatter with atUri 263 + const updatedContent = updateFrontmatterWithAtUri( 264 + post.rawContent, 265 + atUri, 266 + ); 267 + await fs.writeFile(post.filePath, updatedContent); 268 + log.info(` Updated frontmatter in ${path.basename(post.filePath)}`); 172 269 173 - if (imagePath) { 174 - log.info(` Uploading cover image: ${path.basename(imagePath)}`); 175 - coverImage = await uploadImage(agent, imagePath); 176 - if (coverImage) { 177 - log.info(` Uploaded image blob: ${coverImage.ref.$link}`); 178 - } 179 - } else { 180 - log.warn(` Cover image not found: ${post.frontmatter.ogImage}`); 181 - } 182 - } 270 + // Use updated content (with atUri) for hash so next run sees matching hash 271 + contentForHash = updatedContent; 272 + publishedCount++; 273 + } else { 274 + atUri = post.frontmatter.atUri!; 275 + await updateDocument(agent, post, atUri, config, coverImage); 276 + s.stop(`Updated: ${atUri}`); 183 277 184 - // Track atUri and content for state saving 185 - let atUri: string; 186 - let contentForHash: string; 278 + // For updates, rawContent already has atUri 279 + contentForHash = post.rawContent; 280 + updatedCount++; 281 + } 187 282 188 - if (action === "create") { 189 - atUri = await createDocument(agent, post, config, coverImage); 190 - s.stop(`Created: ${atUri}`); 283 + // Create Bluesky post if enabled and conditions are met 284 + if (blueskyEnabled) { 285 + if (existingBskyPostRef) { 286 + log.info(` Bluesky post already exists, skipping`); 287 + bskyPostRef = existingBskyPostRef; 288 + } else { 289 + const publishDate = new Date(post.frontmatter.publishDate); 191 290 192 - // Update frontmatter with atUri 193 - const updatedContent = updateFrontmatterWithAtUri(post.rawContent, atUri); 194 - await Bun.write(post.filePath, updatedContent); 195 - log.info(` Updated frontmatter in 
${path.basename(post.filePath)}`); 291 + if (publishDate < cutoffDate) { 292 + log.info( 293 + ` Post is older than ${maxAgeDays} days, skipping Bluesky post`, 294 + ); 295 + } else { 296 + // Create Bluesky post 297 + try { 298 + const pathPrefix = config.pathPrefix || "/posts"; 299 + const canonicalUrl = `${config.siteUrl}${pathPrefix}/${post.slug}`; 196 300 197 - // Use updated content (with atUri) for hash so next run sees matching hash 198 - contentForHash = updatedContent; 199 - publishedCount++; 200 - } else { 201 - atUri = post.frontmatter.atUri!; 202 - await updateDocument(agent, post, atUri, config, coverImage); 203 - s.stop(`Updated: ${atUri}`); 301 + bskyPostRef = await createBlueskyPost(agent, { 302 + title: post.frontmatter.title, 303 + description: post.frontmatter.description, 304 + canonicalUrl, 305 + coverImage, 306 + publishedAt: post.frontmatter.publishDate, 307 + }); 204 308 205 - // For updates, rawContent already has atUri 206 - contentForHash = post.rawContent; 207 - updatedCount++; 208 - } 309 + // Update document record with bskyPostRef 310 + await addBskyPostRefToDocument(agent, atUri, bskyPostRef); 311 + log.info(` Created Bluesky post: ${bskyPostRef.uri}`); 312 + bskyPostCount++; 313 + } catch (bskyError) { 314 + const errorMsg = 315 + bskyError instanceof Error 316 + ? bskyError.message 317 + : String(bskyError); 318 + log.warn(` Failed to create Bluesky post: ${errorMsg}`); 319 + } 320 + } 321 + } 322 + } 209 323 210 - // Update state (use relative path from config directory) 211 - const contentHash = await getContentHash(contentForHash); 212 - const relativeFilePath = path.relative(configDir, post.filePath); 213 - state.posts[relativeFilePath] = { 214 - contentHash, 215 - atUri, 216 - lastPublished: new Date().toISOString(), 217 - }; 218 - } catch (error) { 219 - const errorMessage = error instanceof Error ? 
error.message : String(error); 220 - s.stop(`Error publishing "${path.basename(post.filePath)}"`); 221 - log.error(` ${errorMessage}`); 222 - errorCount++; 223 - } 224 - } 324 + // Update state (use relative path from config directory) 325 + const contentHash = await getContentHash(contentForHash); 326 + state.posts[relativeFilePath] = { 327 + contentHash, 328 + atUri, 329 + lastPublished: new Date().toISOString(), 330 + slug: post.slug, 331 + bskyPostRef, 332 + }; 333 + } catch (error) { 334 + const errorMessage = 335 + error instanceof Error ? error.message : String(error); 336 + s.stop(`Error publishing "${path.basename(post.filePath)}"`); 337 + log.error(` ${errorMessage}`); 338 + errorCount++; 339 + } 340 + } 225 341 226 - // Save state 227 - await saveState(configDir, state); 342 + // Save state 343 + await saveState(configDir, state); 228 344 229 - // Summary 230 - log.message("\n---"); 231 - log.info(`Published: ${publishedCount}`); 232 - log.info(`Updated: ${updatedCount}`); 233 - if (errorCount > 0) { 234 - log.warn(`Errors: ${errorCount}`); 235 - } 236 - }, 345 + // Summary 346 + log.message("\n---"); 347 + log.info(`Published: ${publishedCount}`); 348 + log.info(`Updated: ${updatedCount}`); 349 + if (bskyPostCount > 0) { 350 + log.info(`Bluesky posts: ${bskyPostCount}`); 351 + } 352 + if (errorCount > 0) { 353 + log.warn(`Errors: ${errorCount}`); 354 + } 355 + }, 237 356 });
+171 -151
packages/cli/src/commands/sync.ts
··· 1 + import * as fs from "node:fs/promises"; 1 2 import { command, flag } from "cmd-ts"; 2 3 import { select, spinner, log } from "@clack/prompts"; 3 - import * as path from "path"; 4 + import * as path from "node:path"; 4 5 import { loadConfig, loadState, saveState, findConfig } from "../lib/config"; 5 - import { loadCredentials, listCredentials, getCredentials } from "../lib/credentials"; 6 + import { 7 + loadCredentials, 8 + listCredentials, 9 + getCredentials, 10 + } from "../lib/credentials"; 6 11 import { createAgent, listDocuments } from "../lib/atproto"; 7 - import { scanContentDirectory, getContentHash, updateFrontmatterWithAtUri } from "../lib/markdown"; 12 + import { 13 + scanContentDirectory, 14 + getContentHash, 15 + updateFrontmatterWithAtUri, 16 + } from "../lib/markdown"; 8 17 import { exitOnCancel } from "../lib/prompts"; 9 18 10 19 export const syncCommand = command({ 11 - name: "sync", 12 - description: "Sync state from ATProto to restore .sequoia-state.json", 13 - args: { 14 - updateFrontmatter: flag({ 15 - long: "update-frontmatter", 16 - short: "u", 17 - description: "Update frontmatter atUri fields in local markdown files", 18 - }), 19 - dryRun: flag({ 20 - long: "dry-run", 21 - short: "n", 22 - description: "Preview what would be synced without making changes", 23 - }), 24 - }, 25 - handler: async ({ updateFrontmatter, dryRun }) => { 26 - // Load config 27 - const configPath = await findConfig(); 28 - if (!configPath) { 29 - log.error("No sequoia.json found. 
Run 'sequoia init' first."); 30 - process.exit(1); 31 - } 20 + name: "sync", 21 + description: "Sync state from ATProto to restore .sequoia-state.json", 22 + args: { 23 + updateFrontmatter: flag({ 24 + long: "update-frontmatter", 25 + short: "u", 26 + description: "Update frontmatter atUri fields in local markdown files", 27 + }), 28 + dryRun: flag({ 29 + long: "dry-run", 30 + short: "n", 31 + description: "Preview what would be synced without making changes", 32 + }), 33 + }, 34 + handler: async ({ updateFrontmatter, dryRun }) => { 35 + // Load config 36 + const configPath = await findConfig(); 37 + if (!configPath) { 38 + log.error("No sequoia.json found. Run 'sequoia init' first."); 39 + process.exit(1); 40 + } 32 41 33 - const config = await loadConfig(configPath); 34 - const configDir = path.dirname(configPath); 42 + const config = await loadConfig(configPath); 43 + const configDir = path.dirname(configPath); 35 44 36 - log.info(`Site: ${config.siteUrl}`); 37 - log.info(`Publication: ${config.publicationUri}`); 45 + log.info(`Site: ${config.siteUrl}`); 46 + log.info(`Publication: ${config.publicationUri}`); 38 47 39 - // Load credentials 40 - let credentials = await loadCredentials(config.identity); 48 + // Load credentials 49 + let credentials = await loadCredentials(config.identity); 41 50 42 - if (!credentials) { 43 - const identities = await listCredentials(); 44 - if (identities.length === 0) { 45 - log.error("No credentials found. Run 'sequoia auth' first."); 46 - process.exit(1); 47 - } 51 + if (!credentials) { 52 + const identities = await listCredentials(); 53 + if (identities.length === 0) { 54 + log.error("No credentials found. Run 'sequoia auth' first."); 55 + process.exit(1); 56 + } 48 57 49 - log.info("Multiple identities found. Select one to use:"); 50 - const selected = exitOnCancel(await select({ 51 - message: "Identity:", 52 - options: identities.map(id => ({ value: id, label: id })), 53 - })); 58 + log.info("Multiple identities found. 
Select one to use:"); 59 + const selected = exitOnCancel( 60 + await select({ 61 + message: "Identity:", 62 + options: identities.map((id) => ({ value: id, label: id })), 63 + }), 64 + ); 54 65 55 - credentials = await getCredentials(selected); 56 - if (!credentials) { 57 - log.error("Failed to load selected credentials."); 58 - process.exit(1); 59 - } 60 - } 66 + credentials = await getCredentials(selected); 67 + if (!credentials) { 68 + log.error("Failed to load selected credentials."); 69 + process.exit(1); 70 + } 71 + } 61 72 62 - // Create agent 63 - const s = spinner(); 64 - s.start(`Connecting to ${credentials.pdsUrl}...`); 65 - let agent; 66 - try { 67 - agent = await createAgent(credentials); 68 - s.stop(`Logged in as ${agent.session?.handle}`); 69 - } catch (error) { 70 - s.stop("Failed to login"); 71 - log.error(`Failed to login: ${error}`); 72 - process.exit(1); 73 - } 73 + // Create agent 74 + const s = spinner(); 75 + s.start(`Connecting to ${credentials.pdsUrl}...`); 76 + let agent: Awaited<ReturnType<typeof createAgent>> | undefined; 77 + try { 78 + agent = await createAgent(credentials); 79 + s.stop(`Logged in as ${agent.session?.handle}`); 80 + } catch (error) { 81 + s.stop("Failed to login"); 82 + log.error(`Failed to login: ${error}`); 83 + process.exit(1); 84 + } 74 85 75 - // Fetch documents from PDS 76 - s.start("Fetching documents from PDS..."); 77 - const documents = await listDocuments(agent, config.publicationUri); 78 - s.stop(`Found ${documents.length} documents on PDS`); 86 + // Fetch documents from PDS 87 + s.start("Fetching documents from PDS..."); 88 + const documents = await listDocuments(agent, config.publicationUri); 89 + s.stop(`Found ${documents.length} documents on PDS`); 79 90 80 - if (documents.length === 0) { 81 - log.info("No documents found for this publication."); 82 - return; 83 - } 91 + if (documents.length === 0) { 92 + log.info("No documents found for this publication."); 93 + return; 94 + } 84 95 85 - // Resolve 
content directory 86 - const contentDir = path.isAbsolute(config.contentDir) 87 - ? config.contentDir 88 - : path.join(configDir, config.contentDir); 96 + // Resolve content directory 97 + const contentDir = path.isAbsolute(config.contentDir) 98 + ? config.contentDir 99 + : path.join(configDir, config.contentDir); 89 100 90 - // Scan local posts 91 - s.start("Scanning local content..."); 92 - const localPosts = await scanContentDirectory(contentDir, config.frontmatter); 93 - s.stop(`Found ${localPosts.length} local posts`); 101 + // Scan local posts 102 + s.start("Scanning local content..."); 103 + const localPosts = await scanContentDirectory(contentDir, { 104 + frontmatterMapping: config.frontmatter, 105 + ignorePatterns: config.ignore, 106 + slugField: config.frontmatter?.slugField, 107 + removeIndexFromSlug: config.removeIndexFromSlug, 108 + }); 109 + s.stop(`Found ${localPosts.length} local posts`); 94 110 95 - // Build a map of path -> local post for matching 96 - // Document path is like /posts/my-post-slug 97 - const postsByPath = new Map<string, typeof localPosts[0]>(); 98 - for (const post of localPosts) { 99 - const postPath = `/posts/${post.slug}`; 100 - postsByPath.set(postPath, post); 101 - } 111 + // Build a map of path -> local post for matching 112 + // Document path is like /posts/my-post-slug (or custom pathPrefix) 113 + const pathPrefix = config.pathPrefix || "/posts"; 114 + const postsByPath = new Map<string, (typeof localPosts)[0]>(); 115 + for (const post of localPosts) { 116 + const postPath = `${pathPrefix}/${post.slug}`; 117 + postsByPath.set(postPath, post); 118 + } 102 119 103 - // Load existing state 104 - const state = await loadState(configDir); 105 - const originalPostCount = Object.keys(state.posts).length; 120 + // Load existing state 121 + const state = await loadState(configDir); 122 + const originalPostCount = Object.keys(state.posts).length; 106 123 107 - // Track changes 108 - let matchedCount = 0; 109 - let unmatchedCount = 
0; 110 - let frontmatterUpdates: Array<{ filePath: string; atUri: string }> = []; 124 + // Track changes 125 + let matchedCount = 0; 126 + let unmatchedCount = 0; 127 + const frontmatterUpdates: Array<{ filePath: string; atUri: string }> = []; 111 128 112 - log.message("\nMatching documents to local files:\n"); 129 + log.message("\nMatching documents to local files:\n"); 113 130 114 - for (const doc of documents) { 115 - const docPath = doc.value.path; 116 - const localPost = postsByPath.get(docPath); 131 + for (const doc of documents) { 132 + const docPath = doc.value.path; 133 + const localPost = postsByPath.get(docPath); 117 134 118 - if (localPost) { 119 - matchedCount++; 120 - log.message(` โœ“ ${doc.value.title}`); 121 - log.message(` Path: ${docPath}`); 122 - log.message(` URI: ${doc.uri}`); 123 - log.message(` File: ${path.basename(localPost.filePath)}`); 135 + if (localPost) { 136 + matchedCount++; 137 + log.message(` โœ“ ${doc.value.title}`); 138 + log.message(` Path: ${docPath}`); 139 + log.message(` URI: ${doc.uri}`); 140 + log.message(` File: ${path.basename(localPost.filePath)}`); 124 141 125 - // Update state (use relative path from config directory) 126 - const contentHash = await getContentHash(localPost.rawContent); 127 - const relativeFilePath = path.relative(configDir, localPost.filePath); 128 - state.posts[relativeFilePath] = { 129 - contentHash, 130 - atUri: doc.uri, 131 - lastPublished: doc.value.publishedAt, 132 - }; 142 + // Update state (use relative path from config directory) 143 + const contentHash = await getContentHash(localPost.rawContent); 144 + const relativeFilePath = path.relative(configDir, localPost.filePath); 145 + state.posts[relativeFilePath] = { 146 + contentHash, 147 + atUri: doc.uri, 148 + lastPublished: doc.value.publishedAt, 149 + }; 133 150 134 - // Check if frontmatter needs updating 135 - if (updateFrontmatter && localPost.frontmatter.atUri !== doc.uri) { 136 - frontmatterUpdates.push({ 137 - filePath: 
localPost.filePath, 138 - atUri: doc.uri, 139 - }); 140 - log.message(` โ†’ Will update frontmatter`); 141 - } 142 - } else { 143 - unmatchedCount++; 144 - log.message(` โœ— ${doc.value.title} (no matching local file)`); 145 - log.message(` Path: ${docPath}`); 146 - log.message(` URI: ${doc.uri}`); 147 - } 148 - log.message(""); 149 - } 151 + // Check if frontmatter needs updating 152 + if (updateFrontmatter && localPost.frontmatter.atUri !== doc.uri) { 153 + frontmatterUpdates.push({ 154 + filePath: localPost.filePath, 155 + atUri: doc.uri, 156 + }); 157 + log.message(` โ†’ Will update frontmatter`); 158 + } 159 + } else { 160 + unmatchedCount++; 161 + log.message(` โœ— ${doc.value.title} (no matching local file)`); 162 + log.message(` Path: ${docPath}`); 163 + log.message(` URI: ${doc.uri}`); 164 + } 165 + log.message(""); 166 + } 150 167 151 - // Summary 152 - log.message("---"); 153 - log.info(`Matched: ${matchedCount} documents`); 154 - if (unmatchedCount > 0) { 155 - log.warn(`Unmatched: ${unmatchedCount} documents (exist on PDS but not locally)`); 156 - } 168 + // Summary 169 + log.message("---"); 170 + log.info(`Matched: ${matchedCount} documents`); 171 + if (unmatchedCount > 0) { 172 + log.warn( 173 + `Unmatched: ${unmatchedCount} documents (exist on PDS but not locally)`, 174 + ); 175 + } 157 176 158 - if (dryRun) { 159 - log.info("\nDry run complete. No changes made."); 160 - return; 161 - } 177 + if (dryRun) { 178 + log.info("\nDry run complete. 
No changes made."); 179 + return; 180 + } 162 181 163 - // Save updated state 164 - await saveState(configDir, state); 165 - const newPostCount = Object.keys(state.posts).length; 166 - log.success(`\nSaved .sequoia-state.json (${originalPostCount} โ†’ ${newPostCount} entries)`); 182 + // Save updated state 183 + await saveState(configDir, state); 184 + const newPostCount = Object.keys(state.posts).length; 185 + log.success( 186 + `\nSaved .sequoia-state.json (${originalPostCount} โ†’ ${newPostCount} entries)`, 187 + ); 167 188 168 - // Update frontmatter if requested 169 - if (frontmatterUpdates.length > 0) { 170 - s.start(`Updating frontmatter in ${frontmatterUpdates.length} files...`); 171 - for (const { filePath, atUri } of frontmatterUpdates) { 172 - const file = Bun.file(filePath); 173 - const content = await file.text(); 174 - const updated = updateFrontmatterWithAtUri(content, atUri); 175 - await Bun.write(filePath, updated); 176 - log.message(` Updated: ${path.basename(filePath)}`); 177 - } 178 - s.stop("Frontmatter updated"); 179 - } 189 + // Update frontmatter if requested 190 + if (frontmatterUpdates.length > 0) { 191 + s.start(`Updating frontmatter in ${frontmatterUpdates.length} files...`); 192 + for (const { filePath, atUri } of frontmatterUpdates) { 193 + const content = await fs.readFile(filePath, "utf-8"); 194 + const updated = updateFrontmatterWithAtUri(content, atUri); 195 + await fs.writeFile(filePath, updated); 196 + log.message(` Updated: ${path.basename(filePath)}`); 197 + } 198 + s.stop("Frontmatter updated"); 199 + } 180 200 181 - log.success("\nSync complete!"); 182 - }, 201 + log.success("\nSync complete!"); 202 + }, 183 203 });
+3 -3
packages/cli/src/index.ts
··· 1 - #!/usr/bin/env bun 1 + #!/usr/bin/env node 2 2 3 3 import { run, subcommands } from "cmd-ts"; 4 4 import { authCommand } from "./commands/auth"; ··· 31 31 32 32 Publish evergreen content to the ATmosphere 33 33 34 - > https://tanlged.org/stevedylan.dev/sequoia 34 + > https://tangled.org/stevedylan.dev/sequoia 35 35 `, 36 - version: "0.1.0", 36 + version: "0.2.1", 37 37 cmds: { 38 38 auth: authCommand, 39 39 init: initCommand,
+496 -268
packages/cli/src/lib/atproto.ts
··· 1 1 import { AtpAgent } from "@atproto/api"; 2 - import * as path from "path"; 3 - import type { Credentials, BlogPost, BlobObject, PublisherConfig } from "./types"; 2 + import * as mimeTypes from "mime-types"; 3 + import * as fs from "node:fs/promises"; 4 + import * as path from "node:path"; 4 5 import { stripMarkdownForText } from "./markdown"; 6 + import type { 7 + BlobObject, 8 + BlogPost, 9 + Credentials, 10 + PublisherConfig, 11 + StrongRef, 12 + } from "./types"; 13 + 14 + async function fileExists(filePath: string): Promise<boolean> { 15 + try { 16 + await fs.access(filePath); 17 + return true; 18 + } catch { 19 + return false; 20 + } 21 + } 5 22 6 23 export async function resolveHandleToPDS(handle: string): Promise<string> { 7 - // First, resolve the handle to a DID 8 - let did: string; 24 + // First, resolve the handle to a DID 25 + let did: string; 9 26 10 - if (handle.startsWith("did:")) { 11 - did = handle; 12 - } else { 13 - // Try to resolve handle via Bluesky API 14 - const resolveUrl = `https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${encodeURIComponent(handle)}`; 15 - const resolveResponse = await fetch(resolveUrl); 16 - if (!resolveResponse.ok) { 17 - throw new Error("Could not resolve handle"); 18 - } 19 - const resolveData = (await resolveResponse.json()) as { did: string }; 20 - did = resolveData.did; 21 - } 27 + if (handle.startsWith("did:")) { 28 + did = handle; 29 + } else { 30 + // Try to resolve handle via Bluesky API 31 + const resolveUrl = `https://public.api.bsky.app/xrpc/com.atproto.identity.resolveHandle?handle=${encodeURIComponent(handle)}`; 32 + const resolveResponse = await fetch(resolveUrl); 33 + if (!resolveResponse.ok) { 34 + throw new Error("Could not resolve handle"); 35 + } 36 + const resolveData = (await resolveResponse.json()) as { did: string }; 37 + did = resolveData.did; 38 + } 22 39 23 - // Now resolve the DID to get the PDS URL from the DID document 24 - let pdsUrl: string | undefined; 
40 + // Now resolve the DID to get the PDS URL from the DID document 41 + let pdsUrl: string | undefined; 25 42 26 - if (did.startsWith("did:plc:")) { 27 - // Fetch DID document from plc.directory 28 - const didDocUrl = `https://plc.directory/${did}`; 29 - const didDocResponse = await fetch(didDocUrl); 30 - if (!didDocResponse.ok) { 31 - throw new Error("Could not fetch DID document"); 32 - } 33 - const didDoc = (await didDocResponse.json()) as { 34 - service?: Array<{ id: string; type: string; serviceEndpoint: string }>; 35 - }; 43 + if (did.startsWith("did:plc:")) { 44 + // Fetch DID document from plc.directory 45 + const didDocUrl = `https://plc.directory/${did}`; 46 + const didDocResponse = await fetch(didDocUrl); 47 + if (!didDocResponse.ok) { 48 + throw new Error("Could not fetch DID document"); 49 + } 50 + const didDoc = (await didDocResponse.json()) as { 51 + service?: Array<{ id: string; type: string; serviceEndpoint: string }>; 52 + }; 36 53 37 - // Find the PDS service endpoint 38 - const pdsService = didDoc.service?.find( 39 - (s) => s.id === "#atproto_pds" || s.type === "AtprotoPersonalDataServer", 40 - ); 41 - pdsUrl = pdsService?.serviceEndpoint; 42 - } else if (did.startsWith("did:web:")) { 43 - // For did:web, fetch the DID document from the domain 44 - const domain = did.replace("did:web:", ""); 45 - const didDocUrl = `https://${domain}/.well-known/did.json`; 46 - const didDocResponse = await fetch(didDocUrl); 47 - if (!didDocResponse.ok) { 48 - throw new Error("Could not fetch DID document"); 49 - } 50 - const didDoc = (await didDocResponse.json()) as { 51 - service?: Array<{ id: string; type: string; serviceEndpoint: string }>; 52 - }; 54 + // Find the PDS service endpoint 55 + const pdsService = didDoc.service?.find( 56 + (s) => s.id === "#atproto_pds" || s.type === "AtprotoPersonalDataServer", 57 + ); 58 + pdsUrl = pdsService?.serviceEndpoint; 59 + } else if (did.startsWith("did:web:")) { 60 + // For did:web, fetch the DID document from the 
domain 61 + const domain = did.replace("did:web:", ""); 62 + const didDocUrl = `https://${domain}/.well-known/did.json`; 63 + const didDocResponse = await fetch(didDocUrl); 64 + if (!didDocResponse.ok) { 65 + throw new Error("Could not fetch DID document"); 66 + } 67 + const didDoc = (await didDocResponse.json()) as { 68 + service?: Array<{ id: string; type: string; serviceEndpoint: string }>; 69 + }; 53 70 54 - const pdsService = didDoc.service?.find( 55 - (s) => s.id === "#atproto_pds" || s.type === "AtprotoPersonalDataServer", 56 - ); 57 - pdsUrl = pdsService?.serviceEndpoint; 58 - } 71 + const pdsService = didDoc.service?.find( 72 + (s) => s.id === "#atproto_pds" || s.type === "AtprotoPersonalDataServer", 73 + ); 74 + pdsUrl = pdsService?.serviceEndpoint; 75 + } 59 76 60 - if (!pdsUrl) { 61 - throw new Error("Could not find PDS URL for user"); 62 - } 77 + if (!pdsUrl) { 78 + throw new Error("Could not find PDS URL for user"); 79 + } 63 80 64 - return pdsUrl; 81 + return pdsUrl; 65 82 } 66 83 67 84 export interface CreatePublicationOptions { 68 - url: string; 69 - name: string; 70 - description?: string; 71 - iconPath?: string; 72 - showInDiscover?: boolean; 85 + url: string; 86 + name: string; 87 + description?: string; 88 + iconPath?: string; 89 + showInDiscover?: boolean; 73 90 } 74 91 75 92 export async function createAgent(credentials: Credentials): Promise<AtpAgent> { 76 - const agent = new AtpAgent({ service: credentials.pdsUrl }); 93 + const agent = new AtpAgent({ service: credentials.pdsUrl }); 77 94 78 - await agent.login({ 79 - identifier: credentials.identifier, 80 - password: credentials.password, 81 - }); 95 + await agent.login({ 96 + identifier: credentials.identifier, 97 + password: credentials.password, 98 + }); 82 99 83 - return agent; 100 + return agent; 84 101 } 85 102 86 103 export async function uploadImage( 87 - agent: AtpAgent, 88 - imagePath: string 104 + agent: AtpAgent, 105 + imagePath: string, 89 106 ): Promise<BlobObject | undefined> 
{ 90 - const file = Bun.file(imagePath); 107 + if (!(await fileExists(imagePath))) { 108 + return undefined; 109 + } 91 110 92 - if (!(await file.exists())) { 93 - return undefined; 94 - } 111 + try { 112 + const imageBuffer = await fs.readFile(imagePath); 113 + const mimeType = mimeTypes.lookup(imagePath) || "application/octet-stream"; 95 114 96 - try { 97 - const imageBuffer = await file.arrayBuffer(); 98 - const mimeType = file.type || "application/octet-stream"; 115 + const response = await agent.com.atproto.repo.uploadBlob( 116 + new Uint8Array(imageBuffer), 117 + { 118 + encoding: mimeType, 119 + }, 120 + ); 99 121 100 - const response = await agent.com.atproto.repo.uploadBlob( 101 - new Uint8Array(imageBuffer), 102 - { 103 - encoding: mimeType, 104 - } 105 - ); 106 - 107 - return { 108 - $type: "blob", 109 - ref: { 110 - $link: response.data.blob.ref.toString(), 111 - }, 112 - mimeType, 113 - size: imageBuffer.byteLength, 114 - }; 115 - } catch (error) { 116 - console.error(`Error uploading image ${imagePath}:`, error); 117 - return undefined; 118 - } 122 + return { 123 + $type: "blob", 124 + ref: { 125 + $link: response.data.blob.ref.toString(), 126 + }, 127 + mimeType, 128 + size: imageBuffer.byteLength, 129 + }; 130 + } catch (error) { 131 + console.error(`Error uploading image ${imagePath}:`, error); 132 + return undefined; 133 + } 119 134 } 120 135 121 - export function resolveImagePath( 122 - ogImage: string, 123 - imagesDir: string | undefined, 124 - contentDir: string 125 - ): string | null { 126 - // Try multiple resolution strategies 127 - const filename = path.basename(ogImage); 136 + export async function resolveImagePath( 137 + ogImage: string, 138 + imagesDir: string | undefined, 139 + contentDir: string, 140 + ): Promise<string | null> { 141 + // Try multiple resolution strategies 142 + const filename = path.basename(ogImage); 128 143 129 - // 1. 
If imagesDir is specified, look there 130 - if (imagesDir) { 131 - const imagePath = path.join(imagesDir, filename); 132 - try { 133 - const stat = Bun.file(imagePath); 134 - if (stat.size > 0) { 135 - return imagePath; 136 - } 137 - } catch { 138 - // File doesn't exist, continue 139 - } 140 - } 144 + // 1. If imagesDir is specified, look there 145 + if (imagesDir) { 146 + const imagePath = path.join(imagesDir, filename); 147 + if (await fileExists(imagePath)) { 148 + const stat = await fs.stat(imagePath); 149 + if (stat.size > 0) { 150 + return imagePath; 151 + } 152 + } 153 + } 141 154 142 - // 2. Try the ogImage path directly (if it's absolute) 143 - if (path.isAbsolute(ogImage)) { 144 - return ogImage; 145 - } 155 + // 2. Try the ogImage path directly (if it's absolute) 156 + if (path.isAbsolute(ogImage)) { 157 + return ogImage; 158 + } 146 159 147 - // 3. Try relative to content directory 148 - const contentRelative = path.join(contentDir, ogImage); 149 - try { 150 - const stat = Bun.file(contentRelative); 151 - if (stat.size > 0) { 152 - return contentRelative; 153 - } 154 - } catch { 155 - // File doesn't exist 156 - } 160 + // 3. 
Try relative to content directory 161 + const contentRelative = path.join(contentDir, ogImage); 162 + if (await fileExists(contentRelative)) { 163 + const stat = await fs.stat(contentRelative); 164 + if (stat.size > 0) { 165 + return contentRelative; 166 + } 167 + } 157 168 158 - return null; 169 + return null; 159 170 } 160 171 161 172 export async function createDocument( 162 - agent: AtpAgent, 163 - post: BlogPost, 164 - config: PublisherConfig, 165 - coverImage?: BlobObject 173 + agent: AtpAgent, 174 + post: BlogPost, 175 + config: PublisherConfig, 176 + coverImage?: BlobObject, 166 177 ): Promise<string> { 167 - const pathPrefix = config.pathPrefix || "/posts"; 168 - const postPath = `${pathPrefix}/${post.slug}`; 169 - const textContent = stripMarkdownForText(post.content); 170 - const publishDate = new Date(post.frontmatter.publishDate); 178 + const pathPrefix = config.pathPrefix || "/posts"; 179 + const postPath = `${pathPrefix}/${post.slug}`; 180 + const publishDate = new Date(post.frontmatter.publishDate); 181 + 182 + // Determine textContent: use configured field from frontmatter, or fallback to markdown body 183 + let textContent: string; 184 + if ( 185 + config.textContentField && 186 + post.rawFrontmatter?.[config.textContentField] 187 + ) { 188 + textContent = String(post.rawFrontmatter[config.textContentField]); 189 + } else { 190 + textContent = stripMarkdownForText(post.content); 191 + } 192 + 193 + const record: Record<string, unknown> = { 194 + $type: "site.standard.document", 195 + title: post.frontmatter.title, 196 + site: config.publicationUri, 197 + path: postPath, 198 + textContent: textContent.slice(0, 10000), 199 + publishedAt: publishDate.toISOString(), 200 + canonicalUrl: `${config.siteUrl}${postPath}`, 201 + }; 171 202 172 - const record: Record<string, unknown> = { 173 - $type: "site.standard.document", 174 - title: post.frontmatter.title, 175 - site: config.publicationUri, 176 - path: postPath, 177 - textContent: textContent.slice(0, 
10000), 178 - publishedAt: publishDate.toISOString(), 179 - canonicalUrl: `${config.siteUrl}${postPath}`, 180 - }; 203 + if (post.frontmatter.description) { 204 + record.description = post.frontmatter.description; 205 + } 181 206 182 - if (coverImage) { 183 - record.coverImage = coverImage; 184 - } 207 + if (coverImage) { 208 + record.coverImage = coverImage; 209 + } 185 210 186 - if (post.frontmatter.tags && post.frontmatter.tags.length > 0) { 187 - record.tags = post.frontmatter.tags; 188 - } 211 + if (post.frontmatter.tags && post.frontmatter.tags.length > 0) { 212 + record.tags = post.frontmatter.tags; 213 + } 189 214 190 - const response = await agent.com.atproto.repo.createRecord({ 191 - repo: agent.session!.did, 192 - collection: "site.standard.document", 193 - record, 194 - }); 215 + const response = await agent.com.atproto.repo.createRecord({ 216 + repo: agent.session!.did, 217 + collection: "site.standard.document", 218 + record, 219 + }); 195 220 196 - return response.data.uri; 221 + return response.data.uri; 197 222 } 198 223 199 224 export async function updateDocument( 200 - agent: AtpAgent, 201 - post: BlogPost, 202 - atUri: string, 203 - config: PublisherConfig, 204 - coverImage?: BlobObject 225 + agent: AtpAgent, 226 + post: BlogPost, 227 + atUri: string, 228 + config: PublisherConfig, 229 + coverImage?: BlobObject, 205 230 ): Promise<void> { 206 - // Parse the atUri to get the collection and rkey 207 - // Format: at://did:plc:xxx/collection/rkey 208 - const uriMatch = atUri.match(/^at:\/\/([^/]+)\/([^/]+)\/(.+)$/); 209 - if (!uriMatch) { 210 - throw new Error(`Invalid atUri format: ${atUri}`); 211 - } 231 + // Parse the atUri to get the collection and rkey 232 + // Format: at://did:plc:xxx/collection/rkey 233 + const uriMatch = atUri.match(/^at:\/\/([^/]+)\/([^/]+)\/(.+)$/); 234 + if (!uriMatch) { 235 + throw new Error(`Invalid atUri format: ${atUri}`); 236 + } 212 237 213 - const [, , collection, rkey] = uriMatch; 238 + const [, , collection, 
rkey] = uriMatch; 214 239 215 - const pathPrefix = config.pathPrefix || "/posts"; 216 - const postPath = `${pathPrefix}/${post.slug}`; 217 - const textContent = stripMarkdownForText(post.content); 218 - const publishDate = new Date(post.frontmatter.publishDate); 240 + const pathPrefix = config.pathPrefix || "/posts"; 241 + const postPath = `${pathPrefix}/${post.slug}`; 242 + const publishDate = new Date(post.frontmatter.publishDate); 219 243 220 - const record: Record<string, unknown> = { 221 - $type: "site.standard.document", 222 - title: post.frontmatter.title, 223 - site: config.publicationUri, 224 - path: postPath, 225 - textContent: textContent.slice(0, 10000), 226 - publishedAt: publishDate.toISOString(), 227 - canonicalUrl: `${config.siteUrl}${postPath}`, 228 - }; 244 + // Determine textContent: use configured field from frontmatter, or fallback to markdown body 245 + let textContent: string; 246 + if ( 247 + config.textContentField && 248 + post.rawFrontmatter?.[config.textContentField] 249 + ) { 250 + textContent = String(post.rawFrontmatter[config.textContentField]); 251 + } else { 252 + textContent = stripMarkdownForText(post.content); 253 + } 229 254 230 - if (coverImage) { 231 - record.coverImage = coverImage; 232 - } 255 + const record: Record<string, unknown> = { 256 + $type: "site.standard.document", 257 + title: post.frontmatter.title, 258 + site: config.publicationUri, 259 + path: postPath, 260 + textContent: textContent.slice(0, 10000), 261 + publishedAt: publishDate.toISOString(), 262 + canonicalUrl: `${config.siteUrl}${postPath}`, 263 + }; 233 264 234 - if (post.frontmatter.tags && post.frontmatter.tags.length > 0) { 235 - record.tags = post.frontmatter.tags; 236 - } 265 + if (post.frontmatter.description) { 266 + record.description = post.frontmatter.description; 267 + } 237 268 238 - await agent.com.atproto.repo.putRecord({ 239 - repo: agent.session!.did, 240 - collection: collection!, 241 - rkey: rkey!, 242 - record, 243 - }); 269 + if 
(coverImage) { 270 + record.coverImage = coverImage; 271 + } 272 + 273 + if (post.frontmatter.tags && post.frontmatter.tags.length > 0) { 274 + record.tags = post.frontmatter.tags; 275 + } 276 + 277 + await agent.com.atproto.repo.putRecord({ 278 + repo: agent.session!.did, 279 + collection: collection!, 280 + rkey: rkey!, 281 + record, 282 + }); 244 283 } 245 284 246 - export function parseAtUri(atUri: string): { did: string; collection: string; rkey: string } | null { 247 - const match = atUri.match(/^at:\/\/([^/]+)\/([^/]+)\/(.+)$/); 248 - if (!match) return null; 249 - return { 250 - did: match[1]!, 251 - collection: match[2]!, 252 - rkey: match[3]!, 253 - }; 285 + export function parseAtUri( 286 + atUri: string, 287 + ): { did: string; collection: string; rkey: string } | null { 288 + const match = atUri.match(/^at:\/\/([^/]+)\/([^/]+)\/(.+)$/); 289 + if (!match) return null; 290 + return { 291 + did: match[1]!, 292 + collection: match[2]!, 293 + rkey: match[3]!, 294 + }; 254 295 } 255 296 256 297 export interface DocumentRecord { 257 - $type: "site.standard.document"; 258 - title: string; 259 - site: string; 260 - path: string; 261 - textContent: string; 262 - publishedAt: string; 263 - canonicalUrl?: string; 264 - coverImage?: BlobObject; 265 - tags?: string[]; 266 - location?: string; 298 + $type: "site.standard.document"; 299 + title: string; 300 + site: string; 301 + path: string; 302 + textContent: string; 303 + publishedAt: string; 304 + canonicalUrl?: string; 305 + description?: string; 306 + coverImage?: BlobObject; 307 + tags?: string[]; 308 + location?: string; 267 309 } 268 310 269 311 export interface ListDocumentsResult { 270 - uri: string; 271 - cid: string; 272 - value: DocumentRecord; 312 + uri: string; 313 + cid: string; 314 + value: DocumentRecord; 273 315 } 274 316 275 317 export async function listDocuments( 276 - agent: AtpAgent, 277 - publicationUri?: string 318 + agent: AtpAgent, 319 + publicationUri?: string, 278 320 ): 
Promise<ListDocumentsResult[]> { 279 - const documents: ListDocumentsResult[] = []; 280 - let cursor: string | undefined; 321 + const documents: ListDocumentsResult[] = []; 322 + let cursor: string | undefined; 281 323 282 - do { 283 - const response = await agent.com.atproto.repo.listRecords({ 284 - repo: agent.session!.did, 285 - collection: "site.standard.document", 286 - limit: 100, 287 - cursor, 288 - }); 324 + do { 325 + const response = await agent.com.atproto.repo.listRecords({ 326 + repo: agent.session!.did, 327 + collection: "site.standard.document", 328 + limit: 100, 329 + cursor, 330 + }); 289 331 290 - for (const record of response.data.records) { 291 - const value = record.value as unknown as DocumentRecord; 332 + for (const record of response.data.records) { 333 + const value = record.value as unknown as DocumentRecord; 292 334 293 - // If publicationUri is specified, only include documents from that publication 294 - if (publicationUri && value.site !== publicationUri) { 295 - continue; 296 - } 335 + // If publicationUri is specified, only include documents from that publication 336 + if (publicationUri && value.site !== publicationUri) { 337 + continue; 338 + } 297 339 298 - documents.push({ 299 - uri: record.uri, 300 - cid: record.cid, 301 - value, 302 - }); 303 - } 340 + documents.push({ 341 + uri: record.uri, 342 + cid: record.cid, 343 + value, 344 + }); 345 + } 304 346 305 - cursor = response.data.cursor; 306 - } while (cursor); 347 + cursor = response.data.cursor; 348 + } while (cursor); 307 349 308 - return documents; 350 + return documents; 309 351 } 310 352 311 353 export async function createPublication( 312 - agent: AtpAgent, 313 - options: CreatePublicationOptions 354 + agent: AtpAgent, 355 + options: CreatePublicationOptions, 314 356 ): Promise<string> { 315 - let icon: BlobObject | undefined; 357 + let icon: BlobObject | undefined; 316 358 317 - if (options.iconPath) { 318 - icon = await uploadImage(agent, options.iconPath); 319 - } 
359 + if (options.iconPath) { 360 + icon = await uploadImage(agent, options.iconPath); 361 + } 320 362 321 - const record: Record<string, unknown> = { 322 - $type: "site.standard.publication", 323 - url: options.url, 324 - name: options.name, 325 - createdAt: new Date().toISOString(), 326 - }; 363 + const record: Record<string, unknown> = { 364 + $type: "site.standard.publication", 365 + url: options.url, 366 + name: options.name, 367 + createdAt: new Date().toISOString(), 368 + }; 327 369 328 - if (options.description) { 329 - record.description = options.description; 330 - } 370 + if (options.description) { 371 + record.description = options.description; 372 + } 331 373 332 - if (icon) { 333 - record.icon = icon; 334 - } 374 + if (icon) { 375 + record.icon = icon; 376 + } 335 377 336 - if (options.showInDiscover !== undefined) { 337 - record.preferences = { 338 - showInDiscover: options.showInDiscover, 339 - }; 340 - } 378 + if (options.showInDiscover !== undefined) { 379 + record.preferences = { 380 + showInDiscover: options.showInDiscover, 381 + }; 382 + } 341 383 342 - const response = await agent.com.atproto.repo.createRecord({ 343 - repo: agent.session!.did, 344 - collection: "site.standard.publication", 345 - record, 346 - }); 384 + const response = await agent.com.atproto.repo.createRecord({ 385 + repo: agent.session!.did, 386 + collection: "site.standard.publication", 387 + record, 388 + }); 347 389 348 - return response.data.uri; 390 + return response.data.uri; 391 + } 392 + 393 + // --- Bluesky Post Creation --- 394 + 395 + export interface CreateBlueskyPostOptions { 396 + title: string; 397 + description?: string; 398 + canonicalUrl: string; 399 + coverImage?: BlobObject; 400 + publishedAt: string; // Used as createdAt for the post 401 + } 402 + 403 + /** 404 + * Count graphemes in a string (for Bluesky's 300 grapheme limit) 405 + */ 406 + function countGraphemes(str: string): number { 407 + // Use Intl.Segmenter if available, otherwise fallback to 
spread operator 408 + if (typeof Intl !== "undefined" && Intl.Segmenter) { 409 + const segmenter = new Intl.Segmenter("en", { granularity: "grapheme" }); 410 + return [...segmenter.segment(str)].length; 411 + } 412 + return [...str].length; 413 + } 414 + 415 + /** 416 + * Truncate a string to a maximum number of graphemes 417 + */ 418 + function truncateToGraphemes(str: string, maxGraphemes: number): string { 419 + if (typeof Intl !== "undefined" && Intl.Segmenter) { 420 + const segmenter = new Intl.Segmenter("en", { granularity: "grapheme" }); 421 + const segments = [...segmenter.segment(str)]; 422 + if (segments.length <= maxGraphemes) return str; 423 + return `${segments 424 + .slice(0, maxGraphemes - 3) 425 + .map((s) => s.segment) 426 + .join("")}...`; 427 + } 428 + // Fallback 429 + const chars = [...str]; 430 + if (chars.length <= maxGraphemes) return str; 431 + return `${chars.slice(0, maxGraphemes - 3).join("")}...`; 432 + } 433 + 434 + /** 435 + * Create a Bluesky post with external link embed 436 + */ 437 + export async function createBlueskyPost( 438 + agent: AtpAgent, 439 + options: CreateBlueskyPostOptions, 440 + ): Promise<StrongRef> { 441 + const { title, description, canonicalUrl, coverImage, publishedAt } = options; 442 + 443 + // Build post text: title + description + URL 444 + // Max 300 graphemes for Bluesky posts 445 + const MAX_GRAPHEMES = 300; 446 + 447 + let postText: string; 448 + const urlPart = `\n\n${canonicalUrl}`; 449 + const urlGraphemes = countGraphemes(urlPart); 450 + 451 + if (description) { 452 + // Try: title + description + URL 453 + const fullText = `${title}\n\n${description}${urlPart}`; 454 + if (countGraphemes(fullText) <= MAX_GRAPHEMES) { 455 + postText = fullText; 456 + } else { 457 + // Truncate description to fit 458 + const availableForDesc = 459 + MAX_GRAPHEMES - 460 + countGraphemes(title) - 461 + countGraphemes("\n\n") - 462 + urlGraphemes - 463 + countGraphemes("\n\n"); 464 + if (availableForDesc > 10) { 465 + 
const truncatedDesc = truncateToGraphemes( 466 + description, 467 + availableForDesc, 468 + ); 469 + postText = `${title}\n\n${truncatedDesc}${urlPart}`; 470 + } else { 471 + // Just title + URL 472 + postText = `${title}${urlPart}`; 473 + } 474 + } 475 + } else { 476 + // Just title + URL 477 + postText = `${title}${urlPart}`; 478 + } 479 + 480 + // Final truncation if still too long (shouldn't happen but safety check) 481 + if (countGraphemes(postText) > MAX_GRAPHEMES) { 482 + postText = truncateToGraphemes(postText, MAX_GRAPHEMES); 483 + } 484 + 485 + // Calculate byte indices for the URL facet 486 + const encoder = new TextEncoder(); 487 + const urlStartInText = postText.lastIndexOf(canonicalUrl); 488 + const beforeUrl = postText.substring(0, urlStartInText); 489 + const byteStart = encoder.encode(beforeUrl).length; 490 + const byteEnd = byteStart + encoder.encode(canonicalUrl).length; 491 + 492 + // Build facets for the URL link 493 + const facets = [ 494 + { 495 + index: { 496 + byteStart, 497 + byteEnd, 498 + }, 499 + features: [ 500 + { 501 + $type: "app.bsky.richtext.facet#link", 502 + uri: canonicalUrl, 503 + }, 504 + ], 505 + }, 506 + ]; 507 + 508 + // Build external embed 509 + const embed: Record<string, unknown> = { 510 + $type: "app.bsky.embed.external", 511 + external: { 512 + uri: canonicalUrl, 513 + title: title.substring(0, 500), // Max 500 chars for title 514 + description: (description || "").substring(0, 1000), // Max 1000 chars for description 515 + }, 516 + }; 517 + 518 + // Add thumbnail if coverImage is available 519 + if (coverImage) { 520 + (embed.external as Record<string, unknown>).thumb = coverImage; 521 + } 522 + 523 + // Create the post record 524 + const record: Record<string, unknown> = { 525 + $type: "app.bsky.feed.post", 526 + text: postText, 527 + facets, 528 + embed, 529 + createdAt: new Date(publishedAt).toISOString(), 530 + }; 531 + 532 + const response = await agent.com.atproto.repo.createRecord({ 533 + repo: 
agent.session!.did, 534 + collection: "app.bsky.feed.post", 535 + record, 536 + }); 537 + 538 + return { 539 + uri: response.data.uri, 540 + cid: response.data.cid, 541 + }; 542 + } 543 + 544 + /** 545 + * Add bskyPostRef to an existing document record 546 + */ 547 + export async function addBskyPostRefToDocument( 548 + agent: AtpAgent, 549 + documentAtUri: string, 550 + bskyPostRef: StrongRef, 551 + ): Promise<void> { 552 + const parsed = parseAtUri(documentAtUri); 553 + if (!parsed) { 554 + throw new Error(`Invalid document URI: ${documentAtUri}`); 555 + } 556 + 557 + // Fetch existing record 558 + const existingRecord = await agent.com.atproto.repo.getRecord({ 559 + repo: parsed.did, 560 + collection: parsed.collection, 561 + rkey: parsed.rkey, 562 + }); 563 + 564 + // Add bskyPostRef to the record 565 + const updatedRecord = { 566 + ...(existingRecord.data.value as Record<string, unknown>), 567 + bskyPostRef, 568 + }; 569 + 570 + // Update the record 571 + await agent.com.atproto.repo.putRecord({ 572 + repo: parsed.did, 573 + collection: parsed.collection, 574 + rkey: parsed.rkey, 575 + record: updatedRecord, 576 + }); 349 577 }
+36 -10
packages/cli/src/lib/config.ts
··· 1 - import * as path from "path"; 2 - import type { PublisherConfig, PublisherState, FrontmatterMapping } from "./types"; 1 + import * as fs from "node:fs/promises"; 2 + import * as path from "node:path"; 3 + import type { 4 + PublisherConfig, 5 + PublisherState, 6 + FrontmatterMapping, 7 + BlueskyConfig, 8 + } from "./types"; 3 9 4 10 const CONFIG_FILENAME = "sequoia.json"; 5 11 const STATE_FILENAME = ".sequoia-state.json"; 6 12 13 + async function fileExists(filePath: string): Promise<boolean> { 14 + try { 15 + await fs.access(filePath); 16 + return true; 17 + } catch { 18 + return false; 19 + } 20 + } 21 + 7 22 export async function findConfig( 8 23 startDir: string = process.cwd(), 9 24 ): Promise<string | null> { ··· 11 26 12 27 while (true) { 13 28 const configPath = path.join(currentDir, CONFIG_FILENAME); 14 - const file = Bun.file(configPath); 15 29 16 - if (await file.exists()) { 30 + if (await fileExists(configPath)) { 17 31 return configPath; 18 32 } 19 33 ··· 38 52 } 39 53 40 54 try { 41 - const file = Bun.file(resolvedPath); 42 - const content = await file.text(); 55 + const content = await fs.readFile(resolvedPath, "utf-8"); 43 56 const config = JSON.parse(content) as PublisherConfig; 44 57 45 58 // Validate required fields ··· 68 81 pdsUrl?: string; 69 82 frontmatter?: FrontmatterMapping; 70 83 ignore?: string[]; 84 + removeIndexFromSlug?: boolean; 85 + textContentField?: string; 86 + bluesky?: BlueskyConfig; 71 87 }): string { 72 88 const config: Record<string, unknown> = { 73 89 siteUrl: options.siteUrl, ··· 104 120 config.ignore = options.ignore; 105 121 } 106 122 123 + if (options.removeIndexFromSlug) { 124 + config.removeIndexFromSlug = options.removeIndexFromSlug; 125 + } 126 + 127 + if (options.textContentField) { 128 + config.textContentField = options.textContentField; 129 + } 130 + if (options.bluesky) { 131 + config.bluesky = options.bluesky; 132 + } 133 + 107 134 return JSON.stringify(config, null, 2); 108 135 } 109 136 110 137 export 
async function loadState(configDir: string): Promise<PublisherState> { 111 138 const statePath = path.join(configDir, STATE_FILENAME); 112 - const file = Bun.file(statePath); 113 139 114 - if (!(await file.exists())) { 140 + if (!(await fileExists(statePath))) { 115 141 return { posts: {} }; 116 142 } 117 143 118 144 try { 119 - const content = await file.text(); 145 + const content = await fs.readFile(statePath, "utf-8"); 120 146 return JSON.parse(content) as PublisherState; 121 147 } catch { 122 148 return { posts: {} }; ··· 128 154 state: PublisherState, 129 155 ): Promise<void> { 130 156 const statePath = path.join(configDir, STATE_FILENAME); 131 - await Bun.write(statePath, JSON.stringify(state, null, 2)); 157 + await fs.writeFile(statePath, JSON.stringify(state, null, 2)); 132 158 } 133 159 134 160 export function getStatePath(configDir: string): string {
+93 -84
packages/cli/src/lib/credentials.ts
··· 1 - import * as path from "path"; 2 - import * as os from "os"; 1 + import * as fs from "node:fs/promises"; 2 + import * as os from "node:os"; 3 + import * as path from "node:path"; 3 4 import type { Credentials } from "./types"; 4 5 5 6 const CONFIG_DIR = path.join(os.homedir(), ".config", "sequoia"); ··· 8 9 // Stored credentials keyed by identifier 9 10 type CredentialsStore = Record<string, Credentials>; 10 11 12 + async function fileExists(filePath: string): Promise<boolean> { 13 + try { 14 + await fs.access(filePath); 15 + return true; 16 + } catch { 17 + return false; 18 + } 19 + } 20 + 11 21 /** 12 22 * Load all stored credentials 13 23 */ 14 24 async function loadCredentialsStore(): Promise<CredentialsStore> { 15 - const file = Bun.file(CREDENTIALS_FILE); 16 - if (!(await file.exists())) { 17 - return {}; 18 - } 25 + if (!(await fileExists(CREDENTIALS_FILE))) { 26 + return {}; 27 + } 19 28 20 - try { 21 - const content = await file.text(); 22 - const parsed = JSON.parse(content); 29 + try { 30 + const content = await fs.readFile(CREDENTIALS_FILE, "utf-8"); 31 + const parsed = JSON.parse(content); 23 32 24 - // Handle legacy single-credential format (migrate on read) 25 - if (parsed.identifier && parsed.password) { 26 - const legacy = parsed as Credentials; 27 - return { [legacy.identifier]: legacy }; 28 - } 33 + // Handle legacy single-credential format (migrate on read) 34 + if (parsed.identifier && parsed.password) { 35 + const legacy = parsed as Credentials; 36 + return { [legacy.identifier]: legacy }; 37 + } 29 38 30 - return parsed as CredentialsStore; 31 - } catch { 32 - return {}; 33 - } 39 + return parsed as CredentialsStore; 40 + } catch { 41 + return {}; 42 + } 34 43 } 35 44 36 45 /** 37 46 * Save the entire credentials store 38 47 */ 39 48 async function saveCredentialsStore(store: CredentialsStore): Promise<void> { 40 - await Bun.$`mkdir -p ${CONFIG_DIR}`; 41 - await Bun.write(CREDENTIALS_FILE, JSON.stringify(store, null, 2)); 42 - await 
Bun.$`chmod 600 ${CREDENTIALS_FILE}`; 49 + await fs.mkdir(CONFIG_DIR, { recursive: true }); 50 + await fs.writeFile(CREDENTIALS_FILE, JSON.stringify(store, null, 2)); 51 + await fs.chmod(CREDENTIALS_FILE, 0o600); 43 52 } 44 53 45 54 /** ··· 53 62 * 5. Return null (caller should prompt user) 54 63 */ 55 64 export async function loadCredentials( 56 - projectIdentity?: string 65 + projectIdentity?: string, 57 66 ): Promise<Credentials | null> { 58 - // 1. Check environment variables first (full override) 59 - const envIdentifier = process.env.ATP_IDENTIFIER; 60 - const envPassword = process.env.ATP_APP_PASSWORD; 61 - const envPdsUrl = process.env.PDS_URL; 67 + // 1. Check environment variables first (full override) 68 + const envIdentifier = process.env.ATP_IDENTIFIER; 69 + const envPassword = process.env.ATP_APP_PASSWORD; 70 + const envPdsUrl = process.env.PDS_URL; 62 71 63 - if (envIdentifier && envPassword) { 64 - return { 65 - identifier: envIdentifier, 66 - password: envPassword, 67 - pdsUrl: envPdsUrl || "https://bsky.social", 68 - }; 69 - } 72 + if (envIdentifier && envPassword) { 73 + return { 74 + identifier: envIdentifier, 75 + password: envPassword, 76 + pdsUrl: envPdsUrl || "https://bsky.social", 77 + }; 78 + } 70 79 71 - const store = await loadCredentialsStore(); 72 - const identifiers = Object.keys(store); 80 + const store = await loadCredentialsStore(); 81 + const identifiers = Object.keys(store); 73 82 74 - if (identifiers.length === 0) { 75 - return null; 76 - } 83 + if (identifiers.length === 0) { 84 + return null; 85 + } 77 86 78 - // 2. SEQUOIA_PROFILE env var 79 - const profileEnv = process.env.SEQUOIA_PROFILE; 80 - if (profileEnv && store[profileEnv]) { 81 - return store[profileEnv]; 82 - } 87 + // 2. SEQUOIA_PROFILE env var 88 + const profileEnv = process.env.SEQUOIA_PROFILE; 89 + if (profileEnv && store[profileEnv]) { 90 + return store[profileEnv]; 91 + } 83 92 84 - // 3. 
Project-specific identity (from sequoia.json) 85 - if (projectIdentity && store[projectIdentity]) { 86 - return store[projectIdentity]; 87 - } 93 + // 3. Project-specific identity (from sequoia.json) 94 + if (projectIdentity && store[projectIdentity]) { 95 + return store[projectIdentity]; 96 + } 88 97 89 - // 4. If only one identity, use it 90 - if (identifiers.length === 1 && identifiers[0]) { 91 - return store[identifiers[0]] ?? null; 92 - } 98 + // 4. If only one identity, use it 99 + if (identifiers.length === 1 && identifiers[0]) { 100 + return store[identifiers[0]] ?? null; 101 + } 93 102 94 - // Multiple identities exist but none selected 95 - return null; 103 + // Multiple identities exist but none selected 104 + return null; 96 105 } 97 106 98 107 /** 99 108 * Get a specific identity by identifier 100 109 */ 101 110 export async function getCredentials( 102 - identifier: string 111 + identifier: string, 103 112 ): Promise<Credentials | null> { 104 - const store = await loadCredentialsStore(); 105 - return store[identifier] || null; 113 + const store = await loadCredentialsStore(); 114 + return store[identifier] || null; 106 115 } 107 116 108 117 /** 109 118 * List all stored identities 110 119 */ 111 120 export async function listCredentials(): Promise<string[]> { 112 - const store = await loadCredentialsStore(); 113 - return Object.keys(store); 121 + const store = await loadCredentialsStore(); 122 + return Object.keys(store); 114 123 } 115 124 116 125 /** 117 126 * Save credentials for an identity (adds or updates) 118 127 */ 119 128 export async function saveCredentials(credentials: Credentials): Promise<void> { 120 - const store = await loadCredentialsStore(); 121 - store[credentials.identifier] = credentials; 122 - await saveCredentialsStore(store); 129 + const store = await loadCredentialsStore(); 130 + store[credentials.identifier] = credentials; 131 + await saveCredentialsStore(store); 123 132 } 124 133 125 134 /** 126 135 * Delete credentials for a 
specific identity 127 136 */ 128 137 export async function deleteCredentials(identifier?: string): Promise<boolean> { 129 - const store = await loadCredentialsStore(); 130 - const identifiers = Object.keys(store); 138 + const store = await loadCredentialsStore(); 139 + const identifiers = Object.keys(store); 131 140 132 - if (identifiers.length === 0) { 133 - return false; 134 - } 141 + if (identifiers.length === 0) { 142 + return false; 143 + } 135 144 136 - // If identifier specified, delete just that one 137 - if (identifier) { 138 - if (!store[identifier]) { 139 - return false; 140 - } 141 - delete store[identifier]; 142 - await saveCredentialsStore(store); 143 - return true; 144 - } 145 + // If identifier specified, delete just that one 146 + if (identifier) { 147 + if (!store[identifier]) { 148 + return false; 149 + } 150 + delete store[identifier]; 151 + await saveCredentialsStore(store); 152 + return true; 153 + } 145 154 146 - // If only one identity, delete it (backwards compat behavior) 147 - if (identifiers.length === 1 && identifiers[0]) { 148 - delete store[identifiers[0]]; 149 - await saveCredentialsStore(store); 150 - return true; 151 - } 155 + // If only one identity, delete it (backwards compat behavior) 156 + if (identifiers.length === 1 && identifiers[0]) { 157 + delete store[identifiers[0]]; 158 + await saveCredentialsStore(store); 159 + return true; 160 + } 152 161 153 - // Multiple identities but none specified 154 - return false; 162 + // Multiple identities but none specified 163 + return false; 155 164 } 156 165 157 166 export function getCredentialsPath(): string { 158 - return CREDENTIALS_FILE; 167 + return CREDENTIALS_FILE; 159 168 }
+326 -172
packages/cli/src/lib/markdown.ts
··· 1 - import * as path from "path"; 2 - import { Glob } from "bun"; 3 - import type { PostFrontmatter, BlogPost, FrontmatterMapping } from "./types"; 1 + import * as fs from "node:fs/promises"; 2 + import * as path from "node:path"; 3 + import { glob } from "glob"; 4 + import { minimatch } from "minimatch"; 5 + import type { BlogPost, FrontmatterMapping, PostFrontmatter } from "./types"; 4 6 5 - export function parseFrontmatter(content: string, mapping?: FrontmatterMapping): { 6 - frontmatter: PostFrontmatter; 7 - body: string; 7 + export function parseFrontmatter( 8 + content: string, 9 + mapping?: FrontmatterMapping, 10 + ): { 11 + frontmatter: PostFrontmatter; 12 + body: string; 13 + rawFrontmatter: Record<string, unknown>; 8 14 } { 9 - // Support multiple frontmatter delimiters: 10 - // --- (YAML) - Jekyll, Astro, most SSGs 11 - // +++ (TOML) - Hugo 12 - // *** - Alternative format 13 - const frontmatterRegex = /^(---|\+\+\+|\*\*\*)\n([\s\S]*?)\n\1\n([\s\S]*)$/; 14 - const match = content.match(frontmatterRegex); 15 + // Support multiple frontmatter delimiters: 16 + // --- (YAML) - Jekyll, Astro, most SSGs 17 + // +++ (TOML) - Hugo 18 + // *** - Alternative format 19 + const frontmatterRegex = /^(---|\+\+\+|\*\*\*)\n([\s\S]*?)\n\1\n([\s\S]*)$/; 20 + const match = content.match(frontmatterRegex); 21 + 22 + if (!match) { 23 + throw new Error("Could not parse frontmatter"); 24 + } 15 25 16 - if (!match) { 17 - throw new Error("Could not parse frontmatter"); 18 - } 26 + const delimiter = match[1]; 27 + const frontmatterStr = match[2] ?? ""; 28 + const body = match[3] ?? ""; 19 29 20 - const delimiter = match[1]; 21 - const frontmatterStr = match[2] ?? ""; 22 - const body = match[3] ?? ""; 30 + // Determine format based on delimiter: 31 + // +++ uses TOML (key = value) 32 + // --- and *** use YAML (key: value) 33 + const isToml = delimiter === "+++"; 34 + const separator = isToml ? 
"=" : ":"; 23 35 24 - // Determine format based on delimiter: 25 - // +++ uses TOML (key = value) 26 - // --- and *** use YAML (key: value) 27 - const isToml = delimiter === "+++"; 28 - const separator = isToml ? "=" : ":"; 36 + // Parse frontmatter manually 37 + const raw: Record<string, unknown> = {}; 38 + const lines = frontmatterStr.split("\n"); 29 39 30 - // Parse frontmatter manually 31 - const raw: Record<string, unknown> = {}; 32 - const lines = frontmatterStr.split("\n"); 40 + let i = 0; 41 + while (i < lines.length) { 42 + const line = lines[i]; 43 + if (line === undefined) { 44 + i++; 45 + continue; 46 + } 47 + const sepIndex = line.indexOf(separator); 48 + if (sepIndex === -1) { 49 + i++; 50 + continue; 51 + } 33 52 34 - for (const line of lines) { 35 - const sepIndex = line.indexOf(separator); 36 - if (sepIndex === -1) continue; 53 + const key = line.slice(0, sepIndex).trim(); 54 + let value = line.slice(sepIndex + 1).trim(); 37 55 38 - const key = line.slice(0, sepIndex).trim(); 39 - let value = line.slice(sepIndex + 1).trim(); 56 + // Handle quoted strings 57 + if ( 58 + (value.startsWith('"') && value.endsWith('"')) || 59 + (value.startsWith("'") && value.endsWith("'")) 60 + ) { 61 + value = value.slice(1, -1); 62 + } 40 63 41 - // Handle quoted strings 42 - if ( 43 - (value.startsWith('"') && value.endsWith('"')) || 44 - (value.startsWith("'") && value.endsWith("'")) 45 - ) { 46 - value = value.slice(1, -1); 47 - } 64 + // Handle inline arrays (simple case for tags) 65 + if (value.startsWith("[") && value.endsWith("]")) { 66 + const arrayContent = value.slice(1, -1); 67 + raw[key] = arrayContent 68 + .split(",") 69 + .map((item) => item.trim().replace(/^["']|["']$/g, "")); 70 + } else if (value === "" && !isToml) { 71 + // Check for YAML-style multiline array (key with no value followed by - items) 72 + const arrayItems: string[] = []; 73 + let j = i + 1; 74 + while (j < lines.length) { 75 + const nextLine = lines[j]; 76 + if (nextLine === 
undefined) { 77 + j++; 78 + continue; 79 + } 80 + // Check if line is a list item (starts with whitespace and -) 81 + const listMatch = nextLine.match(/^\s+-\s*(.*)$/); 82 + if (listMatch && listMatch[1] !== undefined) { 83 + let itemValue = listMatch[1].trim(); 84 + // Remove quotes if present 85 + if ( 86 + (itemValue.startsWith('"') && itemValue.endsWith('"')) || 87 + (itemValue.startsWith("'") && itemValue.endsWith("'")) 88 + ) { 89 + itemValue = itemValue.slice(1, -1); 90 + } 91 + arrayItems.push(itemValue); 92 + j++; 93 + } else if (nextLine.trim() === "") { 94 + // Skip empty lines within the array 95 + j++; 96 + } else { 97 + // Hit a new key or non-list content 98 + break; 99 + } 100 + } 101 + if (arrayItems.length > 0) { 102 + raw[key] = arrayItems; 103 + i = j; 104 + continue; 105 + } else { 106 + raw[key] = value; 107 + } 108 + } else if (value === "true") { 109 + raw[key] = true; 110 + } else if (value === "false") { 111 + raw[key] = false; 112 + } else { 113 + raw[key] = value; 114 + } 115 + i++; 116 + } 48 117 49 - // Handle arrays (simple case for tags) 50 - if (value.startsWith("[") && value.endsWith("]")) { 51 - const arrayContent = value.slice(1, -1); 52 - raw[key] = arrayContent 53 - .split(",") 54 - .map((item) => item.trim().replace(/^["']|["']$/g, "")); 55 - } else if (value === "true") { 56 - raw[key] = true; 57 - } else if (value === "false") { 58 - raw[key] = false; 59 - } else { 60 - raw[key] = value; 61 - } 62 - } 118 + // Apply field mappings to normalize to standard PostFrontmatter fields 119 + const frontmatter: Record<string, unknown> = {}; 63 120 64 - // Apply field mappings to normalize to standard PostFrontmatter fields 65 - const frontmatter: Record<string, unknown> = {}; 121 + // Title mapping 122 + const titleField = mapping?.title || "title"; 123 + frontmatter.title = raw[titleField] || raw.title; 66 124 67 - // Title mapping 68 - const titleField = mapping?.title || "title"; 69 - frontmatter.title = raw[titleField] || 
raw.title; 125 + // Description mapping 126 + const descField = mapping?.description || "description"; 127 + frontmatter.description = raw[descField] || raw.description; 70 128 71 - // Description mapping 72 - const descField = mapping?.description || "description"; 73 - frontmatter.description = raw[descField] || raw.description; 129 + // Publish date mapping - check custom field first, then fallbacks 130 + const dateField = mapping?.publishDate; 131 + if (dateField && raw[dateField]) { 132 + frontmatter.publishDate = raw[dateField]; 133 + } else if (raw.publishDate) { 134 + frontmatter.publishDate = raw.publishDate; 135 + } else { 136 + // Fallback to common date field names 137 + const dateFields = ["pubDate", "date", "createdAt", "created_at"]; 138 + for (const field of dateFields) { 139 + if (raw[field]) { 140 + frontmatter.publishDate = raw[field]; 141 + break; 142 + } 143 + } 144 + } 74 145 75 - // Publish date mapping - check custom field first, then fallbacks 76 - const dateField = mapping?.publishDate; 77 - if (dateField && raw[dateField]) { 78 - frontmatter.publishDate = raw[dateField]; 79 - } else if (raw.publishDate) { 80 - frontmatter.publishDate = raw.publishDate; 81 - } else { 82 - // Fallback to common date field names 83 - const dateFields = ["pubDate", "date", "createdAt", "created_at"]; 84 - for (const field of dateFields) { 85 - if (raw[field]) { 86 - frontmatter.publishDate = raw[field]; 87 - break; 88 - } 89 - } 90 - } 146 + // Cover image mapping 147 + const coverField = mapping?.coverImage || "ogImage"; 148 + frontmatter.ogImage = raw[coverField] || raw.ogImage; 91 149 92 - // Cover image mapping 93 - const coverField = mapping?.coverImage || "ogImage"; 94 - frontmatter.ogImage = raw[coverField] || raw.ogImage; 150 + // Tags mapping 151 + const tagsField = mapping?.tags || "tags"; 152 + frontmatter.tags = raw[tagsField] || raw.tags; 95 153 96 - // Tags mapping 97 - const tagsField = mapping?.tags || "tags"; 98 - frontmatter.tags = 
raw[tagsField] || raw.tags; 154 + // Draft mapping 155 + const draftField = mapping?.draft || "draft"; 156 + const draftValue = raw[draftField] ?? raw.draft; 157 + if (draftValue !== undefined) { 158 + frontmatter.draft = draftValue === true || draftValue === "true"; 159 + } 99 160 100 - // Always preserve atUri (internal field) 101 - frontmatter.atUri = raw.atUri; 161 + // Always preserve atUri (internal field) 162 + frontmatter.atUri = raw.atUri; 102 163 103 - return { frontmatter: frontmatter as unknown as PostFrontmatter, body }; 164 + return { 165 + frontmatter: frontmatter as unknown as PostFrontmatter, 166 + body, 167 + rawFrontmatter: raw, 168 + }; 104 169 } 105 170 106 171 export function getSlugFromFilename(filename: string): string { 107 - return filename 108 - .replace(/\.mdx?$/, "") 109 - .toLowerCase() 110 - .replace(/\s+/g, "-"); 172 + return filename 173 + .replace(/\.mdx?$/, "") 174 + .toLowerCase() 175 + .replace(/\s+/g, "-"); 176 + } 177 + 178 + export interface SlugOptions { 179 + slugField?: string; 180 + removeIndexFromSlug?: boolean; 181 + } 182 + 183 + export function getSlugFromOptions( 184 + relativePath: string, 185 + rawFrontmatter: Record<string, unknown>, 186 + options: SlugOptions = {}, 187 + ): string { 188 + const { slugField, removeIndexFromSlug = false } = options; 189 + 190 + let slug: string; 191 + 192 + // If slugField is set, try to get the value from frontmatter 193 + if (slugField) { 194 + const frontmatterValue = rawFrontmatter[slugField]; 195 + if (frontmatterValue && typeof frontmatterValue === "string") { 196 + // Remove leading slash if present 197 + slug = frontmatterValue 198 + .replace(/^\//, "") 199 + .toLowerCase() 200 + .replace(/\s+/g, "-"); 201 + } else { 202 + // Fallback to filepath if frontmatter field not found 203 + slug = relativePath 204 + .replace(/\.mdx?$/, "") 205 + .toLowerCase() 206 + .replace(/\s+/g, "-"); 207 + } 208 + } else { 209 + // Default: use filepath 210 + slug = relativePath 211 + 
.replace(/\.mdx?$/, "") 212 + .toLowerCase() 213 + .replace(/\s+/g, "-"); 214 + } 215 + 216 + // Remove /index or /_index suffix if configured 217 + if (removeIndexFromSlug) { 218 + slug = slug.replace(/\/_?index$/, ""); 219 + } 220 + 221 + return slug; 111 222 } 112 223 113 224 export async function getContentHash(content: string): Promise<string> { 114 - const encoder = new TextEncoder(); 115 - const data = encoder.encode(content); 116 - const hashBuffer = await crypto.subtle.digest("SHA-256", data); 117 - const hashArray = Array.from(new Uint8Array(hashBuffer)); 118 - return hashArray.map((b) => b.toString(16).padStart(2, "0")).join(""); 225 + const encoder = new TextEncoder(); 226 + const data = encoder.encode(content); 227 + const hashBuffer = await crypto.subtle.digest("SHA-256", data); 228 + const hashArray = Array.from(new Uint8Array(hashBuffer)); 229 + return hashArray.map((b) => b.toString(16).padStart(2, "0")).join(""); 119 230 } 120 231 121 232 function shouldIgnore(relativePath: string, ignorePatterns: string[]): boolean { 122 - for (const pattern of ignorePatterns) { 123 - const glob = new Glob(pattern); 124 - if (glob.match(relativePath)) { 125 - return true; 126 - } 127 - } 128 - return false; 233 + for (const pattern of ignorePatterns) { 234 + if (minimatch(relativePath, pattern)) { 235 + return true; 236 + } 237 + } 238 + return false; 239 + } 240 + 241 + export interface ScanOptions { 242 + frontmatterMapping?: FrontmatterMapping; 243 + ignorePatterns?: string[]; 244 + slugField?: string; 245 + removeIndexFromSlug?: boolean; 129 246 } 130 247 131 248 export async function scanContentDirectory( 132 - contentDir: string, 133 - frontmatterMapping?: FrontmatterMapping, 134 - ignorePatterns: string[] = [] 249 + contentDir: string, 250 + frontmatterMappingOrOptions?: FrontmatterMapping | ScanOptions, 251 + ignorePatterns: string[] = [], 135 252 ): Promise<BlogPost[]> { 136 - const patterns = ["**/*.md", "**/*.mdx"]; 137 - const posts: BlogPost[] = []; 
253 + // Handle both old signature (frontmatterMapping, ignorePatterns) and new signature (options) 254 + let options: ScanOptions; 255 + if ( 256 + frontmatterMappingOrOptions && 257 + ("frontmatterMapping" in frontmatterMappingOrOptions || 258 + "ignorePatterns" in frontmatterMappingOrOptions || 259 + "slugField" in frontmatterMappingOrOptions) 260 + ) { 261 + options = frontmatterMappingOrOptions as ScanOptions; 262 + } else { 263 + // Old signature: (contentDir, frontmatterMapping?, ignorePatterns?) 264 + options = { 265 + frontmatterMapping: frontmatterMappingOrOptions as 266 + | FrontmatterMapping 267 + | undefined, 268 + ignorePatterns, 269 + }; 270 + } 138 271 139 - for (const pattern of patterns) { 140 - const glob = new Glob(pattern); 272 + const { 273 + frontmatterMapping, 274 + ignorePatterns: ignore = [], 275 + slugField, 276 + removeIndexFromSlug, 277 + } = options; 278 + 279 + const patterns = ["**/*.md", "**/*.mdx"]; 280 + const posts: BlogPost[] = []; 281 + 282 + for (const pattern of patterns) { 283 + const files = await glob(pattern, { 284 + cwd: contentDir, 285 + absolute: false, 286 + }); 141 287 142 - for await (const relativePath of glob.scan({ 143 - cwd: contentDir, 144 - absolute: false, 145 - })) { 146 - // Skip files matching ignore patterns 147 - if (shouldIgnore(relativePath, ignorePatterns)) { 148 - continue; 149 - } 288 + for (const relativePath of files) { 289 + // Skip files matching ignore patterns 290 + if (shouldIgnore(relativePath, ignore)) { 291 + continue; 292 + } 150 293 151 - const filePath = path.join(contentDir, relativePath); 152 - const file = Bun.file(filePath); 153 - const rawContent = await file.text(); 294 + const filePath = path.join(contentDir, relativePath); 295 + const rawContent = await fs.readFile(filePath, "utf-8"); 154 296 155 - try { 156 - const { frontmatter, body } = parseFrontmatter(rawContent, frontmatterMapping); 157 - const filename = path.basename(relativePath); 158 - const slug = 
getSlugFromFilename(filename); 297 + try { 298 + const { frontmatter, body, rawFrontmatter } = parseFrontmatter( 299 + rawContent, 300 + frontmatterMapping, 301 + ); 302 + const slug = getSlugFromOptions(relativePath, rawFrontmatter, { 303 + slugField, 304 + removeIndexFromSlug, 305 + }); 159 306 160 - posts.push({ 161 - filePath, 162 - slug, 163 - frontmatter, 164 - content: body, 165 - rawContent, 166 - }); 167 - } catch (error) { 168 - console.error(`Error parsing ${relativePath}:`, error); 169 - } 170 - } 171 - } 307 + posts.push({ 308 + filePath, 309 + slug, 310 + frontmatter, 311 + content: body, 312 + rawContent, 313 + rawFrontmatter, 314 + }); 315 + } catch (error) { 316 + console.error(`Error parsing ${relativePath}:`, error); 317 + } 318 + } 319 + } 172 320 173 - // Sort by publish date (newest first) 174 - posts.sort((a, b) => { 175 - const dateA = new Date(a.frontmatter.publishDate); 176 - const dateB = new Date(b.frontmatter.publishDate); 177 - return dateB.getTime() - dateA.getTime(); 178 - }); 321 + // Sort by publish date (newest first) 322 + posts.sort((a, b) => { 323 + const dateA = new Date(a.frontmatter.publishDate); 324 + const dateB = new Date(b.frontmatter.publishDate); 325 + return dateB.getTime() - dateA.getTime(); 326 + }); 179 327 180 - return posts; 328 + return posts; 181 329 } 182 330 183 - export function updateFrontmatterWithAtUri(rawContent: string, atUri: string): string { 184 - // Detect which delimiter is used (---, +++, or ***) 185 - const delimiterMatch = rawContent.match(/^(---|\+\+\+|\*\*\*)/); 186 - const delimiter = delimiterMatch?.[1] ?? "---"; 187 - const isToml = delimiter === "+++"; 331 + export function updateFrontmatterWithAtUri( 332 + rawContent: string, 333 + atUri: string, 334 + ): string { 335 + // Detect which delimiter is used (---, +++, or ***) 336 + const delimiterMatch = rawContent.match(/^(---|\+\+\+|\*\*\*)/); 337 + const delimiter = delimiterMatch?.[1] ?? 
"---"; 338 + const isToml = delimiter === "+++"; 188 339 189 - // Format the atUri entry based on frontmatter type 190 - const atUriEntry = isToml ? `atUri = "${atUri}"` : `atUri: "${atUri}"`; 340 + // Format the atUri entry based on frontmatter type 341 + const atUriEntry = isToml ? `atUri = "${atUri}"` : `atUri: "${atUri}"`; 191 342 192 - // Check if atUri already exists in frontmatter (handle both formats) 193 - if (rawContent.includes("atUri:") || rawContent.includes("atUri =")) { 194 - // Replace existing atUri (match both YAML and TOML formats) 195 - return rawContent.replace(/atUri\s*[=:]\s*["']?[^"'\n]+["']?\n?/, `${atUriEntry}\n`); 196 - } 343 + // Check if atUri already exists in frontmatter (handle both formats) 344 + if (rawContent.includes("atUri:") || rawContent.includes("atUri =")) { 345 + // Replace existing atUri (match both YAML and TOML formats) 346 + return rawContent.replace( 347 + /atUri\s*[=:]\s*["']?[^"'\n]+["']?\n?/, 348 + `${atUriEntry}\n`, 349 + ); 350 + } 197 351 198 - // Insert atUri before the closing delimiter 199 - const frontmatterEndIndex = rawContent.indexOf(delimiter, 4); 200 - if (frontmatterEndIndex === -1) { 201 - throw new Error("Could not find frontmatter end"); 202 - } 352 + // Insert atUri before the closing delimiter 353 + const frontmatterEndIndex = rawContent.indexOf(delimiter, 4); 354 + if (frontmatterEndIndex === -1) { 355 + throw new Error("Could not find frontmatter end"); 356 + } 203 357 204 - const beforeEnd = rawContent.slice(0, frontmatterEndIndex); 205 - const afterEnd = rawContent.slice(frontmatterEndIndex); 358 + const beforeEnd = rawContent.slice(0, frontmatterEndIndex); 359 + const afterEnd = rawContent.slice(frontmatterEndIndex); 206 360 207 - return `${beforeEnd}${atUriEntry}\n${afterEnd}`; 361 + return `${beforeEnd}${atUriEntry}\n${afterEnd}`; 208 362 } 209 363 210 364 export function stripMarkdownForText(markdown: string): string { 211 - return markdown 212 - .replace(/#{1,6}\s/g, "") // Remove headers 
213 - .replace(/\*\*([^*]+)\*\*/g, "$1") // Remove bold 214 - .replace(/\*([^*]+)\*/g, "$1") // Remove italic 215 - .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1") // Remove links, keep text 216 - .replace(/`{3}[\s\S]*?`{3}/g, "") // Remove code blocks 217 - .replace(/`([^`]+)`/g, "$1") // Remove inline code formatting 218 - .replace(/!\[.*?\]\(.*?\)/g, "") // Remove images 219 - .replace(/\n{3,}/g, "\n\n") // Normalize multiple newlines 220 - .trim(); 365 + return markdown 366 + .replace(/#{1,6}\s/g, "") // Remove headers 367 + .replace(/\*\*([^*]+)\*\*/g, "$1") // Remove bold 368 + .replace(/\*([^*]+)\*/g, "$1") // Remove italic 369 + .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1") // Remove links, keep text 370 + .replace(/`{3}[\s\S]*?`{3}/g, "") // Remove code blocks 371 + .replace(/`([^`]+)`/g, "$1") // Remove inline code formatting 372 + .replace(/!\[.*?\]\(.*?\)/g, "") // Remove images 373 + .replace(/\n{3,}/g, "\n\n") // Normalize multiple newlines 374 + .trim(); 221 375 }
+6 -6
packages/cli/src/lib/prompts.ts
··· 1 - import { isCancel, cancel } from "@clack/prompts"; 1 + import { cancel, isCancel } from "@clack/prompts"; 2 2 3 3 export function exitOnCancel<T>(value: T | symbol): T { 4 - if (isCancel(value)) { 5 - cancel("Cancelled"); 6 - process.exit(0); 7 - } 8 - return value as T; 4 + if (isCancel(value)) { 5 + cancel("Cancelled"); 6 + process.exit(0); 7 + } 8 + return value as T; 9 9 }
+21
packages/cli/src/lib/types.ts
··· 4 4 publishDate?: string; // Field name for publish date (default: "publishDate", also checks "pubDate", "date", "createdAt", "created_at") 5 5 coverImage?: string; // Field name for cover image (default: "ogImage") 6 6 tags?: string; // Field name for tags (default: "tags") 7 + draft?: string; // Field name for draft status (default: "draft") 8 + slugField?: string; // Frontmatter field to use for slug (if set, uses frontmatter value; otherwise uses filepath) 9 + } 10 + 11 + // Strong reference for Bluesky post (com.atproto.repo.strongRef) 12 + export interface StrongRef { 13 + uri: string; // at:// URI format 14 + cid: string; // Content ID 15 + } 16 + 17 + // Bluesky posting configuration 18 + export interface BlueskyConfig { 19 + enabled: boolean; 20 + maxAgeDays?: number; // Only post if published within N days (default: 7) 7 21 } 8 22 9 23 export interface PublisherConfig { ··· 18 32 identity?: string; // Which stored identity to use (matches identifier) 19 33 frontmatter?: FrontmatterMapping; // Custom frontmatter field mappings 20 34 ignore?: string[]; // Glob patterns for files to ignore (e.g., ["_index.md", "**/drafts/**"]) 35 + removeIndexFromSlug?: boolean; // Remove "/index" or "/_index" suffix from paths (default: false) 36 + textContentField?: string; // Frontmatter field to use for textContent instead of markdown body 37 + bluesky?: BlueskyConfig; // Optional Bluesky posting configuration 21 38 } 22 39 23 40 export interface Credentials { ··· 33 50 tags?: string[]; 34 51 ogImage?: string; 35 52 atUri?: string; 53 + draft?: boolean; 36 54 } 37 55 38 56 export interface BlogPost { ··· 41 59 frontmatter: PostFrontmatter; 42 60 content: string; 43 61 rawContent: string; 62 + rawFrontmatter: Record<string, unknown>; // For accessing custom fields like textContentField 44 63 } 45 64 46 65 export interface BlobRef { ··· 62 81 contentHash: string; 63 82 atUri?: string; 64 83 lastPublished?: string; 84 + slug?: string; // The generated slug for this post 
(used by inject command) 85 + bskyPostRef?: StrongRef; // Reference to corresponding Bluesky post 65 86 } 66 87 67 88 export interface PublicationRecord {
+20 -29
packages/cli/tsconfig.json
··· 1 1 { 2 - "compilerOptions": { 3 - // Environment setup & latest features 4 - "lib": ["ESNext"], 5 - "target": "ESNext", 6 - "module": "Preserve", 7 - "moduleDetection": "force", 8 - "jsx": "react-jsx", 9 - "allowJs": true, 10 - 11 - // Bundler mode 12 - "moduleResolution": "bundler", 13 - "allowImportingTsExtensions": true, 14 - "verbatimModuleSyntax": true, 15 - "noEmit": true, 16 - 17 - // Best practices 18 - "strict": true, 19 - "skipLibCheck": true, 20 - "noFallthroughCasesInSwitch": true, 21 - "noUncheckedIndexedAccess": true, 22 - "noImplicitOverride": true, 23 - 24 - // Some stricter flags (disabled by default) 25 - "noUnusedLocals": false, 26 - "noUnusedParameters": false, 27 - "noPropertyAccessFromIndexSignature": false, 28 - "composite": true 29 - }, 30 - "include": ["src"] 2 + "compilerOptions": { 3 + "lib": ["ES2022"], 4 + "target": "ES2022", 5 + "module": "ESNext", 6 + "moduleResolution": "bundler", 7 + "outDir": "./dist", 8 + "rootDir": "./src", 9 + "declaration": true, 10 + "sourceMap": true, 11 + "strict": true, 12 + "skipLibCheck": true, 13 + "esModuleInterop": true, 14 + "resolveJsonModule": true, 15 + "forceConsistentCasingInFileNames": true, 16 + "noFallthroughCasesInSwitch": true, 17 + "noUncheckedIndexedAccess": true, 18 + "noUnusedLocals": false, 19 + "noUnusedParameters": false 20 + }, 21 + "include": ["src"] 31 22 }