+1
.gitignore
-3
.gitmodules
-1
.tangled/workflows/deploy-wisp.yml
+4
.tangled/workflows/test.yml
···
14
14
- name: install dependencies
15
15
command: |
16
16
export PATH="$HOME/.nix-profile/bin:$PATH"
17
+
18
+
# have to regenerate, otherwise it won't install the dependencies needed to run
19
+
rm -rf bun.lock package-lock.json
20
+
bun install @oven/bun-linux-aarch64
17
21
bun install
18
22
19
23
- name: run all tests
+33
-3
README.md
···
43
43
44
44
# Hosting service
45
45
cd hosting-service
46
-
cargo run
46
+
npm run start
47
47
48
48
# CLI
49
49
cd cli
50
50
cargo build
51
51
```
52
52
53
+
## Features
54
+
55
+
### URL Redirects and Rewrites
56
+
57
+
The hosting service supports Netlify-style `_redirects` files for managing URLs. Place a `_redirects` file in your site root to enable:
58
+
59
+
- **301/302 Redirects**: Permanent and temporary URL redirects
60
+
- **200 Rewrites**: Serve different content without changing the URL
61
+
- **404 Custom Pages**: Custom error pages for specific paths
62
+
- **Splats & Placeholders**: Dynamic path matching (`/blog/:year/:month/:day`, `/news/*`)
63
+
- **Query Parameter Matching**: Redirect based on URL parameters
64
+
- **Conditional Redirects**: Route by country, language, or cookie presence
65
+
- **Force Redirects**: Override existing files with redirects
66
+
67
+
Example `_redirects`:
68
+
```
69
+
# Single-page app routing (React, Vue, etc.)
70
+
/* /index.html 200
71
+
72
+
# Simple redirects
73
+
/home /
74
+
/old-blog/* /blog/:splat
75
+
76
+
# API proxy
77
+
/api/* https://api.example.com/:splat 200
78
+
79
+
# Country-based routing
80
+
/ /us/ 302 Country=us
81
+
/ /uk/ 302 Country=gb
82
+
```
83
+
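Each rule is a single line in the form `from to [status] [conditions]`. For instance, the custom 404 support above might be used to serve an error page for a whole path prefix:

```
# Custom 404 page for everything under /docs
/docs/* /docs/404.html 404
```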
53
84
## Limits
54
85
55
86
- Max file size: 100MB (PDS limit)
56
-
- Max site size: 300MB
57
87
- Max files: 2000
58
88
59
89
## Tech Stack
60
90
61
91
- Backend: Bun + Elysia + PostgreSQL
62
92
- Frontend: React 19 + Tailwind 4 + Radix UI
63
-
- Hosting: Rust microservice
93
+
- Hosting: Node microservice using Hono
64
94
- CLI: Rust + Jacquard (AT Protocol library)
65
95
- Protocol: AT Protocol OAuth + custom lexicons
66
96
+627
-67
cli/Cargo.lock
···
139
139
140
140
[[package]]
141
141
name = "async-compression"
142
-
version = "0.4.32"
142
+
version = "0.4.33"
143
143
source = "registry+https://github.com/rust-lang/crates.io-index"
144
-
checksum = "5a89bce6054c720275ac2432fbba080a66a2106a44a1b804553930ca6909f4e0"
144
+
checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
145
145
dependencies = [
146
146
"compression-codecs",
147
147
"compression-core",
···
158
158
dependencies = [
159
159
"proc-macro2",
160
160
"quote",
161
-
"syn 2.0.108",
161
+
"syn 2.0.110",
162
162
]
163
163
164
164
[[package]]
···
174
174
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
175
175
176
176
[[package]]
177
+
name = "axum"
178
+
version = "0.7.9"
179
+
source = "registry+https://github.com/rust-lang/crates.io-index"
180
+
checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
181
+
dependencies = [
182
+
"async-trait",
183
+
"axum-core",
184
+
"bytes",
185
+
"futures-util",
186
+
"http",
187
+
"http-body",
188
+
"http-body-util",
189
+
"hyper",
190
+
"hyper-util",
191
+
"itoa",
192
+
"matchit",
193
+
"memchr",
194
+
"mime",
195
+
"percent-encoding",
196
+
"pin-project-lite",
197
+
"rustversion",
198
+
"serde",
199
+
"serde_json",
200
+
"serde_path_to_error",
201
+
"serde_urlencoded",
202
+
"sync_wrapper",
203
+
"tokio",
204
+
"tower 0.5.2",
205
+
"tower-layer",
206
+
"tower-service",
207
+
"tracing",
208
+
]
209
+
210
+
[[package]]
211
+
name = "axum-core"
212
+
version = "0.4.5"
213
+
source = "registry+https://github.com/rust-lang/crates.io-index"
214
+
checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
215
+
dependencies = [
216
+
"async-trait",
217
+
"bytes",
218
+
"futures-util",
219
+
"http",
220
+
"http-body",
221
+
"http-body-util",
222
+
"mime",
223
+
"pin-project-lite",
224
+
"rustversion",
225
+
"sync_wrapper",
226
+
"tower-layer",
227
+
"tower-service",
228
+
"tracing",
229
+
]
230
+
231
+
[[package]]
177
232
name = "backtrace"
178
233
version = "0.3.76"
179
234
source = "registry+https://github.com/rust-lang/crates.io-index"
···
274
329
"proc-macro2",
275
330
"quote",
276
331
"rustversion",
277
-
"syn 2.0.108",
332
+
"syn 2.0.110",
278
333
]
279
334
280
335
[[package]]
···
348
403
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
349
404
350
405
[[package]]
406
+
name = "byteorder"
407
+
version = "1.5.0"
408
+
source = "registry+https://github.com/rust-lang/crates.io-index"
409
+
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
410
+
411
+
[[package]]
351
412
name = "bytes"
352
413
version = "1.10.1"
353
414
source = "registry+https://github.com/rust-lang/crates.io-index"
···
367
428
368
429
[[package]]
369
430
name = "cc"
370
-
version = "1.2.44"
431
+
version = "1.2.45"
371
432
source = "registry+https://github.com/rust-lang/crates.io-index"
372
-
checksum = "37521ac7aabe3d13122dc382493e20c9416f299d2ccd5b3a5340a2570cdeb0f3"
433
+
checksum = "35900b6c8d709fb1d854671ae27aeaa9eec2f8b01b364e1619a40da3e6fe2afe"
373
434
dependencies = [
374
435
"find-msvc-tools",
375
436
"shlex",
···
494
555
"heck 0.5.0",
495
556
"proc-macro2",
496
557
"quote",
497
-
"syn 2.0.108",
558
+
"syn 2.0.110",
498
559
]
499
560
500
561
[[package]]
···
521
582
522
583
[[package]]
523
584
name = "compression-codecs"
524
-
version = "0.4.31"
585
+
version = "0.4.32"
525
586
source = "registry+https://github.com/rust-lang/crates.io-index"
526
-
checksum = "ef8a506ec4b81c460798f572caead636d57d3d7e940f998160f52bd254bf2d23"
587
+
checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
527
588
dependencies = [
528
589
"compression-core",
529
590
"flate2",
···
532
593
533
594
[[package]]
534
595
name = "compression-core"
535
-
version = "0.4.29"
596
+
version = "0.4.30"
536
597
source = "registry+https://github.com/rust-lang/crates.io-index"
537
-
checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb"
598
+
checksum = "3a9b614a5787ef0c8802a55766480563cb3a93b435898c422ed2a359cf811582"
538
599
539
600
[[package]]
540
601
name = "const-oid"
···
549
610
checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3"
550
611
551
612
[[package]]
613
+
name = "cordyceps"
614
+
version = "0.3.4"
615
+
source = "registry+https://github.com/rust-lang/crates.io-index"
616
+
checksum = "688d7fbb8092b8de775ef2536f36c8c31f2bc4006ece2e8d8ad2d17d00ce0a2a"
617
+
dependencies = [
618
+
"loom",
619
+
"tracing",
620
+
]
621
+
622
+
[[package]]
552
623
name = "core-foundation"
553
624
version = "0.9.4"
554
625
source = "registry+https://github.com/rust-lang/crates.io-index"
···
665
736
"proc-macro2",
666
737
"quote",
667
738
"strsim",
668
-
"syn 2.0.108",
739
+
"syn 2.0.110",
669
740
]
670
741
671
742
[[package]]
···
676
747
dependencies = [
677
748
"darling_core",
678
749
"quote",
679
-
"syn 2.0.108",
750
+
"syn 2.0.110",
680
751
]
681
752
682
753
[[package]]
···
716
787
checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976"
717
788
dependencies = [
718
789
"data-encoding",
719
-
"syn 2.0.108",
790
+
"syn 2.0.110",
720
791
]
721
792
722
793
[[package]]
···
751
822
]
752
823
753
824
[[package]]
825
+
name = "derive_more"
826
+
version = "1.0.0"
827
+
source = "registry+https://github.com/rust-lang/crates.io-index"
828
+
checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
829
+
dependencies = [
830
+
"derive_more-impl",
831
+
]
832
+
833
+
[[package]]
834
+
name = "derive_more-impl"
835
+
version = "1.0.0"
836
+
source = "registry+https://github.com/rust-lang/crates.io-index"
837
+
checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
838
+
dependencies = [
839
+
"proc-macro2",
840
+
"quote",
841
+
"syn 2.0.110",
842
+
"unicode-xid",
843
+
]
844
+
845
+
[[package]]
846
+
name = "diatomic-waker"
847
+
version = "0.2.3"
848
+
source = "registry+https://github.com/rust-lang/crates.io-index"
849
+
checksum = "ab03c107fafeb3ee9f5925686dbb7a73bc76e3932abb0d2b365cb64b169cf04c"
850
+
851
+
[[package]]
754
852
name = "digest"
755
853
version = "0.10.7"
756
854
source = "registry+https://github.com/rust-lang/crates.io-index"
···
791
889
dependencies = [
792
890
"proc-macro2",
793
891
"quote",
794
-
"syn 2.0.108",
892
+
"syn 2.0.110",
795
893
]
796
894
797
895
[[package]]
···
852
950
"heck 0.5.0",
853
951
"proc-macro2",
854
952
"quote",
855
-
"syn 2.0.108",
953
+
"syn 2.0.110",
856
954
]
857
955
858
956
[[package]]
···
956
1054
]
957
1055
958
1056
[[package]]
1057
+
name = "futures-buffered"
1058
+
version = "0.2.12"
1059
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1060
+
checksum = "a8e0e1f38ec07ba4abbde21eed377082f17ccb988be9d988a5adbf4bafc118fd"
1061
+
dependencies = [
1062
+
"cordyceps",
1063
+
"diatomic-waker",
1064
+
"futures-core",
1065
+
"pin-project-lite",
1066
+
"spin 0.10.0",
1067
+
]
1068
+
1069
+
[[package]]
959
1070
name = "futures-channel"
960
1071
version = "0.3.31"
961
1072
source = "registry+https://github.com/rust-lang/crates.io-index"
···
989
1100
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
990
1101
991
1102
[[package]]
1103
+
name = "futures-lite"
1104
+
version = "2.6.1"
1105
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1106
+
checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad"
1107
+
dependencies = [
1108
+
"fastrand",
1109
+
"futures-core",
1110
+
"futures-io",
1111
+
"parking",
1112
+
"pin-project-lite",
1113
+
]
1114
+
1115
+
[[package]]
992
1116
name = "futures-macro"
993
1117
version = "0.3.31"
994
1118
source = "registry+https://github.com/rust-lang/crates.io-index"
···
996
1120
dependencies = [
997
1121
"proc-macro2",
998
1122
"quote",
999
-
"syn 2.0.108",
1123
+
"syn 2.0.110",
1000
1124
]
1001
1125
1002
1126
[[package]]
···
1027
1151
"pin-project-lite",
1028
1152
"pin-utils",
1029
1153
"slab",
1154
+
]
1155
+
1156
+
[[package]]
1157
+
name = "generator"
1158
+
version = "0.8.7"
1159
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1160
+
checksum = "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2"
1161
+
dependencies = [
1162
+
"cc",
1163
+
"cfg-if",
1164
+
"libc",
1165
+
"log",
1166
+
"rustversion",
1167
+
"windows",
1030
1168
]
1031
1169
1032
1170
[[package]]
···
1236
1374
"markup5ever",
1237
1375
"proc-macro2",
1238
1376
"quote",
1239
-
"syn 2.0.108",
1377
+
"syn 2.0.110",
1240
1378
]
1241
1379
1242
1380
[[package]]
···
1274
1412
]
1275
1413
1276
1414
[[package]]
1415
+
name = "http-range-header"
1416
+
version = "0.4.2"
1417
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1418
+
checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c"
1419
+
1420
+
[[package]]
1277
1421
name = "httparse"
1278
1422
version = "1.10.1"
1279
1423
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1287
1431
1288
1432
[[package]]
1289
1433
name = "hyper"
1290
-
version = "1.7.0"
1434
+
version = "1.8.0"
1291
1435
source = "registry+https://github.com/rust-lang/crates.io-index"
1292
-
checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e"
1436
+
checksum = "1744436df46f0bde35af3eda22aeaba453aada65d8f1c171cd8a5f59030bd69f"
1293
1437
dependencies = [
1294
1438
"atomic-waker",
1295
1439
"bytes",
···
1299
1443
"http",
1300
1444
"http-body",
1301
1445
"httparse",
1446
+
"httpdate",
1302
1447
"itoa",
1303
1448
"pin-project-lite",
1304
1449
"pin-utils",
···
1362
1507
"js-sys",
1363
1508
"log",
1364
1509
"wasm-bindgen",
1365
-
"windows-core",
1510
+
"windows-core 0.62.2",
1366
1511
]
1367
1512
1368
1513
[[package]]
···
1554
1699
1555
1700
[[package]]
1556
1701
name = "iri-string"
1557
-
version = "0.7.8"
1702
+
version = "0.7.9"
1558
1703
source = "registry+https://github.com/rust-lang/crates.io-index"
1559
-
checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
1704
+
checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397"
1560
1705
dependencies = [
1561
1706
"memchr",
1562
1707
"serde",
···
1583
1728
[[package]]
1584
1729
name = "jacquard"
1585
1730
version = "0.9.0"
1731
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1586
1732
dependencies = [
1587
1733
"bytes",
1588
1734
"getrandom 0.2.16",
···
1610
1756
[[package]]
1611
1757
name = "jacquard-api"
1612
1758
version = "0.9.0"
1759
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1613
1760
dependencies = [
1614
1761
"bon",
1615
1762
"bytes",
···
1627
1774
[[package]]
1628
1775
name = "jacquard-common"
1629
1776
version = "0.9.0"
1777
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1630
1778
dependencies = [
1631
1779
"base64 0.22.1",
1632
1780
"bon",
1633
1781
"bytes",
1634
1782
"chrono",
1783
+
"ciborium",
1635
1784
"cid",
1785
+
"futures",
1636
1786
"getrandom 0.2.16",
1637
1787
"getrandom 0.3.4",
1638
1788
"http",
···
1642
1792
"miette",
1643
1793
"multibase",
1644
1794
"multihash",
1795
+
"n0-future",
1645
1796
"ouroboros",
1646
1797
"p256",
1647
1798
"rand 0.9.2",
···
1655
1806
"smol_str",
1656
1807
"thiserror 2.0.17",
1657
1808
"tokio",
1809
+
"tokio-tungstenite-wasm",
1658
1810
"tokio-util",
1659
1811
"trait-variant",
1660
1812
"url",
···
1663
1815
[[package]]
1664
1816
name = "jacquard-derive"
1665
1817
version = "0.9.0"
1818
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1666
1819
dependencies = [
1667
1820
"heck 0.5.0",
1668
1821
"jacquard-lexicon",
1669
1822
"proc-macro2",
1670
1823
"quote",
1671
-
"syn 2.0.108",
1824
+
"syn 2.0.110",
1672
1825
]
1673
1826
1674
1827
[[package]]
1675
1828
name = "jacquard-identity"
1676
-
version = "0.9.0"
1829
+
version = "0.9.1"
1830
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1677
1831
dependencies = [
1678
1832
"bon",
1679
1833
"bytes",
···
1698
1852
1699
1853
[[package]]
1700
1854
name = "jacquard-lexicon"
1701
-
version = "0.9.0"
1855
+
version = "0.9.1"
1856
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1702
1857
dependencies = [
1703
1858
"cid",
1704
1859
"dashmap",
···
1716
1871
"serde_repr",
1717
1872
"serde_with",
1718
1873
"sha2",
1719
-
"syn 2.0.108",
1874
+
"syn 2.0.110",
1720
1875
"thiserror 2.0.17",
1721
1876
"unicode-segmentation",
1722
1877
]
···
1724
1879
[[package]]
1725
1880
name = "jacquard-oauth"
1726
1881
version = "0.9.0"
1882
+
source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65"
1727
1883
dependencies = [
1728
1884
"base64 0.22.1",
1729
1885
"bytes",
···
1849
2005
source = "registry+https://github.com/rust-lang/crates.io-index"
1850
2006
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
1851
2007
dependencies = [
1852
-
"spin",
2008
+
"spin 0.9.8",
1853
2009
]
1854
2010
1855
2011
[[package]]
···
1909
2065
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
1910
2066
1911
2067
[[package]]
2068
+
name = "loom"
2069
+
version = "0.7.2"
2070
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2071
+
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
2072
+
dependencies = [
2073
+
"cfg-if",
2074
+
"generator",
2075
+
"scoped-tls",
2076
+
"tracing",
2077
+
"tracing-subscriber",
2078
+
]
2079
+
2080
+
[[package]]
1912
2081
name = "lru-cache"
1913
2082
version = "0.1.2"
1914
2083
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1965
2134
"quote",
1966
2135
"syn 1.0.109",
1967
2136
]
2137
+
2138
+
[[package]]
2139
+
name = "matchers"
2140
+
version = "0.2.0"
2141
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2142
+
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
2143
+
dependencies = [
2144
+
"regex-automata",
2145
+
]
2146
+
2147
+
[[package]]
2148
+
name = "matchit"
2149
+
version = "0.7.3"
2150
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2151
+
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
1968
2152
1969
2153
[[package]]
1970
2154
name = "memchr"
···
1999
2183
dependencies = [
2000
2184
"proc-macro2",
2001
2185
"quote",
2002
-
"syn 2.0.108",
2186
+
"syn 2.0.110",
2003
2187
]
2004
2188
2005
2189
[[package]]
···
2101
2285
]
2102
2286
2103
2287
[[package]]
2288
+
name = "n0-future"
2289
+
version = "0.1.3"
2290
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2291
+
checksum = "7bb0e5d99e681ab3c938842b96fcb41bf8a7bb4bfdb11ccbd653a7e83e06c794"
2292
+
dependencies = [
2293
+
"cfg_aliases",
2294
+
"derive_more",
2295
+
"futures-buffered",
2296
+
"futures-lite",
2297
+
"futures-util",
2298
+
"js-sys",
2299
+
"pin-project",
2300
+
"send_wrapper",
2301
+
"tokio",
2302
+
"tokio-util",
2303
+
"wasm-bindgen",
2304
+
"wasm-bindgen-futures",
2305
+
"web-time",
2306
+
]
2307
+
2308
+
[[package]]
2104
2309
name = "ndk-context"
2105
2310
version = "0.1.1"
2106
2311
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2123
2328
]
2124
2329
2125
2330
[[package]]
2331
+
name = "nu-ansi-term"
2332
+
version = "0.50.3"
2333
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2334
+
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
2335
+
dependencies = [
2336
+
"windows-sys 0.61.2",
2337
+
]
2338
+
2339
+
[[package]]
2126
2340
name = "num-bigint-dig"
2127
-
version = "0.8.5"
2341
+
version = "0.8.6"
2128
2342
source = "registry+https://github.com/rust-lang/crates.io-index"
2129
-
checksum = "82c79c15c05d4bf82b6f5ef163104cc81a760d8e874d38ac50ab67c8877b647b"
2343
+
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
2130
2344
dependencies = [
2131
2345
"lazy_static",
2132
2346
"libm",
···
2240
2454
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
2241
2455
2242
2456
[[package]]
2457
+
name = "openssl-probe"
2458
+
version = "0.1.6"
2459
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2460
+
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
2461
+
2462
+
[[package]]
2243
2463
name = "option-ext"
2244
2464
version = "0.2.0"
2245
2465
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2266
2486
"proc-macro2",
2267
2487
"proc-macro2-diagnostics",
2268
2488
"quote",
2269
-
"syn 2.0.108",
2489
+
"syn 2.0.110",
2270
2490
]
2271
2491
2272
2492
[[package]]
···
2296
2516
"elliptic-curve",
2297
2517
"primeorder",
2298
2518
]
2519
+
2520
+
[[package]]
2521
+
name = "parking"
2522
+
version = "2.2.1"
2523
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2524
+
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
2299
2525
2300
2526
[[package]]
2301
2527
name = "parking_lot"
···
2374
2600
]
2375
2601
2376
2602
[[package]]
2603
+
name = "pin-project"
2604
+
version = "1.1.10"
2605
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2606
+
checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
2607
+
dependencies = [
2608
+
"pin-project-internal",
2609
+
]
2610
+
2611
+
[[package]]
2612
+
name = "pin-project-internal"
2613
+
version = "1.1.10"
2614
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2615
+
checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
2616
+
dependencies = [
2617
+
"proc-macro2",
2618
+
"quote",
2619
+
"syn 2.0.110",
2620
+
]
2621
+
2622
+
[[package]]
2377
2623
name = "pin-project-lite"
2378
2624
version = "0.2.16"
2379
2625
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2443
2689
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
2444
2690
dependencies = [
2445
2691
"proc-macro2",
2446
-
"syn 2.0.108",
2692
+
"syn 2.0.110",
2447
2693
]
2448
2694
2449
2695
[[package]]
···
2496
2742
dependencies = [
2497
2743
"proc-macro2",
2498
2744
"quote",
2499
-
"syn 2.0.108",
2745
+
"syn 2.0.110",
2500
2746
"version_check",
2501
2747
"yansi",
2502
2748
]
···
2564
2810
2565
2811
[[package]]
2566
2812
name = "quote"
2567
-
version = "1.0.41"
2813
+
version = "1.0.42"
2568
2814
source = "registry+https://github.com/rust-lang/crates.io-index"
2569
-
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
2815
+
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
2570
2816
dependencies = [
2571
2817
"proc-macro2",
2572
2818
]
···
2679
2925
dependencies = [
2680
2926
"proc-macro2",
2681
2927
"quote",
2682
-
"syn 2.0.108",
2928
+
"syn 2.0.110",
2683
2929
]
2684
2930
2685
2931
[[package]]
···
2745
2991
"tokio",
2746
2992
"tokio-rustls",
2747
2993
"tokio-util",
2748
-
"tower",
2749
-
"tower-http",
2994
+
"tower 0.5.2",
2995
+
"tower-http 0.6.6",
2750
2996
"tower-service",
2751
2997
"url",
2752
2998
"wasm-bindgen",
···
2857
3103
2858
3104
[[package]]
2859
3105
name = "rustls"
2860
-
version = "0.23.34"
3106
+
version = "0.23.35"
2861
3107
source = "registry+https://github.com/rust-lang/crates.io-index"
2862
-
checksum = "6a9586e9ee2b4f8fab52a0048ca7334d7024eef48e2cb9407e3497bb7cab7fa7"
3108
+
checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f"
2863
3109
dependencies = [
2864
3110
"once_cell",
2865
3111
"ring",
···
2867
3113
"rustls-webpki",
2868
3114
"subtle",
2869
3115
"zeroize",
3116
+
]
3117
+
3118
+
[[package]]
3119
+
name = "rustls-native-certs"
3120
+
version = "0.8.2"
3121
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3122
+
checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923"
3123
+
dependencies = [
3124
+
"openssl-probe",
3125
+
"rustls-pki-types",
3126
+
"schannel",
3127
+
"security-framework",
2870
3128
]
2871
3129
2872
3130
[[package]]
···
2918
3176
]
2919
3177
2920
3178
[[package]]
3179
+
name = "schannel"
3180
+
version = "0.1.28"
3181
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3182
+
checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
3183
+
dependencies = [
3184
+
"windows-sys 0.61.2",
3185
+
]
3186
+
3187
+
[[package]]
2921
3188
name = "schemars"
2922
3189
version = "0.9.0"
2923
3190
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2931
3198
2932
3199
[[package]]
2933
3200
name = "schemars"
2934
-
version = "1.0.4"
3201
+
version = "1.1.0"
2935
3202
source = "registry+https://github.com/rust-lang/crates.io-index"
2936
-
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
3203
+
checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289"
2937
3204
dependencies = [
2938
3205
"dyn-clone",
2939
3206
"ref-cast",
···
2942
3209
]
2943
3210
2944
3211
[[package]]
3212
+
name = "scoped-tls"
3213
+
version = "1.0.1"
3214
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3215
+
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
3216
+
3217
+
[[package]]
2945
3218
name = "scopeguard"
2946
3219
version = "1.2.0"
2947
3220
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2962
3235
]
2963
3236
2964
3237
[[package]]
3238
+
name = "security-framework"
3239
+
version = "3.5.1"
3240
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3241
+
checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef"
3242
+
dependencies = [
3243
+
"bitflags",
3244
+
"core-foundation 0.10.1",
3245
+
"core-foundation-sys",
3246
+
"libc",
3247
+
"security-framework-sys",
3248
+
]
3249
+
3250
+
[[package]]
3251
+
name = "security-framework-sys"
3252
+
version = "2.15.0"
3253
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3254
+
checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
3255
+
dependencies = [
3256
+
"core-foundation-sys",
3257
+
"libc",
3258
+
]
3259
+
3260
+
[[package]]
3261
+
name = "send_wrapper"
3262
+
version = "0.6.0"
3263
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3264
+
checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73"
3265
+
3266
+
[[package]]
2965
3267
name = "serde"
2966
3268
version = "1.0.228"
2967
3269
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2998
3300
dependencies = [
2999
3301
"proc-macro2",
3000
3302
"quote",
3001
-
"syn 2.0.108",
3303
+
"syn 2.0.110",
3002
3304
]
3003
3305
3004
3306
[[package]]
···
3040
3342
]
3041
3343
3042
3344
[[package]]
3345
+
name = "serde_path_to_error"
3346
+
version = "0.1.20"
3347
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3348
+
checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
3349
+
dependencies = [
3350
+
"itoa",
3351
+
"serde",
3352
+
"serde_core",
3353
+
]
3354
+
3355
+
[[package]]
3043
3356
name = "serde_repr"
3044
3357
version = "0.1.20"
3045
3358
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3047
3360
dependencies = [
3048
3361
"proc-macro2",
3049
3362
"quote",
3050
-
"syn 2.0.108",
3363
+
"syn 2.0.110",
3051
3364
]
3052
3365
3053
3366
[[package]]
···
3074
3387
"indexmap 1.9.3",
3075
3388
"indexmap 2.12.0",
3076
3389
"schemars 0.9.0",
3077
-
"schemars 1.0.4",
3390
+
"schemars 1.1.0",
3078
3391
"serde_core",
3079
3392
"serde_json",
3080
3393
"serde_with_macros",
···
3090
3403
"darling",
3091
3404
"proc-macro2",
3092
3405
"quote",
3093
-
"syn 2.0.108",
3406
+
"syn 2.0.110",
3407
+
]
3408
+
3409
+
[[package]]
3410
+
name = "sha1"
3411
+
version = "0.10.6"
3412
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3413
+
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
3414
+
dependencies = [
3415
+
"cfg-if",
3416
+
"cpufeatures",
3417
+
"digest",
3094
3418
]
3095
3419
3096
3420
[[package]]
···
3108
3432
"cfg-if",
3109
3433
"cpufeatures",
3110
3434
"digest",
3435
+
]
3436
+
3437
+
[[package]]
3438
+
name = "sharded-slab"
3439
+
version = "0.1.7"
3440
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3441
+
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
3442
+
dependencies = [
3443
+
"lazy_static",
3111
3444
]
3112
3445
3113
3446
[[package]]
···
3205
3538
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
3206
3539
3207
3540
[[package]]
3541
+
name = "spin"
3542
+
version = "0.10.0"
3543
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3544
+
checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591"
3545
+
3546
+
[[package]]
3208
3547
name = "spki"
3209
3548
version = "0.7.3"
3210
3549
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3236
3575
"quote",
3237
3576
"serde",
3238
3577
"sha2",
3239
-
"syn 2.0.108",
3578
+
"syn 2.0.110",
3240
3579
"thiserror 1.0.69",
3241
3580
]
3242
3581
···
3317
3656
3318
3657
[[package]]
3319
3658
name = "syn"
3320
-
version = "2.0.108"
3659
+
version = "2.0.110"
3321
3660
source = "registry+https://github.com/rust-lang/crates.io-index"
3322
-
checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917"
3661
+
checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea"
3323
3662
dependencies = [
3324
3663
"proc-macro2",
3325
3664
"quote",
···
3343
3682
dependencies = [
3344
3683
"proc-macro2",
3345
3684
"quote",
3346
-
"syn 2.0.108",
3685
+
"syn 2.0.110",
3347
3686
]
3348
3687
3349
3688
[[package]]
···
3443
3782
dependencies = [
3444
3783
"proc-macro2",
3445
3784
"quote",
3446
-
"syn 2.0.108",
3785
+
"syn 2.0.110",
3447
3786
]
3448
3787
3449
3788
[[package]]
···
3454
3793
dependencies = [
3455
3794
"proc-macro2",
3456
3795
"quote",
3457
-
"syn 2.0.108",
3796
+
"syn 2.0.110",
3797
+
]
3798
+
3799
+
[[package]]
3800
+
name = "thread_local"
3801
+
version = "1.1.9"
3802
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3803
+
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
3804
+
dependencies = [
3805
+
"cfg-if",
3458
3806
]
3459
3807
3460
3808
[[package]]
···
3561
3909
dependencies = [
3562
3910
"proc-macro2",
3563
3911
"quote",
3564
-
"syn 2.0.108",
3912
+
"syn 2.0.110",
3565
3913
]
3566
3914
3567
3915
[[package]]
···
3575
3923
]
3576
3924
3577
3925
[[package]]
3926
+
name = "tokio-tungstenite"
3927
+
version = "0.24.0"
3928
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3929
+
checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9"
3930
+
dependencies = [
3931
+
"futures-util",
3932
+
"log",
3933
+
"rustls",
3934
+
"rustls-native-certs",
3935
+
"rustls-pki-types",
3936
+
"tokio",
3937
+
"tokio-rustls",
3938
+
"tungstenite",
3939
+
]
3940
+
3941
+
[[package]]
3942
+
name = "tokio-tungstenite-wasm"
3943
+
version = "0.4.0"
3944
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3945
+
checksum = "e21a5c399399c3db9f08d8297ac12b500e86bca82e930253fdc62eaf9c0de6ae"
3946
+
dependencies = [
3947
+
"futures-channel",
3948
+
"futures-util",
3949
+
"http",
3950
+
"httparse",
3951
+
"js-sys",
3952
+
"rustls",
3953
+
"thiserror 1.0.69",
3954
+
"tokio",
3955
+
"tokio-tungstenite",
3956
+
"wasm-bindgen",
3957
+
"web-sys",
3958
+
]
3959
+
3960
+
[[package]]
3578
3961
name = "tokio-util"
3579
-
version = "0.7.16"
3962
+
version = "0.7.17"
3580
3963
source = "registry+https://github.com/rust-lang/crates.io-index"
3581
-
checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5"
3964
+
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
3582
3965
dependencies = [
3583
3966
"bytes",
3584
3967
"futures-core",
3585
3968
"futures-sink",
3969
+
"futures-util",
3586
3970
"pin-project-lite",
3587
3971
"tokio",
3588
3972
]
3589
3973
3590
3974
[[package]]
3591
3975
name = "tower"
3976
+
version = "0.4.13"
3977
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3978
+
checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
3979
+
dependencies = [
3980
+
"tower-layer",
3981
+
"tower-service",
3982
+
"tracing",
3983
+
]
3984
+
3985
+
[[package]]
3986
+
name = "tower"
3592
3987
version = "0.5.2"
3593
3988
source = "registry+https://github.com/rust-lang/crates.io-index"
3594
3989
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
···
3600
3995
"tokio",
3601
3996
"tower-layer",
3602
3997
"tower-service",
3998
+
"tracing",
3999
+
]
4000
+
4001
+
[[package]]
4002
+
name = "tower-http"
4003
+
version = "0.5.2"
4004
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4005
+
checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
4006
+
dependencies = [
4007
+
"async-compression",
4008
+
"bitflags",
4009
+
"bytes",
4010
+
"futures-core",
4011
+
"futures-util",
4012
+
"http",
4013
+
"http-body",
4014
+
"http-body-util",
4015
+
"http-range-header",
4016
+
"httpdate",
4017
+
"mime",
4018
+
"mime_guess",
4019
+
"percent-encoding",
4020
+
"pin-project-lite",
4021
+
"tokio",
4022
+
"tokio-util",
4023
+
"tower-layer",
4024
+
"tower-service",
4025
+
"tracing",
3603
4026
]
3604
4027
3605
4028
[[package]]
···
3615
4038
"http-body",
3616
4039
"iri-string",
3617
4040
"pin-project-lite",
3618
-
"tower",
4041
+
"tower 0.5.2",
3619
4042
"tower-layer",
3620
4043
"tower-service",
3621
4044
]
···
3638
4061
source = "registry+https://github.com/rust-lang/crates.io-index"
3639
4062
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
3640
4063
dependencies = [
4064
+
"log",
3641
4065
"pin-project-lite",
3642
4066
"tracing-attributes",
3643
4067
"tracing-core",
···
3651
4075
dependencies = [
3652
4076
"proc-macro2",
3653
4077
"quote",
3654
-
"syn 2.0.108",
4078
+
"syn 2.0.110",
3655
4079
]
3656
4080
3657
4081
[[package]]
···
3661
4085
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
3662
4086
dependencies = [
3663
4087
"once_cell",
4088
+
"valuable",
4089
+
]
4090
+
4091
+
[[package]]
4092
+
name = "tracing-log"
4093
+
version = "0.2.0"
4094
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4095
+
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
4096
+
dependencies = [
4097
+
"log",
4098
+
"once_cell",
4099
+
"tracing-core",
4100
+
]
4101
+
4102
+
[[package]]
4103
+
name = "tracing-subscriber"
4104
+
version = "0.3.20"
4105
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4106
+
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
4107
+
dependencies = [
4108
+
"matchers",
4109
+
"nu-ansi-term",
4110
+
"once_cell",
4111
+
"regex-automata",
4112
+
"sharded-slab",
4113
+
"smallvec",
4114
+
"thread_local",
4115
+
"tracing",
4116
+
"tracing-core",
4117
+
"tracing-log",
3664
4118
]
3665
4119
3666
4120
[[package]]
···
3671
4125
dependencies = [
3672
4126
"proc-macro2",
3673
4127
"quote",
3674
-
"syn 2.0.108",
4128
+
"syn 2.0.110",
3675
4129
]
3676
4130
3677
4131
[[package]]
···
3687
4141
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
3688
4142
3689
4143
[[package]]
4144
+
name = "tungstenite"
4145
+
version = "0.24.0"
4146
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4147
+
checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a"
4148
+
dependencies = [
4149
+
"byteorder",
4150
+
"bytes",
4151
+
"data-encoding",
4152
+
"http",
4153
+
"httparse",
4154
+
"log",
4155
+
"rand 0.8.5",
4156
+
"rustls",
4157
+
"rustls-pki-types",
4158
+
"sha1",
4159
+
"thiserror 1.0.69",
4160
+
"utf-8",
4161
+
]
4162
+
4163
+
[[package]]
3690
4164
name = "twoway"
3691
4165
version = "0.1.8"
3692
4166
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3736
4210
version = "0.2.2"
3737
4211
source = "registry+https://github.com/rust-lang/crates.io-index"
3738
4212
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
4213
+
4214
+
[[package]]
4215
+
name = "unicode-xid"
4216
+
version = "0.2.6"
4217
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4218
+
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
3739
4219
3740
4220
[[package]]
3741
4221
name = "unsigned-varint"
···
3786
4266
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
3787
4267
3788
4268
[[package]]
4269
+
name = "valuable"
4270
+
version = "0.1.1"
4271
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4272
+
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
4273
+
4274
+
[[package]]
3789
4275
name = "version_check"
3790
4276
version = "0.9.5"
3791
4277
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3870
4356
"bumpalo",
3871
4357
"proc-macro2",
3872
4358
"quote",
3873
-
"syn 2.0.108",
4359
+
"syn 2.0.110",
3874
4360
"wasm-bindgen-shared",
3875
4361
]
3876
4362
···
3969
4455
]
3970
4456
3971
4457
[[package]]
4458
+
name = "windows"
4459
+
version = "0.61.3"
4460
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4461
+
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
4462
+
dependencies = [
4463
+
"windows-collections",
4464
+
"windows-core 0.61.2",
4465
+
"windows-future",
4466
+
"windows-link 0.1.3",
4467
+
"windows-numerics",
4468
+
]
4469
+
4470
+
[[package]]
4471
+
name = "windows-collections"
4472
+
version = "0.2.0"
4473
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4474
+
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
4475
+
dependencies = [
4476
+
"windows-core 0.61.2",
4477
+
]
4478
+
4479
+
[[package]]
4480
+
name = "windows-core"
4481
+
version = "0.61.2"
4482
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4483
+
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
4484
+
dependencies = [
4485
+
"windows-implement",
4486
+
"windows-interface",
4487
+
"windows-link 0.1.3",
4488
+
"windows-result 0.3.4",
4489
+
"windows-strings 0.4.2",
4490
+
]
4491
+
4492
+
[[package]]
3972
4493
name = "windows-core"
3973
4494
version = "0.62.2"
3974
4495
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3982
4503
]
3983
4504
3984
4505
[[package]]
4506
+
name = "windows-future"
4507
+
version = "0.2.1"
4508
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4509
+
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
4510
+
dependencies = [
4511
+
"windows-core 0.61.2",
4512
+
"windows-link 0.1.3",
4513
+
"windows-threading",
4514
+
]
4515
+
4516
+
[[package]]
3985
4517
name = "windows-implement"
3986
4518
version = "0.60.2"
3987
4519
source = "registry+https://github.com/rust-lang/crates.io-index"
···
3989
4521
dependencies = [
3990
4522
"proc-macro2",
3991
4523
"quote",
3992
-
"syn 2.0.108",
4524
+
"syn 2.0.110",
3993
4525
]
3994
4526
3995
4527
[[package]]
···
4000
4532
dependencies = [
4001
4533
"proc-macro2",
4002
4534
"quote",
4003
-
"syn 2.0.108",
4535
+
"syn 2.0.110",
4004
4536
]
4005
4537
4006
4538
[[package]]
···
4014
4546
version = "0.2.1"
4015
4547
source = "registry+https://github.com/rust-lang/crates.io-index"
4016
4548
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
4549
+
4550
+
[[package]]
4551
+
name = "windows-numerics"
4552
+
version = "0.2.0"
4553
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4554
+
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
4555
+
dependencies = [
4556
+
"windows-core 0.61.2",
4557
+
"windows-link 0.1.3",
4558
+
]
4017
4559
4018
4560
[[package]]
4019
4561
name = "windows-registry"
···
4171
4713
]
4172
4714
4173
4715
[[package]]
4716
+
name = "windows-threading"
4717
+
version = "0.1.0"
4718
+
source = "registry+https://github.com/rust-lang/crates.io-index"
4719
+
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
4720
+
dependencies = [
4721
+
"windows-link 0.1.3",
4722
+
]
4723
+
4724
+
[[package]]
4174
4725
name = "windows_aarch64_gnullvm"
4175
4726
version = "0.42.2"
4176
4727
source = "registry+https://github.com/rust-lang/crates.io-index"
···
4362
4913
4363
4914
[[package]]
4364
4915
name = "wisp-cli"
4365
-
version = "0.1.0"
4916
+
version = "0.2.0"
4366
4917
dependencies = [
4918
+
"axum",
4367
4919
"base64 0.22.1",
4368
4920
"bytes",
4921
+
"chrono",
4369
4922
"clap",
4370
4923
"flate2",
4371
4924
"futures",
···
4378
4931
"jacquard-oauth",
4379
4932
"miette",
4380
4933
"mime_guess",
4934
+
"multibase",
4935
+
"multihash",
4936
+
"n0-future",
4381
4937
"reqwest",
4382
4938
"rustversion",
4383
4939
"serde",
4384
4940
"serde_json",
4941
+
"sha2",
4385
4942
"shellexpand",
4386
4943
"tokio",
4944
+
"tower 0.4.13",
4945
+
"tower-http 0.5.2",
4946
+
"url",
4387
4947
"walkdir",
4388
4948
]
4389
4949
···
4435
4995
dependencies = [
4436
4996
"proc-macro2",
4437
4997
"quote",
4438
-
"syn 2.0.108",
4998
+
"syn 2.0.110",
4439
4999
"synstructure",
4440
5000
]
4441
5001
···
4456
5016
dependencies = [
4457
5017
"proc-macro2",
4458
5018
"quote",
4459
-
"syn 2.0.108",
5019
+
"syn 2.0.110",
4460
5020
]
4461
5021
4462
5022
[[package]]
···
4476
5036
dependencies = [
4477
5037
"proc-macro2",
4478
5038
"quote",
4479
-
"syn 2.0.108",
5039
+
"syn 2.0.110",
4480
5040
"synstructure",
4481
5041
]
4482
5042
···
4519
5079
dependencies = [
4520
5080
"proc-macro2",
4521
5081
"quote",
4522
-
"syn 2.0.108",
5082
+
"syn 2.0.110",
4523
5083
]
+17
-8
cli/Cargo.toml
···
1
1
[package]
2
2
name = "wisp-cli"
3
-
version = "0.1.0"
3
+
version = "0.2.0"
4
4
edition = "2024"
5
5
6
6
[features]
···
8
8
place_wisp = []
9
9
10
10
[dependencies]
11
-
jacquard = { path = "jacquard/crates/jacquard", features = ["loopback"] }
12
-
jacquard-oauth = { path = "jacquard/crates/jacquard-oauth" }
13
-
jacquard-api = { path = "jacquard/crates/jacquard-api" }
14
-
jacquard-common = { path = "jacquard/crates/jacquard-common" }
15
-
jacquard-identity = { path = "jacquard/crates/jacquard-identity", features = ["dns"] }
16
-
jacquard-derive = { path = "jacquard/crates/jacquard-derive" }
17
-
jacquard-lexicon = { path = "jacquard/crates/jacquard-lexicon" }
11
+
jacquard = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["loopback"] }
12
+
jacquard-oauth = { git = "https://tangled.org/@nonbinary.computer/jacquard" }
13
+
jacquard-api = { git = "https://tangled.org/@nonbinary.computer/jacquard" }
14
+
jacquard-common = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["websocket"] }
15
+
jacquard-identity = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["dns"] }
16
+
jacquard-derive = { git = "https://tangled.org/@nonbinary.computer/jacquard" }
17
+
jacquard-lexicon = { git = "https://tangled.org/@nonbinary.computer/jacquard" }
18
18
clap = { version = "4.5.51", features = ["derive"] }
19
19
tokio = { version = "1.48", features = ["full"] }
20
20
miette = { version = "7.6.0", features = ["fancy"] }
···
30
30
mime_guess = "2.0"
31
31
bytes = "1.10"
32
32
futures = "0.3.31"
33
+
multihash = "0.19.3"
34
+
multibase = "0.9"
35
+
sha2 = "0.10"
36
+
axum = "0.7"
37
+
tower-http = { version = "0.5", features = ["fs", "compression-gzip"] }
38
+
tower = "0.4"
39
+
n0-future = "0.1"
40
+
chrono = "0.4"
41
+
url = "2.5"
+85
cli/src/blob_map.rs
···
1
+
use jacquard_common::types::blob::BlobRef;
2
+
use jacquard_common::IntoStatic;
3
+
use std::collections::HashMap;
4
+
5
+
use crate::place_wisp::fs::{Directory, EntryNode};
6
+
7
+
/// Extract blob information from a directory tree
8
+
/// Returns a map of file paths to their blob refs and CIDs
9
+
///
10
+
/// This mirrors the TypeScript implementation in src/lib/wisp-utils.ts lines 275-302
11
+
pub fn extract_blob_map(
12
+
directory: &Directory,
13
+
) -> HashMap<String, (BlobRef<'static>, String)> {
14
+
extract_blob_map_recursive(directory, String::new())
15
+
}
16
+
17
+
fn extract_blob_map_recursive(
18
+
directory: &Directory,
19
+
current_path: String,
20
+
) -> HashMap<String, (BlobRef<'static>, String)> {
21
+
let mut blob_map = HashMap::new();
22
+
23
+
for entry in &directory.entries {
24
+
let full_path = if current_path.is_empty() {
25
+
entry.name.to_string()
26
+
} else {
27
+
format!("{}/{}", current_path, entry.name)
28
+
};
29
+
30
+
match &entry.node {
31
+
EntryNode::File(file_node) => {
32
+
// Extract CID from blob ref
33
+
// BlobRef is an enum with Blob variant, which has a ref field (CidLink)
34
+
let blob_ref = &file_node.blob;
35
+
let cid_string = blob_ref.blob().r#ref.to_string();
36
+
37
+
// Store with full path (mirrors TypeScript implementation)
38
+
blob_map.insert(
39
+
full_path,
40
+
(blob_ref.clone().into_static(), cid_string)
41
+
);
42
+
}
43
+
EntryNode::Directory(subdir) => {
44
+
let sub_map = extract_blob_map_recursive(subdir, full_path);
45
+
blob_map.extend(sub_map);
46
+
}
47
+
EntryNode::Unknown(_) => {
48
+
// Skip unknown node types
49
+
}
50
+
}
51
+
}
52
+
53
+
blob_map
54
+
}
55
+
56
+
/// Normalize file path by removing base folder prefix
57
+
/// Example: "cobblemon/index.html" -> "index.html"
58
+
///
59
+
/// Note: This function is kept for reference but is no longer used in production code.
60
+
/// The TypeScript server has a similar normalization (src/routes/wisp.ts line 291) to handle
61
+
/// uploads that include a base folder prefix, but our CLI doesn't need this since we
62
+
/// track full paths consistently.
63
+
#[allow(dead_code)]
64
+
pub fn normalize_path(path: &str) -> String {
65
+
// Remove base folder prefix (everything before first /)
66
+
if let Some(idx) = path.find('/') {
67
+
path[idx + 1..].to_string()
68
+
} else {
69
+
path.to_string()
70
+
}
71
+
}
72
+
73
+
#[cfg(test)]
74
+
mod tests {
75
+
use super::*;
76
+
77
+
#[test]
78
+
fn test_normalize_path() {
79
+
assert_eq!(normalize_path("index.html"), "index.html");
80
+
assert_eq!(normalize_path("cobblemon/index.html"), "index.html");
81
+
assert_eq!(normalize_path("folder/subfolder/file.txt"), "subfolder/file.txt");
82
+
assert_eq!(normalize_path("a/b/c/d.txt"), "b/c/d.txt");
83
+
}
84
+
}
85
+
+66
cli/src/cid.rs
···
1
+
use jacquard_common::types::cid::IpldCid;
2
+
use sha2::{Digest, Sha256};
3
+
4
+
/// Compute CID (Content Identifier) for blob content
5
+
/// Uses the same algorithm as AT Protocol: CIDv1 with raw codec (0x55) and SHA-256
6
+
///
7
+
/// CRITICAL: This must be called on BASE64-ENCODED GZIPPED content, not just gzipped content
8
+
///
9
+
/// Based on @atproto/common/src/ipld.ts sha256RawToCid implementation
10
+
pub fn compute_cid(content: &[u8]) -> String {
11
+
// Compute the SHA-256 hash of the content (mirrors the AT Protocol reference implementation)
12
+
let hash = Sha256::digest(content);
13
+
14
+
// Create multihash (code 0x12 = sha2-256)
15
+
let multihash = multihash::Multihash::wrap(0x12, &hash)
16
+
.expect("SHA-256 hash should always fit in multihash");
17
+
18
+
// Create CIDv1 with raw codec (0x55)
19
+
let cid = IpldCid::new_v1(0x55, multihash);
20
+
21
+
// Convert to base32 string representation
22
+
cid.to_string_of_base(multibase::Base::Base32Lower)
23
+
.unwrap_or_else(|_| cid.to_string())
24
+
}
25
+
26
+
#[cfg(test)]
27
+
mod tests {
28
+
use super::*;
29
+
use base64::Engine;
30
+
31
+
#[test]
32
+
fn test_compute_cid() {
33
+
// Test with a simple string: "hello"
34
+
let content = b"hello";
35
+
let cid = compute_cid(content);
36
+
37
+
// CID should start with 'baf' for raw codec base32
38
+
assert!(cid.starts_with("baf"));
39
+
}
40
+
41
+
#[test]
42
+
fn test_compute_cid_base64_encoded() {
43
+
// Simulate the actual use case: gzipped then base64 encoded
44
+
use flate2::write::GzEncoder;
45
+
use flate2::Compression;
46
+
use std::io::Write;
47
+
48
+
let original = b"hello world";
49
+
50
+
// Gzip compress
51
+
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
52
+
encoder.write_all(original).unwrap();
53
+
let gzipped = encoder.finish().unwrap();
54
+
55
+
// Base64 encode the gzipped data
56
+
let base64_bytes = base64::prelude::BASE64_STANDARD.encode(&gzipped).into_bytes();
57
+
58
+
// Compute CID on the base64 bytes
59
+
let cid = compute_cid(&base64_bytes);
60
+
61
+
// Should be a valid CID
62
+
assert!(cid.starts_with("baf"));
63
+
assert!(cid.len() > 10);
64
+
}
65
+
}
66
+
+71
cli/src/download.rs
···
1
+
use base64::Engine;
2
+
use bytes::Bytes;
3
+
use flate2::read::GzDecoder;
4
+
use jacquard_common::types::blob::BlobRef;
5
+
use miette::IntoDiagnostic;
6
+
use std::io::Read;
7
+
use url::Url;
8
+
9
+
/// Download a blob from the PDS
10
+
pub async fn download_blob(pds_url: &Url, blob_ref: &BlobRef<'_>, did: &str) -> miette::Result<Bytes> {
11
+
// Extract CID from blob ref
12
+
let cid = blob_ref.blob().r#ref.to_string();
13
+
14
+
// Construct blob download URL
15
+
// The correct endpoint is: /xrpc/com.atproto.sync.getBlob?did={did}&cid={cid}
16
+
let blob_url = pds_url
17
+
.join(&format!("/xrpc/com.atproto.sync.getBlob?did={}&cid={}", did, cid))
18
+
.into_diagnostic()?;
19
+
20
+
let client = reqwest::Client::new();
21
+
let response = client
22
+
.get(blob_url)
23
+
.send()
24
+
.await
25
+
.into_diagnostic()?;
26
+
27
+
if !response.status().is_success() {
28
+
return Err(miette::miette!(
29
+
"Failed to download blob: {}",
30
+
response.status()
31
+
));
32
+
}
33
+
34
+
let bytes = response.bytes().await.into_diagnostic()?;
35
+
Ok(bytes)
36
+
}
37
+
38
+
/// Decompress and decode a blob (base64 + gzip)
39
+
pub fn decompress_blob(data: &[u8], is_base64: bool, is_gzipped: bool) -> miette::Result<Vec<u8>> {
40
+
let mut current_data = data.to_vec();
41
+
42
+
// First, decode base64 if needed
43
+
if is_base64 {
44
+
current_data = base64::prelude::BASE64_STANDARD
45
+
.decode(&current_data)
46
+
.into_diagnostic()?;
47
+
}
48
+
49
+
// Then, decompress gzip if needed
50
+
if is_gzipped {
51
+
let mut decoder = GzDecoder::new(&current_data[..]);
52
+
let mut decompressed = Vec::new();
53
+
decoder.read_to_end(&mut decompressed).into_diagnostic()?;
54
+
current_data = decompressed;
55
+
}
56
+
57
+
Ok(current_data)
58
+
}
59
+
60
+
/// Download and decompress a blob
61
+
pub async fn download_and_decompress_blob(
62
+
pds_url: &Url,
63
+
blob_ref: &BlobRef<'_>,
64
+
did: &str,
65
+
is_base64: bool,
66
+
is_gzipped: bool,
67
+
) -> miette::Result<Vec<u8>> {
68
+
let data = download_blob(pds_url, blob_ref, did).await?;
69
+
decompress_blob(&data, is_base64, is_gzipped)
70
+
}
71
+
+243
-56
cli/src/main.rs
···
1
1
mod builder_types;
2
2
mod place_wisp;
3
+
mod cid;
4
+
mod blob_map;
5
+
mod metadata;
6
+
mod download;
7
+
mod pull;
8
+
mod serve;
3
9
4
-
use clap::Parser;
10
+
use clap::{Parser, Subcommand};
5
11
use jacquard::CowStr;
6
-
use jacquard::client::{Agent, FileAuthStore, AgentSessionExt, MemoryCredentialSession};
12
+
use jacquard::client::{Agent, FileAuthStore, AgentSessionExt, MemoryCredentialSession, AgentSession};
7
13
use jacquard::oauth::client::OAuthClient;
8
14
use jacquard::oauth::loopback::LoopbackConfig;
9
15
use jacquard::prelude::IdentityResolver;
···
11
17
use jacquard_common::types::blob::MimeType;
12
18
use miette::IntoDiagnostic;
13
19
use std::path::{Path, PathBuf};
20
+
use std::collections::HashMap;
14
21
use flate2::Compression;
15
22
use flate2::write::GzEncoder;
16
23
use std::io::Write;
···
20
27
use place_wisp::fs::*;
21
28
22
29
#[derive(Parser, Debug)]
23
-
#[command(author, version, about = "Deploy a static site to wisp.place")]
30
+
#[command(author, version, about = "wisp.place CLI tool")]
24
31
struct Args {
32
+
#[command(subcommand)]
33
+
command: Option<Commands>,
34
+
35
+
// Deploy arguments (when no subcommand is specified)
25
36
/// Handle (e.g., alice.bsky.social), DID, or PDS URL
26
-
input: CowStr<'static>,
37
+
#[arg(global = true, conflicts_with = "command")]
38
+
input: Option<CowStr<'static>>,
27
39
28
40
/// Path to the directory containing your static site
29
-
#[arg(short, long, default_value = ".")]
30
-
path: PathBuf,
41
+
#[arg(short, long, global = true, conflicts_with = "command")]
42
+
path: Option<PathBuf>,
31
43
32
44
/// Site name (defaults to directory name)
33
-
#[arg(short, long)]
45
+
#[arg(short, long, global = true, conflicts_with = "command")]
34
46
site: Option<String>,
35
47
36
-
/// Path to auth store file (will be created if missing, only used with OAuth)
37
-
#[arg(long, default_value = "/tmp/wisp-oauth-session.json")]
38
-
store: String,
48
+
/// Path to auth store file
49
+
#[arg(long, global = true, conflicts_with = "command")]
50
+
store: Option<String>,
39
51
40
-
/// App Password for authentication (alternative to OAuth)
41
-
#[arg(long)]
52
+
/// App Password for authentication
53
+
#[arg(long, global = true, conflicts_with = "command")]
42
54
password: Option<CowStr<'static>>,
43
55
}
44
56
57
+
#[derive(Subcommand, Debug)]
58
+
enum Commands {
59
+
/// Deploy a static site to wisp.place (default command)
60
+
Deploy {
61
+
/// Handle (e.g., alice.bsky.social), DID, or PDS URL
62
+
input: CowStr<'static>,
63
+
64
+
/// Path to the directory containing your static site
65
+
#[arg(short, long, default_value = ".")]
66
+
path: PathBuf,
67
+
68
+
/// Site name (defaults to directory name)
69
+
#[arg(short, long)]
70
+
site: Option<String>,
71
+
72
+
/// Path to auth store file (will be created if missing, only used with OAuth)
73
+
#[arg(long, default_value = "/tmp/wisp-oauth-session.json")]
74
+
store: String,
75
+
76
+
/// App Password for authentication (alternative to OAuth)
77
+
#[arg(long)]
78
+
password: Option<CowStr<'static>>,
79
+
},
80
+
/// Pull a site from the PDS to a local directory
81
+
Pull {
82
+
/// Handle (e.g., alice.bsky.social) or DID
83
+
input: CowStr<'static>,
84
+
85
+
/// Site name (record key)
86
+
#[arg(short, long)]
87
+
site: String,
88
+
89
+
/// Output directory for the downloaded site
90
+
#[arg(short, long, default_value = ".")]
91
+
output: PathBuf,
92
+
},
93
+
/// Serve a site locally with real-time firehose updates
94
+
Serve {
95
+
/// Handle (e.g., alice.bsky.social) or DID
96
+
input: CowStr<'static>,
97
+
98
+
/// Site name (record key)
99
+
#[arg(short, long)]
100
+
site: String,
101
+
102
+
/// Output directory for the site files
103
+
#[arg(short, long, default_value = ".")]
104
+
output: PathBuf,
105
+
106
+
/// Port to serve on
107
+
#[arg(short, long, default_value = "8080")]
108
+
port: u16,
109
+
},
110
+
}
111
+
45
112
#[tokio::main]
46
113
async fn main() -> miette::Result<()> {
47
114
let args = Args::parse();
48
115
49
-
// Dispatch to appropriate authentication method
50
-
if let Some(password) = args.password {
51
-
run_with_app_password(args.input, password, args.path, args.site).await
52
-
} else {
53
-
run_with_oauth(args.input, args.store, args.path, args.site).await
116
+
match args.command {
117
+
Some(Commands::Deploy { input, path, site, store, password }) => {
118
+
// Dispatch to appropriate authentication method
119
+
if let Some(password) = password {
120
+
run_with_app_password(input, password, path, site).await
121
+
} else {
122
+
run_with_oauth(input, store, path, site).await
123
+
}
124
+
}
125
+
Some(Commands::Pull { input, site, output }) => {
126
+
pull::pull_site(input, CowStr::from(site), output).await
127
+
}
128
+
Some(Commands::Serve { input, site, output, port }) => {
129
+
serve::serve_site(input, CowStr::from(site), output, port).await
130
+
}
131
+
None => {
132
+
// Legacy mode: if input is provided, assume deploy command
133
+
if let Some(input) = args.input {
134
+
let path = args.path.unwrap_or_else(|| PathBuf::from("."));
135
+
let store = args.store.unwrap_or_else(|| "/tmp/wisp-oauth-session.json".to_string());
136
+
137
+
// Dispatch to appropriate authentication method
138
+
if let Some(password) = args.password {
139
+
run_with_app_password(input, password, path, args.site).await
140
+
} else {
141
+
run_with_oauth(input, store, path, args.site).await
142
+
}
143
+
} else {
144
+
// No command and no input, show help
145
+
use clap::CommandFactory;
146
+
Args::command().print_help().into_diagnostic()?;
147
+
Ok(())
148
+
}
149
+
}
54
150
}
55
151
}
56
152
···
107
203
108
204
println!("Deploying site '{}'...", site_name);
109
205
110
-
// Build directory tree
111
-
let root_dir = build_directory(agent, &path).await?;
206
+
// Try to fetch existing manifest for incremental updates
207
+
let existing_blob_map: HashMap<String, (jacquard_common::types::blob::BlobRef<'static>, String)> = {
208
+
use jacquard_common::types::string::AtUri;
209
+
210
+
// Get the DID for this session
211
+
let session_info = agent.session_info().await;
212
+
if let Some((did, _)) = session_info {
213
+
// Construct the AT URI for the record
214
+
let uri_string = format!("at://{}/place.wisp.fs/{}", did, site_name);
215
+
if let Ok(uri) = AtUri::new(&uri_string) {
216
+
match agent.get_record::<Fs>(&uri).await {
217
+
Ok(response) => {
218
+
match response.into_output() {
219
+
Ok(record_output) => {
220
+
let existing_manifest = record_output.value;
221
+
let blob_map = blob_map::extract_blob_map(&existing_manifest.root);
222
+
println!("Found existing manifest with {} files, checking for changes...", blob_map.len());
223
+
blob_map
224
+
}
225
+
Err(_) => {
226
+
println!("No existing manifest found, uploading all files...");
227
+
HashMap::new()
228
+
}
229
+
}
230
+
}
231
+
Err(_) => {
232
+
// Record doesn't exist yet - this is a new site
233
+
println!("No existing manifest found, uploading all files...");
234
+
HashMap::new()
235
+
}
236
+
}
237
+
} else {
238
+
println!("No existing manifest found (invalid URI), uploading all files...");
239
+
HashMap::new()
240
+
}
241
+
} else {
242
+
println!("No existing manifest found (could not get DID), uploading all files...");
243
+
HashMap::new()
244
+
}
245
+
};
112
246
113
-
// Count total files
114
-
let file_count = count_files(&root_dir);
247
+
// Build directory tree, reusing blobs from the existing manifest where files are unchanged
248
+
let (root_dir, total_files, reused_count) = build_directory(agent, &path, &existing_blob_map, String::new()).await?;
249
+
let uploaded_count = total_files - reused_count;
115
250
116
251
// Create the Fs record
117
252
let fs_record = Fs::new()
118
253
.site(CowStr::from(site_name.clone()))
119
254
.root(root_dir)
120
-
.file_count(file_count as i64)
255
+
.file_count(total_files as i64)
121
256
.created_at(Datetime::now())
122
257
.build();
123
258
···
132
267
.and_then(|s| s.split('/').next())
133
268
.ok_or_else(|| miette::miette!("Failed to parse DID from URI"))?;
134
269
135
-
println!("Deployed site '{}': {}", site_name, output.uri);
136
-
println!("Available at: https://sites.wisp.place/{}/{}", did, site_name);
270
+
println!("\nโ Deployed site '{}': {}", site_name, output.uri);
271
+
println!(" Total files: {} ({} reused, {} uploaded)", total_files, reused_count, uploaded_count);
272
+
println!(" Available at: https://sites.wisp.place/{}/{}", did, site_name);
137
273
138
274
Ok(())
139
275
}
140
276
141
277
/// Recursively build a Directory from a filesystem path
278
+
/// current_path is the path from the root of the site (e.g., "" for root, "config" for config dir)
142
279
fn build_directory<'a>(
143
280
agent: &'a Agent<impl jacquard::client::AgentSession + IdentityResolver + 'a>,
144
281
dir_path: &'a Path,
145
-
) -> std::pin::Pin<Box<dyn std::future::Future<Output = miette::Result<Directory<'static>>> + 'a>>
282
+
existing_blobs: &'a HashMap<String, (jacquard_common::types::blob::BlobRef<'static>, String)>,
283
+
current_path: String,
284
+
) -> std::pin::Pin<Box<dyn std::future::Future<Output = miette::Result<(Directory<'static>, usize, usize)>> + 'a>>
146
285
{
147
286
Box::pin(async move {
148
287
// Collect all directory entries first
···
170
309
let metadata = entry.metadata().into_diagnostic()?;
171
310
172
311
if metadata.is_file() {
173
-
file_tasks.push((name_str, path));
312
+
// Construct full path for this file (for blob map lookup)
313
+
let full_path = if current_path.is_empty() {
314
+
name_str.clone()
315
+
} else {
316
+
format!("{}/{}", current_path, name_str)
317
+
};
318
+
file_tasks.push((name_str, path, full_path));
174
319
} else if metadata.is_dir() {
175
320
dir_tasks.push((name_str, path));
176
321
}
177
322
}
178
323
179
324
// Process files concurrently with a limit of 5
180
-
let file_entries: Vec<Entry> = stream::iter(file_tasks)
181
-
.map(|(name, path)| async move {
182
-
let file_node = process_file(agent, &path).await?;
183
-
Ok::<_, miette::Report>(Entry::new()
325
+
let file_results: Vec<(Entry<'static>, bool)> = stream::iter(file_tasks)
326
+
.map(|(name, path, full_path)| async move {
327
+
let (file_node, reused) = process_file(agent, &path, &full_path, existing_blobs).await?;
328
+
let entry = Entry::new()
184
329
.name(CowStr::from(name))
185
330
.node(EntryNode::File(Box::new(file_node)))
186
-
.build())
331
+
.build();
332
+
Ok::<_, miette::Report>((entry, reused))
187
333
})
188
334
.buffer_unordered(5)
189
335
.collect::<Vec<_>>()
190
336
.await
191
337
.into_iter()
192
338
.collect::<miette::Result<Vec<_>>>()?;
339
+
340
+
let mut file_entries = Vec::new();
341
+
let mut reused_count = 0;
342
+
let mut total_files = 0;
343
+
344
+
for (entry, reused) in file_results {
345
+
file_entries.push(entry);
346
+
total_files += 1;
347
+
if reused {
348
+
reused_count += 1;
349
+
}
350
+
}
193
351
194
352
// Process directories recursively (sequentially to avoid too much nesting)
195
353
let mut dir_entries = Vec::new();
196
354
for (name, path) in dir_tasks {
197
-
let subdir = build_directory(agent, &path).await?;
355
+
// Construct full path for subdirectory
356
+
let subdir_path = if current_path.is_empty() {
357
+
name.clone()
358
+
} else {
359
+
format!("{}/{}", current_path, name)
360
+
};
361
+
let (subdir, sub_total, sub_reused) = build_directory(agent, &path, existing_blobs, subdir_path).await?;
198
362
dir_entries.push(Entry::new()
199
363
.name(CowStr::from(name))
200
364
.node(EntryNode::Directory(Box::new(subdir)))
201
365
.build());
366
+
total_files += sub_total;
367
+
reused_count += sub_reused;
202
368
}
203
369
204
370
// Combine file and directory entries
205
371
let mut entries = file_entries;
206
372
entries.extend(dir_entries);
207
373
208
-
Ok(Directory::new()
374
+
let directory = Directory::new()
209
375
.r#type(CowStr::from("directory"))
210
376
.entries(entries)
211
-
.build())
377
+
.build();
378
+
379
+
Ok((directory, total_files, reused_count))
212
380
})
213
381
}
214
382
215
-
/// Process a single file: gzip -> base64 -> upload blob
383
+
/// Process a single file: gzip -> base64 -> upload blob (or reuse existing)
384
+
/// Returns (File, reused: bool)
385
+
/// file_path_key is the full path from the site root (e.g., "config/file.json") for blob map lookup
216
386
async fn process_file(
217
387
agent: &Agent<impl jacquard::client::AgentSession + IdentityResolver>,
218
388
file_path: &Path,
219
-
) -> miette::Result<File<'static>>
389
+
file_path_key: &str,
390
+
existing_blobs: &HashMap<String, (jacquard_common::types::blob::BlobRef<'static>, String)>,
391
+
) -> miette::Result<(File<'static>, bool)>
220
392
{
221
393
// Read file
222
394
let file_data = std::fs::read(file_path).into_diagnostic()?;
···
234
406
// Base64 encode the gzipped data
235
407
let base64_bytes = base64::prelude::BASE64_STANDARD.encode(&gzipped).into_bytes();
236
408
237
-
// Upload blob as octet-stream
409
+
// Compute CID for this file (CRITICAL: computed over the base64-encoded gzipped bytes, i.e. the exact content uploaded as the blob)
410
+
let file_cid = cid::compute_cid(&base64_bytes);
411
+
412
+
// Check if we have an existing blob with the same CID
413
+
let existing_blob = existing_blobs.get(file_path_key);
414
+
415
+
if let Some((existing_blob_ref, existing_cid)) = existing_blob {
416
+
if existing_cid == &file_cid {
417
+
// CIDs match - reuse existing blob
418
+
println!(" โ Reusing blob for {} (CID: {})", file_path_key, file_cid);
419
+
return Ok((
420
+
File::new()
421
+
.r#type(CowStr::from("file"))
422
+
.blob(existing_blob_ref.clone())
423
+
.encoding(CowStr::from("gzip"))
424
+
.mime_type(CowStr::from(original_mime))
425
+
.base64(true)
426
+
.build(),
427
+
true
428
+
));
429
+
}
430
+
}
431
+
432
+
// File is new or changed - upload it
433
+
println!(" โ Uploading {} ({} bytes, CID: {})", file_path_key, base64_bytes.len(), file_cid);
238
434
let blob = agent.upload_blob(
239
435
base64_bytes,
240
436
MimeType::new_static("application/octet-stream"),
241
437
).await?;
242
438
243
-
Ok(File::new()
244
-
.r#type(CowStr::from("file"))
245
-
.blob(blob)
246
-
.encoding(CowStr::from("gzip"))
247
-
.mime_type(CowStr::from(original_mime))
248
-
.base64(true)
249
-
.build())
439
+
Ok((
440
+
File::new()
441
+
.r#type(CowStr::from("file"))
442
+
.blob(blob)
443
+
.encoding(CowStr::from("gzip"))
444
+
.mime_type(CowStr::from(original_mime))
445
+
.base64(true)
446
+
.build(),
447
+
false
448
+
))
250
449
}
251
450
252
-
/// Count total files in a directory tree
253
-
fn count_files(dir: &Directory) -> usize {
254
-
let mut count = 0;
255
-
for entry in &dir.entries {
256
-
match &entry.node {
257
-
EntryNode::File(_) => count += 1,
258
-
EntryNode::Directory(subdir) => count += count_files(subdir),
259
-
_ => {} // Unknown variants
260
-
}
261
-
}
262
-
count
263
-
}
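
The incremental deploy above decides whether to re-upload a file by looking up its full site-relative path in the blob map extracted from the previous manifest and comparing CIDs computed over the exact bytes that get uploaded (the base64-encoded gzipped content). A minimal sketch of that decision, using plain strings instead of the CLI's `BlobRef` type; the paths and CID values are illustrative:

```rust
use std::collections::HashMap;

/// Returns true when the previously uploaded blob can be reused for `full_path`.
/// `existing_cids` maps site-relative paths (e.g. "config/file.json") to the CID
/// stored in the previous manifest; `candidate_cid` must be computed over the
/// same bytes that would be uploaded (base64 of the gzipped file).
fn can_reuse_blob(
    existing_cids: &HashMap<String, String>,
    full_path: &str,
    candidate_cid: &str,
) -> bool {
    existing_cids
        .get(full_path)
        .map(|previous| previous == candidate_cid)
        .unwrap_or(false)
}

fn main() {
    let mut existing = HashMap::new();
    existing.insert("index.html".to_string(), "cid-aaa".to_string());

    // Unchanged file: the old blob ref is kept and no upload happens.
    assert!(can_reuse_blob(&existing, "index.html", "cid-aaa"));
    // Changed content or a brand-new path: upload and record the new CID.
    assert!(!can_reuse_blob(&existing, "index.html", "cid-bbb"));
    assert!(!can_reuse_blob(&existing, "about/index.html", "cid-aaa"));
}
```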
+46
cli/src/metadata.rs
+46
cli/src/metadata.rs
···
1
+
use serde::{Deserialize, Serialize};
2
+
use std::collections::HashMap;
3
+
use std::path::Path;
4
+
use miette::IntoDiagnostic;
5
+
6
+
/// Metadata tracking file CIDs for incremental updates
7
+
#[derive(Debug, Clone, Serialize, Deserialize)]
8
+
pub struct SiteMetadata {
9
+
/// Record CID from the PDS
10
+
pub record_cid: String,
11
+
/// Map of file paths to their blob CIDs
12
+
pub file_cids: HashMap<String, String>,
13
+
/// Timestamp when the site was last synced
14
+
pub last_sync: i64,
15
+
}
16
+
17
+
impl SiteMetadata {
18
+
pub fn new(record_cid: String, file_cids: HashMap<String, String>) -> Self {
19
+
Self {
20
+
record_cid,
21
+
file_cids,
22
+
last_sync: chrono::Utc::now().timestamp(),
23
+
}
24
+
}
25
+
26
+
/// Load metadata from a directory
27
+
pub fn load(dir: &Path) -> miette::Result<Option<Self>> {
28
+
let metadata_path = dir.join(".wisp-metadata.json");
29
+
if !metadata_path.exists() {
30
+
return Ok(None);
31
+
}
32
+
33
+
let contents = std::fs::read_to_string(&metadata_path).into_diagnostic()?;
34
+
let metadata: SiteMetadata = serde_json::from_str(&contents).into_diagnostic()?;
35
+
Ok(Some(metadata))
36
+
}
37
+
38
+
/// Save metadata to a directory
39
+
pub fn save(&self, dir: &Path) -> miette::Result<()> {
40
+
let metadata_path = dir.join(".wisp-metadata.json");
41
+
let contents = serde_json::to_string_pretty(self).into_diagnostic()?;
42
+
std::fs::write(&metadata_path, contents).into_diagnostic()?;
43
+
Ok(())
44
+
}
45
+
}
46
+
+305
cli/src/pull.rs
+305
cli/src/pull.rs
···
1
+
use crate::blob_map;
2
+
use crate::download;
3
+
use crate::metadata::SiteMetadata;
4
+
use crate::place_wisp::fs::*;
5
+
use jacquard::CowStr;
6
+
use jacquard::prelude::IdentityResolver;
7
+
use jacquard_common::types::string::Did;
8
+
use jacquard_common::xrpc::XrpcExt;
9
+
use jacquard_identity::PublicResolver;
10
+
use miette::IntoDiagnostic;
11
+
use std::collections::HashMap;
12
+
use std::path::{Path, PathBuf};
13
+
use url::Url;
14
+
15
+
/// Pull a site from the PDS to a local directory
16
+
pub async fn pull_site(
17
+
input: CowStr<'static>,
18
+
rkey: CowStr<'static>,
19
+
output_dir: PathBuf,
20
+
) -> miette::Result<()> {
21
+
println!("Pulling site {} from {}...", rkey, input);
22
+
23
+
// Resolve handle to DID if needed
24
+
let resolver = PublicResolver::default();
25
+
let did = if input.starts_with("did:") {
26
+
Did::new(&input).into_diagnostic()?
27
+
} else {
28
+
// It's a handle, resolve it
29
+
let handle = jacquard_common::types::string::Handle::new(&input).into_diagnostic()?;
30
+
resolver.resolve_handle(&handle).await.into_diagnostic()?
31
+
};
32
+
33
+
// Resolve PDS endpoint for the DID
34
+
let pds_url = resolver.pds_for_did(&did).await.into_diagnostic()?;
35
+
println!("Resolved PDS: {}", pds_url);
36
+
37
+
// Fetch the place.wisp.fs record
38
+
39
+
println!("Fetching record from PDS...");
40
+
let client = reqwest::Client::new();
41
+
42
+
// Use com.atproto.repo.getRecord
43
+
use jacquard::api::com_atproto::repo::get_record::GetRecord;
44
+
use jacquard_common::types::string::Rkey as RkeyType;
45
+
let rkey_parsed = RkeyType::new(&rkey).into_diagnostic()?;
46
+
47
+
use jacquard_common::types::ident::AtIdentifier;
48
+
use jacquard_common::types::string::RecordKey;
49
+
let request = GetRecord::new()
50
+
.repo(AtIdentifier::Did(did.clone()))
51
+
.collection(CowStr::from("place.wisp.fs"))
52
+
.rkey(RecordKey::from(rkey_parsed))
53
+
.build();
54
+
55
+
let response = client
56
+
.xrpc(pds_url.clone())
57
+
.send(&request)
58
+
.await
59
+
.into_diagnostic()?;
60
+
61
+
let record_output = response.into_output().into_diagnostic()?;
62
+
let record_cid = record_output.cid.as_ref().map(|c| c.to_string()).unwrap_or_default();
63
+
64
+
// Parse the record value as Fs
65
+
use jacquard_common::types::value::from_data;
66
+
let fs_record: Fs = from_data(&record_output.value).into_diagnostic()?;
67
+
68
+
let file_count = fs_record.file_count.map(|c| c.to_string()).unwrap_or_else(|| "?".to_string());
69
+
println!("Found site '{}' with {} files", fs_record.site, file_count);
70
+
71
+
// Load existing metadata for incremental updates
72
+
let existing_metadata = SiteMetadata::load(&output_dir)?;
73
+
let existing_file_cids = existing_metadata
74
+
.as_ref()
75
+
.map(|m| m.file_cids.clone())
76
+
.unwrap_or_default();
77
+
78
+
// Extract blob map from the new manifest
79
+
let new_blob_map = blob_map::extract_blob_map(&fs_record.root);
80
+
let new_file_cids: HashMap<String, String> = new_blob_map
81
+
.iter()
82
+
.map(|(path, (_blob_ref, cid))| (path.clone(), cid.clone()))
83
+
.collect();
84
+
85
+
// Clean up any leftover temp directories from previous failed attempts
86
+
let parent = output_dir.parent().unwrap_or_else(|| std::path::Path::new("."));
87
+
let output_name = output_dir.file_name().unwrap_or_else(|| std::ffi::OsStr::new("site")).to_string_lossy();
88
+
let temp_prefix = format!(".tmp-{}-", output_name);
89
+
90
+
if let Ok(entries) = parent.read_dir() {
91
+
for entry in entries.flatten() {
92
+
let name = entry.file_name();
93
+
if name.to_string_lossy().starts_with(&temp_prefix) {
94
+
let _ = std::fs::remove_dir_all(entry.path());
95
+
}
96
+
}
97
+
}
98
+
99
+
// Check if we need to update (but only if output directory actually exists with files)
100
+
if let Some(metadata) = &existing_metadata {
101
+
if metadata.record_cid == record_cid {
102
+
// Verify that the output directory actually exists and has content
103
+
let has_content = output_dir.exists() &&
104
+
output_dir.read_dir()
105
+
.map(|mut entries| entries.any(|e| {
106
+
if let Ok(entry) = e {
107
+
!entry.file_name().to_string_lossy().starts_with(".wisp-metadata")
108
+
} else {
109
+
false
110
+
}
111
+
}))
112
+
.unwrap_or(false);
113
+
114
+
if has_content {
115
+
println!("Site is already up to date!");
116
+
return Ok(());
117
+
}
118
+
}
119
+
}
120
+
121
+
// Create temporary directory for atomic update
122
+
// Place temp dir in parent directory to avoid issues with non-existent output_dir
123
+
let parent = output_dir.parent().unwrap_or_else(|| std::path::Path::new("."));
124
+
let temp_dir_name = format!(
125
+
".tmp-{}-{}",
126
+
output_dir.file_name().unwrap_or_else(|| std::ffi::OsStr::new("site")).to_string_lossy(),
127
+
chrono::Utc::now().timestamp()
128
+
);
129
+
let temp_dir = parent.join(temp_dir_name);
130
+
std::fs::create_dir_all(&temp_dir).into_diagnostic()?;
131
+
132
+
println!("Downloading files...");
133
+
let mut downloaded = 0;
134
+
let mut reused = 0;
135
+
136
+
// Download files recursively
137
+
let download_result = download_directory(
138
+
&fs_record.root,
139
+
&temp_dir,
140
+
&pds_url,
141
+
did.as_str(),
142
+
&new_blob_map,
143
+
&existing_file_cids,
144
+
&output_dir,
145
+
String::new(),
146
+
&mut downloaded,
147
+
&mut reused,
148
+
)
149
+
.await;
150
+
151
+
// If download failed, clean up temp directory
152
+
if let Err(e) = download_result {
153
+
let _ = std::fs::remove_dir_all(&temp_dir);
154
+
return Err(e);
155
+
}
156
+
157
+
println!(
158
+
"Downloaded {} files, reused {} files",
159
+
downloaded, reused
160
+
);
161
+
162
+
// Save metadata
163
+
let metadata = SiteMetadata::new(record_cid, new_file_cids);
164
+
metadata.save(&temp_dir)?;
165
+
166
+
// Move files from temp to output directory
167
+
let output_abs = std::fs::canonicalize(&output_dir).unwrap_or_else(|_| output_dir.clone());
168
+
let current_dir = std::env::current_dir().into_diagnostic()?;
169
+
170
+
// Special handling for pulling to current directory
171
+
if output_abs == current_dir {
172
+
// Move files from temp to current directory
173
+
for entry in std::fs::read_dir(&temp_dir).into_diagnostic()? {
174
+
let entry = entry.into_diagnostic()?;
175
+
let dest = current_dir.join(entry.file_name());
176
+
177
+
// Remove existing file/dir if it exists
178
+
if dest.exists() {
179
+
if dest.is_dir() {
180
+
std::fs::remove_dir_all(&dest).into_diagnostic()?;
181
+
} else {
182
+
std::fs::remove_file(&dest).into_diagnostic()?;
183
+
}
184
+
}
185
+
186
+
// Move from temp to current dir
187
+
std::fs::rename(entry.path(), dest).into_diagnostic()?;
188
+
}
189
+
190
+
// Clean up temp directory
191
+
std::fs::remove_dir_all(&temp_dir).into_diagnostic()?;
192
+
} else {
193
+
// If output directory exists and has content, remove it first
194
+
if output_dir.exists() {
195
+
std::fs::remove_dir_all(&output_dir).into_diagnostic()?;
196
+
}
197
+
198
+
// Ensure parent directory exists
199
+
if let Some(parent) = output_dir.parent() {
200
+
if !parent.as_os_str().is_empty() && !parent.exists() {
201
+
std::fs::create_dir_all(parent).into_diagnostic()?;
202
+
}
203
+
}
204
+
205
+
// Rename temp to final location
206
+
match std::fs::rename(&temp_dir, &output_dir) {
207
+
Ok(_) => {},
208
+
Err(e) => {
209
+
// Clean up temp directory on failure
210
+
let _ = std::fs::remove_dir_all(&temp_dir);
211
+
return Err(miette::miette!("Failed to move temp directory: {}", e));
212
+
}
213
+
}
214
+
}
215
+
216
+
println!("โ Site pulled successfully to {}", output_dir.display());
217
+
218
+
Ok(())
219
+
}
220
+
221
+
/// Recursively download a directory
222
+
fn download_directory<'a>(
223
+
dir: &'a Directory<'_>,
224
+
output_dir: &'a Path,
225
+
pds_url: &'a Url,
226
+
did: &'a str,
227
+
new_blob_map: &'a HashMap<String, (jacquard_common::types::blob::BlobRef<'static>, String)>,
228
+
existing_file_cids: &'a HashMap<String, String>,
229
+
existing_output_dir: &'a Path,
230
+
path_prefix: String,
231
+
downloaded: &'a mut usize,
232
+
reused: &'a mut usize,
233
+
) -> std::pin::Pin<Box<dyn std::future::Future<Output = miette::Result<()>> + Send + 'a>> {
234
+
Box::pin(async move {
235
+
for entry in &dir.entries {
236
+
let entry_name = entry.name.as_str();
237
+
let current_path = if path_prefix.is_empty() {
238
+
entry_name.to_string()
239
+
} else {
240
+
format!("{}/{}", path_prefix, entry_name)
241
+
};
242
+
243
+
match &entry.node {
244
+
EntryNode::File(file) => {
245
+
let output_path = output_dir.join(entry_name);
246
+
247
+
// Check if file CID matches existing
248
+
if let Some((_blob_ref, new_cid)) = new_blob_map.get(&current_path) {
249
+
if let Some(existing_cid) = existing_file_cids.get(&current_path) {
250
+
if existing_cid == new_cid {
251
+
// File unchanged, copy from existing directory
252
+
let existing_path = existing_output_dir.join(&current_path);
253
+
if existing_path.exists() {
254
+
std::fs::copy(&existing_path, &output_path).into_diagnostic()?;
255
+
*reused += 1;
256
+
println!(" โ Reused {}", current_path);
257
+
continue;
258
+
}
259
+
}
260
+
}
261
+
}
262
+
263
+
// File is new or changed, download it
264
+
println!(" โ Downloading {}", current_path);
265
+
let data = download::download_and_decompress_blob(
266
+
pds_url,
267
+
&file.blob,
268
+
did,
269
+
file.base64.unwrap_or(false),
270
+
file.encoding.as_ref().map(|e| e.as_str() == "gzip").unwrap_or(false),
271
+
)
272
+
.await?;
273
+
274
+
std::fs::write(&output_path, data).into_diagnostic()?;
275
+
*downloaded += 1;
276
+
}
277
+
EntryNode::Directory(subdir) => {
278
+
let subdir_path = output_dir.join(entry_name);
279
+
std::fs::create_dir_all(&subdir_path).into_diagnostic()?;
280
+
281
+
download_directory(
282
+
subdir,
283
+
&subdir_path,
284
+
pds_url,
285
+
did,
286
+
new_blob_map,
287
+
existing_file_cids,
288
+
existing_output_dir,
289
+
current_path,
290
+
downloaded,
291
+
reused,
292
+
)
293
+
.await?;
294
+
}
295
+
EntryNode::Unknown(_) => {
296
+
// Skip unknown node types
297
+
println!(" โ Skipping unknown node type for {}", current_path);
298
+
}
299
+
}
300
+
}
301
+
302
+
Ok(())
303
+
})
304
+
}
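
Between runs, `pull_site` keeps its reuse decisions in `.wisp-metadata.json`, written by `SiteMetadata::save` and read back by `SiteMetadata::load`. A rough sketch of that round trip with a struct mirroring the fields shown in `cli/src/metadata.rs`; the CID values and timestamp are made up:

```rust
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

// Mirrors the fields of SiteMetadata in cli/src/metadata.rs.
#[derive(Debug, Serialize, Deserialize)]
struct SiteMetadata {
    record_cid: String,
    file_cids: HashMap<String, String>,
    last_sync: i64,
}

fn main() -> Result<(), serde_json::Error> {
    let mut file_cids = HashMap::new();
    file_cids.insert("index.html".to_string(), "cid-aaa".to_string());

    let metadata = SiteMetadata {
        record_cid: "cid-record".to_string(),
        file_cids,
        last_sync: 1_700_000_000,
    };

    // Roughly what .wisp-metadata.json contains after a pull.
    let json = serde_json::to_string_pretty(&metadata)?;
    println!("{json}");

    // A later pull reads this back: if record_cid matches the record on the PDS
    // the pull is skipped, and matching file_cids entries are copied locally
    // instead of downloaded again.
    let loaded: SiteMetadata = serde_json::from_str(&json)?;
    assert_eq!(loaded.record_cid, "cid-record");
    Ok(())
}
```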
305
+
+202
cli/src/serve.rs
+202
cli/src/serve.rs
···
1
+
use crate::pull::pull_site;
2
+
use axum::Router;
3
+
use jacquard::CowStr;
4
+
use jacquard_common::jetstream::{CommitOperation, JetstreamMessage, JetstreamParams};
5
+
use jacquard_common::types::string::Did;
6
+
use jacquard_common::xrpc::{SubscriptionClient, TungsteniteSubscriptionClient};
7
+
use miette::IntoDiagnostic;
8
+
use n0_future::StreamExt;
9
+
use std::path::PathBuf;
10
+
use std::sync::Arc;
11
+
use tokio::sync::RwLock;
12
+
use tower_http::compression::CompressionLayer;
13
+
use tower_http::services::ServeDir;
14
+
use url::Url;
15
+
16
+
/// Shared state for the server
17
+
#[derive(Clone)]
18
+
struct ServerState {
19
+
did: CowStr<'static>,
20
+
rkey: CowStr<'static>,
21
+
output_dir: PathBuf,
22
+
last_cid: Arc<RwLock<Option<String>>>,
23
+
}
24
+
25
+
/// Serve a site locally with real-time firehose updates
26
+
pub async fn serve_site(
27
+
input: CowStr<'static>,
28
+
rkey: CowStr<'static>,
29
+
output_dir: PathBuf,
30
+
port: u16,
31
+
) -> miette::Result<()> {
32
+
println!("Serving site {} from {} on port {}...", rkey, input, port);
33
+
34
+
// Resolve handle to DID if needed
35
+
use jacquard_identity::PublicResolver;
36
+
use jacquard::prelude::IdentityResolver;
37
+
38
+
let resolver = PublicResolver::default();
39
+
let did = if input.starts_with("did:") {
40
+
Did::new(&input).into_diagnostic()?
41
+
} else {
42
+
// It's a handle, resolve it
43
+
let handle = jacquard_common::types::string::Handle::new(&input).into_diagnostic()?;
44
+
resolver.resolve_handle(&handle).await.into_diagnostic()?
45
+
};
46
+
47
+
println!("Resolved to DID: {}", did.as_str());
48
+
49
+
// Create output directory if it doesn't exist
50
+
std::fs::create_dir_all(&output_dir).into_diagnostic()?;
51
+
52
+
// Initial pull of the site
53
+
println!("Performing initial pull...");
54
+
let did_str = CowStr::from(did.as_str().to_string());
55
+
pull_site(did_str.clone(), rkey.clone(), output_dir.clone()).await?;
56
+
57
+
// Create shared state
58
+
let state = ServerState {
59
+
did: did_str.clone(),
60
+
rkey: rkey.clone(),
61
+
output_dir: output_dir.clone(),
62
+
last_cid: Arc::new(RwLock::new(None)),
63
+
};
64
+
65
+
// Start firehose listener in background
66
+
let firehose_state = state.clone();
67
+
tokio::spawn(async move {
68
+
if let Err(e) = watch_firehose(firehose_state).await {
69
+
eprintln!("Firehose error: {}", e);
70
+
}
71
+
});
72
+
73
+
// Create HTTP server with gzip compression
74
+
let app = Router::new()
75
+
.fallback_service(
76
+
ServeDir::new(&output_dir)
77
+
.precompressed_gzip()
78
+
)
79
+
.layer(CompressionLayer::new())
80
+
.with_state(state);
81
+
82
+
let addr = format!("0.0.0.0:{}", port);
83
+
let listener = tokio::net::TcpListener::bind(&addr)
84
+
.await
85
+
.into_diagnostic()?;
86
+
87
+
println!("\nโ Server running at http://localhost:{}", port);
88
+
println!(" Watching for updates on the firehose...\n");
89
+
90
+
axum::serve(listener, app).await.into_diagnostic()?;
91
+
92
+
Ok(())
93
+
}
94
+
95
+
/// Watch the firehose for updates to the specific site
96
+
fn watch_firehose(state: ServerState) -> std::pin::Pin<Box<dyn std::future::Future<Output = miette::Result<()>> + Send>> {
97
+
Box::pin(async move {
98
+
let jetstream_url = Url::parse("wss://jetstream1.us-east.fire.hose.cam")
99
+
.into_diagnostic()?;
100
+
101
+
println!("[Firehose] Connecting to Jetstream...");
102
+
103
+
// Create subscription client
104
+
let client = TungsteniteSubscriptionClient::from_base_uri(jetstream_url);
105
+
106
+
// Subscribe with no filters (we'll filter manually)
107
+
// Jetstream doesn't support filtering by collection in the params builder
108
+
let params = JetstreamParams::new().build();
109
+
110
+
let stream = client.subscribe(&params).await.into_diagnostic()?;
111
+
println!("[Firehose] Connected! Watching for updates...");
112
+
113
+
// Convert to typed message stream
114
+
let (_sink, mut messages) = stream.into_stream();
115
+
116
+
loop {
117
+
match messages.next().await {
118
+
Some(Ok(msg)) => {
119
+
if let Err(e) = handle_firehose_message(&state, msg).await {
120
+
eprintln!("[Firehose] Error handling message: {}", e);
121
+
}
122
+
}
123
+
Some(Err(e)) => {
124
+
eprintln!("[Firehose] Stream error: {}", e);
125
+
// Try to reconnect after a delay
126
+
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
127
+
return Box::pin(watch_firehose(state)).await;
128
+
}
129
+
None => {
130
+
println!("[Firehose] Stream ended, reconnecting...");
131
+
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
132
+
return Box::pin(watch_firehose(state)).await;
133
+
}
134
+
}
135
+
}
136
+
})
137
+
}
138
+
139
+
/// Handle a firehose message
140
+
async fn handle_firehose_message(
141
+
state: &ServerState,
142
+
msg: JetstreamMessage<'_>,
143
+
) -> miette::Result<()> {
144
+
match msg {
145
+
JetstreamMessage::Commit {
146
+
did,
147
+
commit,
148
+
..
149
+
} => {
150
+
// Check if this is our site
151
+
if did.as_str() == state.did.as_str()
152
+
&& commit.collection.as_str() == "place.wisp.fs"
153
+
&& commit.rkey.as_str() == state.rkey.as_str()
154
+
{
155
+
match commit.operation {
156
+
CommitOperation::Create | CommitOperation::Update => {
157
+
let new_cid = commit.cid.as_ref().map(|c| c.to_string());
158
+
159
+
// Check if CID changed
160
+
let should_update = {
161
+
let last_cid = state.last_cid.read().await;
162
+
new_cid != *last_cid
163
+
};
164
+
165
+
if should_update {
166
+
println!("\n[Update] Detected change to site {} (CID: {:?})", state.rkey, new_cid);
167
+
println!("[Update] Pulling latest version...");
168
+
169
+
// Pull the updated site
170
+
match pull_site(
171
+
state.did.clone(),
172
+
state.rkey.clone(),
173
+
state.output_dir.clone(),
174
+
)
175
+
.await
176
+
{
177
+
Ok(_) => {
178
+
// Update last CID
179
+
let mut last_cid = state.last_cid.write().await;
180
+
*last_cid = new_cid;
181
+
println!("[Update] โ Site updated successfully!\n");
182
+
}
183
+
Err(e) => {
184
+
eprintln!("[Update] Failed to pull site: {}", e);
185
+
}
186
+
}
187
+
}
188
+
}
189
+
CommitOperation::Delete => {
190
+
println!("\n[Update] Site {} was deleted", state.rkey);
191
+
}
192
+
}
193
+
}
194
+
}
195
+
_ => {
196
+
// Ignore identity and account messages
197
+
}
198
+
}
199
+
200
+
Ok(())
201
+
}
202
+
+90
crates.nix
+90
crates.nix
···
1
+
{...}: {
2
+
perSystem = {
3
+
pkgs,
4
+
config,
5
+
lib,
6
+
inputs',
7
+
...
8
+
}: {
9
+
# declare projects
10
+
nci.projects."wisp-place-cli" = {
11
+
path = ./cli;
12
+
export = false;
13
+
};
14
+
nci.toolchains.mkBuild = _:
15
+
with inputs'.fenix.packages;
16
+
combine [
17
+
minimal.rustc
18
+
minimal.cargo
19
+
targets.x86_64-pc-windows-gnu.latest.rust-std
20
+
targets.x86_64-unknown-linux-gnu.latest.rust-std
21
+
targets.aarch64-apple-darwin.latest.rust-std
22
+
targets.aarch64-unknown-linux-gnu.latest.rust-std
23
+
];
24
+
# configure crates
25
+
nci.crates."wisp-cli" = {
26
+
profiles = {
27
+
dev.runTests = false;
28
+
release.runTests = false;
29
+
};
30
+
targets."x86_64-unknown-linux-gnu" = let
31
+
targetPkgs = pkgs.pkgsCross.gnu64;
32
+
targetCC = targetPkgs.stdenv.cc;
33
+
targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget;
34
+
in rec {
35
+
default = true;
36
+
depsDrvConfig.mkDerivation = {
37
+
nativeBuildInputs = [targetCC];
38
+
};
39
+
depsDrvConfig.env = rec {
40
+
TARGET_CC = "${targetCC.targetPrefix}cc";
41
+
"CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC;
42
+
};
43
+
drvConfig = depsDrvConfig;
44
+
};
45
+
targets."x86_64-pc-windows-gnu" = let
46
+
targetPkgs = pkgs.pkgsCross.mingwW64;
47
+
targetCC = targetPkgs.stdenv.cc;
48
+
targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget;
49
+
in rec {
50
+
depsDrvConfig.mkDerivation = {
51
+
nativeBuildInputs = [targetCC];
52
+
buildInputs = with targetPkgs; [windows.pthreads];
53
+
};
54
+
depsDrvConfig.env = rec {
55
+
TARGET_CC = "${targetCC.targetPrefix}cc";
56
+
"CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC;
57
+
};
58
+
drvConfig = depsDrvConfig;
59
+
};
60
+
targets."aarch64-apple-darwin" = let
61
+
targetPkgs = pkgs.pkgsCross.aarch64-darwin;
62
+
targetCC = targetPkgs.stdenv.cc;
63
+
targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget;
64
+
in rec {
65
+
depsDrvConfig.mkDerivation = {
66
+
nativeBuildInputs = [targetCC];
67
+
};
68
+
depsDrvConfig.env = rec {
69
+
TARGET_CC = "${targetCC.targetPrefix}cc";
70
+
"CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC;
71
+
};
72
+
drvConfig = depsDrvConfig;
73
+
};
74
+
targets."aarch64-unknown-linux-gnu" = let
75
+
targetPkgs = pkgs.pkgsCross.aarch64-multiplatform;
76
+
targetCC = targetPkgs.stdenv.cc;
77
+
targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget;
78
+
in rec {
79
+
depsDrvConfig.mkDerivation = {
80
+
nativeBuildInputs = [targetCC];
81
+
};
82
+
depsDrvConfig.env = rec {
83
+
TARGET_CC = "${targetCC.targetPrefix}cc";
84
+
"CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC;
85
+
};
86
+
drvConfig = depsDrvConfig;
87
+
};
88
+
};
89
+
};
90
+
}
+318
flake.lock
+318
flake.lock
···
1
+
{
2
+
"nodes": {
3
+
"crane": {
4
+
"flake": false,
5
+
"locked": {
6
+
"lastModified": 1758758545,
7
+
"narHash": "sha256-NU5WaEdfwF6i8faJ2Yh+jcK9vVFrofLcwlD/mP65JrI=",
8
+
"owner": "ipetkov",
9
+
"repo": "crane",
10
+
"rev": "95d528a5f54eaba0d12102249ce42f4d01f4e364",
11
+
"type": "github"
12
+
},
13
+
"original": {
14
+
"owner": "ipetkov",
15
+
"ref": "v0.21.1",
16
+
"repo": "crane",
17
+
"type": "github"
18
+
}
19
+
},
20
+
"dream2nix": {
21
+
"inputs": {
22
+
"nixpkgs": [
23
+
"nci",
24
+
"nixpkgs"
25
+
],
26
+
"purescript-overlay": "purescript-overlay",
27
+
"pyproject-nix": "pyproject-nix"
28
+
},
29
+
"locked": {
30
+
"lastModified": 1754978539,
31
+
"narHash": "sha256-nrDovydywSKRbWim9Ynmgj8SBm8LK3DI2WuhIqzOHYI=",
32
+
"owner": "nix-community",
33
+
"repo": "dream2nix",
34
+
"rev": "fbec3263cb4895ac86ee9506cdc4e6919a1a2214",
35
+
"type": "github"
36
+
},
37
+
"original": {
38
+
"owner": "nix-community",
39
+
"repo": "dream2nix",
40
+
"type": "github"
41
+
}
42
+
},
43
+
"fenix": {
44
+
"inputs": {
45
+
"nixpkgs": [
46
+
"nixpkgs"
47
+
],
48
+
"rust-analyzer-src": "rust-analyzer-src"
49
+
},
50
+
"locked": {
51
+
"lastModified": 1762584108,
52
+
"narHash": "sha256-wZUW7dlXMXaRdvNbaADqhF8gg9bAfFiMV+iyFQiDv+Y=",
53
+
"owner": "nix-community",
54
+
"repo": "fenix",
55
+
"rev": "32f3ad3b6c690061173e1ac16708874975ec6056",
56
+
"type": "github"
57
+
},
58
+
"original": {
59
+
"owner": "nix-community",
60
+
"repo": "fenix",
61
+
"type": "github"
62
+
}
63
+
},
64
+
"flake-compat": {
65
+
"flake": false,
66
+
"locked": {
67
+
"lastModified": 1696426674,
68
+
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
69
+
"owner": "edolstra",
70
+
"repo": "flake-compat",
71
+
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
72
+
"type": "github"
73
+
},
74
+
"original": {
75
+
"owner": "edolstra",
76
+
"repo": "flake-compat",
77
+
"type": "github"
78
+
}
79
+
},
80
+
"mk-naked-shell": {
81
+
"flake": false,
82
+
"locked": {
83
+
"lastModified": 1681286841,
84
+
"narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=",
85
+
"owner": "90-008",
86
+
"repo": "mk-naked-shell",
87
+
"rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd",
88
+
"type": "github"
89
+
},
90
+
"original": {
91
+
"owner": "90-008",
92
+
"repo": "mk-naked-shell",
93
+
"type": "github"
94
+
}
95
+
},
96
+
"nci": {
97
+
"inputs": {
98
+
"crane": "crane",
99
+
"dream2nix": "dream2nix",
100
+
"mk-naked-shell": "mk-naked-shell",
101
+
"nixpkgs": [
102
+
"nixpkgs"
103
+
],
104
+
"parts": "parts",
105
+
"rust-overlay": "rust-overlay",
106
+
"treefmt": "treefmt"
107
+
},
108
+
"locked": {
109
+
"lastModified": 1762582646,
110
+
"narHash": "sha256-MMzE4xccG+8qbLhdaZoeFDUKWUOn3B4lhp5dZmgukmM=",
111
+
"owner": "90-008",
112
+
"repo": "nix-cargo-integration",
113
+
"rev": "0993c449377049fa8868a664e8290ac6658e0b9a",
114
+
"type": "github"
115
+
},
116
+
"original": {
117
+
"owner": "90-008",
118
+
"repo": "nix-cargo-integration",
119
+
"type": "github"
120
+
}
121
+
},
122
+
"nixpkgs": {
123
+
"locked": {
124
+
"lastModified": 1762361079,
125
+
"narHash": "sha256-lz718rr1BDpZBYk7+G8cE6wee3PiBUpn8aomG/vLLiY=",
126
+
"owner": "nixos",
127
+
"repo": "nixpkgs",
128
+
"rev": "ffcdcf99d65c61956d882df249a9be53e5902ea5",
129
+
"type": "github"
130
+
},
131
+
"original": {
132
+
"owner": "nixos",
133
+
"ref": "nixpkgs-unstable",
134
+
"repo": "nixpkgs",
135
+
"type": "github"
136
+
}
137
+
},
138
+
"parts": {
139
+
"inputs": {
140
+
"nixpkgs-lib": [
141
+
"nci",
142
+
"nixpkgs"
143
+
]
144
+
},
145
+
"locked": {
146
+
"lastModified": 1762440070,
147
+
"narHash": "sha256-xxdepIcb39UJ94+YydGP221rjnpkDZUlykKuF54PsqI=",
148
+
"owner": "hercules-ci",
149
+
"repo": "flake-parts",
150
+
"rev": "26d05891e14c88eb4a5d5bee659c0db5afb609d8",
151
+
"type": "github"
152
+
},
153
+
"original": {
154
+
"owner": "hercules-ci",
155
+
"repo": "flake-parts",
156
+
"type": "github"
157
+
}
158
+
},
159
+
"parts_2": {
160
+
"inputs": {
161
+
"nixpkgs-lib": [
162
+
"nixpkgs"
163
+
]
164
+
},
165
+
"locked": {
166
+
"lastModified": 1762440070,
167
+
"narHash": "sha256-xxdepIcb39UJ94+YydGP221rjnpkDZUlykKuF54PsqI=",
168
+
"owner": "hercules-ci",
169
+
"repo": "flake-parts",
170
+
"rev": "26d05891e14c88eb4a5d5bee659c0db5afb609d8",
171
+
"type": "github"
172
+
},
173
+
"original": {
174
+
"owner": "hercules-ci",
175
+
"repo": "flake-parts",
176
+
"type": "github"
177
+
}
178
+
},
179
+
"purescript-overlay": {
180
+
"inputs": {
181
+
"flake-compat": "flake-compat",
182
+
"nixpkgs": [
183
+
"nci",
184
+
"dream2nix",
185
+
"nixpkgs"
186
+
],
187
+
"slimlock": "slimlock"
188
+
},
189
+
"locked": {
190
+
"lastModified": 1728546539,
191
+
"narHash": "sha256-Sws7w0tlnjD+Bjck1nv29NjC5DbL6nH5auL9Ex9Iz2A=",
192
+
"owner": "thomashoneyman",
193
+
"repo": "purescript-overlay",
194
+
"rev": "4ad4c15d07bd899d7346b331f377606631eb0ee4",
195
+
"type": "github"
196
+
},
197
+
"original": {
198
+
"owner": "thomashoneyman",
199
+
"repo": "purescript-overlay",
200
+
"type": "github"
201
+
}
202
+
},
203
+
"pyproject-nix": {
204
+
"inputs": {
205
+
"nixpkgs": [
206
+
"nci",
207
+
"dream2nix",
208
+
"nixpkgs"
209
+
]
210
+
},
211
+
"locked": {
212
+
"lastModified": 1752481895,
213
+
"narHash": "sha256-luVj97hIMpCbwhx3hWiRwjP2YvljWy8FM+4W9njDhLA=",
214
+
"owner": "pyproject-nix",
215
+
"repo": "pyproject.nix",
216
+
"rev": "16ee295c25107a94e59a7fc7f2e5322851781162",
217
+
"type": "github"
218
+
},
219
+
"original": {
220
+
"owner": "pyproject-nix",
221
+
"repo": "pyproject.nix",
222
+
"type": "github"
223
+
}
224
+
},
225
+
"root": {
226
+
"inputs": {
227
+
"fenix": "fenix",
228
+
"nci": "nci",
229
+
"nixpkgs": "nixpkgs",
230
+
"parts": "parts_2"
231
+
}
232
+
},
233
+
"rust-analyzer-src": {
234
+
"flake": false,
235
+
"locked": {
236
+
"lastModified": 1762438844,
237
+
"narHash": "sha256-ApIKJf6CcMsV2nYBXhGF95BmZMO/QXPhgfSnkA/rVUo=",
238
+
"owner": "rust-lang",
239
+
"repo": "rust-analyzer",
240
+
"rev": "4bf516ee5a960c1e2eee9fedd9b1c9e976a19c86",
241
+
"type": "github"
242
+
},
243
+
"original": {
244
+
"owner": "rust-lang",
245
+
"ref": "nightly",
246
+
"repo": "rust-analyzer",
247
+
"type": "github"
248
+
}
249
+
},
250
+
"rust-overlay": {
251
+
"inputs": {
252
+
"nixpkgs": [
253
+
"nci",
254
+
"nixpkgs"
255
+
]
256
+
},
257
+
"locked": {
258
+
"lastModified": 1762569282,
259
+
"narHash": "sha256-vINZAJpXQTZd5cfh06Rcw7hesH7sGSvi+Tn+HUieJn8=",
260
+
"owner": "oxalica",
261
+
"repo": "rust-overlay",
262
+
"rev": "a35a6144b976f70827c2fe2f5c89d16d8f9179d8",
263
+
"type": "github"
264
+
},
265
+
"original": {
266
+
"owner": "oxalica",
267
+
"repo": "rust-overlay",
268
+
"type": "github"
269
+
}
270
+
},
271
+
"slimlock": {
272
+
"inputs": {
273
+
"nixpkgs": [
274
+
"nci",
275
+
"dream2nix",
276
+
"purescript-overlay",
277
+
"nixpkgs"
278
+
]
279
+
},
280
+
"locked": {
281
+
"lastModified": 1688756706,
282
+
"narHash": "sha256-xzkkMv3neJJJ89zo3o2ojp7nFeaZc2G0fYwNXNJRFlo=",
283
+
"owner": "thomashoneyman",
284
+
"repo": "slimlock",
285
+
"rev": "cf72723f59e2340d24881fd7bf61cb113b4c407c",
286
+
"type": "github"
287
+
},
288
+
"original": {
289
+
"owner": "thomashoneyman",
290
+
"repo": "slimlock",
291
+
"type": "github"
292
+
}
293
+
},
294
+
"treefmt": {
295
+
"inputs": {
296
+
"nixpkgs": [
297
+
"nci",
298
+
"nixpkgs"
299
+
]
300
+
},
301
+
"locked": {
302
+
"lastModified": 1762410071,
303
+
"narHash": "sha256-aF5fvoZeoXNPxT0bejFUBXeUjXfHLSL7g+mjR/p5TEg=",
304
+
"owner": "numtide",
305
+
"repo": "treefmt-nix",
306
+
"rev": "97a30861b13c3731a84e09405414398fbf3e109f",
307
+
"type": "github"
308
+
},
309
+
"original": {
310
+
"owner": "numtide",
311
+
"repo": "treefmt-nix",
312
+
"type": "github"
313
+
}
314
+
}
315
+
},
316
+
"root": "root",
317
+
"version": 7
318
+
}
+59
flake.nix
+59
flake.nix
···
1
+
{
2
+
inputs.nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable";
3
+
inputs.nci.url = "github:90-008/nix-cargo-integration";
4
+
inputs.nci.inputs.nixpkgs.follows = "nixpkgs";
5
+
inputs.parts.url = "github:hercules-ci/flake-parts";
6
+
inputs.parts.inputs.nixpkgs-lib.follows = "nixpkgs";
7
+
inputs.fenix = {
8
+
url = "github:nix-community/fenix";
9
+
inputs.nixpkgs.follows = "nixpkgs";
10
+
};
11
+
12
+
outputs = inputs @ {
13
+
parts,
14
+
nci,
15
+
...
16
+
}:
17
+
parts.lib.mkFlake {inherit inputs;} {
18
+
systems = ["x86_64-linux" "aarch64-darwin"];
19
+
imports = [
20
+
nci.flakeModule
21
+
./crates.nix
22
+
];
23
+
perSystem = {
24
+
pkgs,
25
+
config,
26
+
...
27
+
}: let
28
+
crateOutputs = config.nci.outputs."wisp-cli";
29
+
mkRenamedPackage = name: pkg: isWindows: pkgs.runCommand name {} ''
30
+
mkdir -p $out/bin
31
+
if [ -f ${pkg}/bin/wisp-cli.exe ]; then
32
+
cp ${pkg}/bin/wisp-cli.exe $out/bin/${name}
33
+
elif [ -f ${pkg}/bin/wisp-cli ]; then
34
+
cp ${pkg}/bin/wisp-cli $out/bin/${name}
35
+
else
36
+
echo "Error: Could not find wisp-cli binary in ${pkg}/bin/"
37
+
ls -la ${pkg}/bin/ || true
38
+
exit 1
39
+
fi
40
+
'';
41
+
in {
42
+
devShells.default = crateOutputs.devShell;
43
+
packages.default = crateOutputs.packages.release;
44
+
packages.wisp-cli-x86_64-linux = mkRenamedPackage "wisp-cli-x86_64-linux" crateOutputs.packages.release false;
45
+
packages.wisp-cli-aarch64-linux = mkRenamedPackage "wisp-cli-aarch64-linux" crateOutputs.allTargets."aarch64-unknown-linux-gnu".packages.release false;
46
+
packages.wisp-cli-x86_64-windows = mkRenamedPackage "wisp-cli-x86_64-windows.exe" crateOutputs.allTargets."x86_64-pc-windows-gnu".packages.release true;
47
+
packages.wisp-cli-aarch64-darwin = mkRenamedPackage "wisp-cli-aarch64-darwin" crateOutputs.allTargets."aarch64-apple-darwin".packages.release false;
48
+
packages.all = pkgs.symlinkJoin {
49
+
name = "wisp-cli-all";
50
+
paths = [
51
+
config.packages.wisp-cli-x86_64-linux
52
+
config.packages.wisp-cli-aarch64-linux
53
+
config.packages.wisp-cli-x86_64-windows
54
+
config.packages.wisp-cli-aarch64-darwin
55
+
];
56
+
};
57
+
};
58
+
};
59
+
}
-123
hosting-service/EXAMPLE.md
-123
hosting-service/EXAMPLE.md
···
1
-
# HTML Path Rewriting Example
2
-
3
-
This document demonstrates how HTML path rewriting works when serving sites via the `/s/:identifier/:site/*` route.
4
-
5
-
## Problem
6
-
7
-
When you create a static site with absolute paths like `/style.css` or `/images/logo.png`, these paths work fine when served from the root domain. However, when served from a subdirectory like `/s/alice.bsky.social/mysite/`, these absolute paths break because they resolve to the server root instead of the site root.
8
-
9
-
## Solution
10
-
11
-
The hosting service automatically rewrites absolute paths in HTML files to work correctly in the subdirectory context.
12
-
13
-
## Example
14
-
15
-
**Original HTML file (index.html):**
16
-
```html
17
-
<!DOCTYPE html>
18
-
<html>
19
-
<head>
20
-
<meta charset="UTF-8">
21
-
<title>My Site</title>
22
-
<link rel="stylesheet" href="/style.css">
23
-
<link rel="icon" href="/favicon.ico">
24
-
<script src="/app.js"></script>
25
-
</head>
26
-
<body>
27
-
<header>
28
-
<img src="/images/logo.png" alt="Logo">
29
-
<nav>
30
-
<a href="/">Home</a>
31
-
<a href="/about">About</a>
32
-
<a href="/contact">Contact</a>
33
-
</nav>
34
-
</header>
35
-
36
-
<main>
37
-
<h1>Welcome</h1>
38
-
<img src="/images/hero.jpg"
39
-
srcset="/images/hero.jpg 1x, /images/hero@2x.jpg 2x"
40
-
alt="Hero">
41
-
42
-
<form action="/submit" method="post">
43
-
<input type="text" name="email">
44
-
<button>Submit</button>
45
-
</form>
46
-
</main>
47
-
48
-
<footer>
49
-
<a href="https://example.com">External Link</a>
50
-
<a href="#top">Back to Top</a>
51
-
</footer>
52
-
</body>
53
-
</html>
54
-
```
55
-
56
-
**When accessed via `/s/alice.bsky.social/mysite/`, the HTML is rewritten to:**
57
-
```html
58
-
<!DOCTYPE html>
59
-
<html>
60
-
<head>
61
-
<meta charset="UTF-8">
62
-
<title>My Site</title>
63
-
<link rel="stylesheet" href="/s/alice.bsky.social/mysite/style.css">
64
-
<link rel="icon" href="/s/alice.bsky.social/mysite/favicon.ico">
65
-
<script src="/s/alice.bsky.social/mysite/app.js"></script>
66
-
</head>
67
-
<body>
68
-
<header>
69
-
<img src="/s/alice.bsky.social/mysite/images/logo.png" alt="Logo">
70
-
<nav>
71
-
<a href="/s/alice.bsky.social/mysite/">Home</a>
72
-
<a href="/s/alice.bsky.social/mysite/about">About</a>
73
-
<a href="/s/alice.bsky.social/mysite/contact">Contact</a>
74
-
</nav>
75
-
</header>
76
-
77
-
<main>
78
-
<h1>Welcome</h1>
79
-
<img src="/s/alice.bsky.social/mysite/images/hero.jpg"
80
-
srcset="/s/alice.bsky.social/mysite/images/hero.jpg 1x, /s/alice.bsky.social/mysite/images/hero@2x.jpg 2x"
81
-
alt="Hero">
82
-
83
-
<form action="/s/alice.bsky.social/mysite/submit" method="post">
84
-
<input type="text" name="email">
85
-
<button>Submit</button>
86
-
</form>
87
-
</main>
88
-
89
-
<footer>
90
-
<a href="https://example.com">External Link</a>
91
-
<a href="#top">Back to Top</a>
92
-
</footer>
93
-
</body>
94
-
</html>
95
-
```
96
-
97
-
## What's Preserved
98
-
99
-
Notice that:
100
-
- โ
Absolute paths are rewritten: `/style.css` โ `/s/alice.bsky.social/mysite/style.css`
101
-
- โ
External URLs are preserved: `https://example.com` stays the same
102
-
- โ
Anchors are preserved: `#top` stays the same
103
-
- โ
The rewriting is safe and won't break your site
104
-
105
-
## Supported Attributes
106
-
107
-
The rewriter handles these HTML attributes:
108
-
- `src` - images, scripts, iframes, videos, audio
109
-
- `href` - links, stylesheets
110
-
- `action` - forms
111
-
- `data` - objects
112
-
- `poster` - video posters
113
-
- `srcset` - responsive images
114
-
115
-
## Testing Your Site
116
-
117
-
To test if your site works with path rewriting:
118
-
119
-
1. Upload your site to your PDS as a `place.wisp.fs` record
120
-
2. Access it via: `https://hosting.wisp.place/s/YOUR_HANDLE/SITE_NAME/`
121
-
3. Check that all resources load correctly
122
-
123
-
If you're using relative paths already (like `./style.css` or `../images/logo.png`), they'll work without any rewriting.
+134
hosting-service/example-_redirects
+134
hosting-service/example-_redirects
···
1
+
# Example _redirects file for Wisp hosting
2
+
# Place this file in the root directory of your site as "_redirects"
3
+
# Lines starting with # are comments
4
+
5
+
# ===================================
6
+
# SIMPLE REDIRECTS
7
+
# ===================================
8
+
9
+
# Redirect home page
10
+
# /home /
11
+
12
+
# Redirect old URLs to new ones
13
+
# /old-blog /blog
14
+
# /about-us /about
15
+
16
+
# ===================================
17
+
# SPLAT REDIRECTS (WILDCARDS)
18
+
# ===================================
19
+
20
+
# Redirect entire directories
21
+
# /news/* /blog/:splat
22
+
# /old-site/* /new-site/:splat
23
+
24
+
# ===================================
25
+
# PLACEHOLDER REDIRECTS
26
+
# ===================================
27
+
28
+
# Restructure blog URLs
29
+
# /blog/:year/:month/:day/:slug /posts/:year-:month-:day/:slug
30
+
31
+
# Capture multiple parameters
32
+
# /products/:category/:id /shop/:category/item/:id
33
+
34
+
# ===================================
35
+
# STATUS CODES
36
+
# ===================================
37
+
38
+
# Permanent redirect (301) - default if not specified
39
+
# /permanent-move /new-location 301
40
+
41
+
# Temporary redirect (302)
42
+
# /temp-redirect /temp-location 302
43
+
44
+
# Rewrite (200) - serves different content, URL stays the same
45
+
# /api/* /functions/:splat 200
46
+
47
+
# Custom 404 page
48
+
# /shop/* /shop-closed.html 404
49
+
50
+
# ===================================
51
+
# FORCE REDIRECTS
52
+
# ===================================
53
+
54
+
# Force redirect even if file exists (note the ! after status code)
55
+
# /override-file /other-file.html 200!
56
+
57
+
# ===================================
58
+
# CONDITIONAL REDIRECTS
59
+
# ===================================
60
+
61
+
# Country-based redirects (ISO 3166-1 alpha-2 codes)
62
+
# / /us/ 302 Country=us
63
+
# / /uk/ 302 Country=gb
64
+
# / /anz/ 302 Country=au,nz
65
+
66
+
# Language-based redirects
67
+
# /products /en/products 301 Language=en
68
+
# /products /de/products 301 Language=de
69
+
# /products /fr/products 301 Language=fr
70
+
71
+
# Cookie-based redirects (checks if cookie exists)
72
+
# /* /legacy/:splat 200 Cookie=is_legacy
73
+
74
+
# ===================================
75
+
# QUERY PARAMETERS
76
+
# ===================================
77
+
78
+
# Match specific query parameters
79
+
# /store id=:id /blog/:id 301
80
+
81
+
# Multiple parameters
82
+
# /search q=:query category=:cat /find/:cat/:query 301
83
+
84
+
# ===================================
85
+
# DOMAIN-LEVEL REDIRECTS
86
+
# ===================================
87
+
88
+
# Redirect to different domain (must include protocol)
89
+
# /external https://example.com/path
90
+
91
+
# Redirect entire subdomain
92
+
# http://blog.example.com/* https://example.com/blog/:splat 301!
93
+
# https://blog.example.com/* https://example.com/blog/:splat 301!
94
+
95
+
# ===================================
96
+
# COMMON PATTERNS
97
+
# ===================================
98
+
99
+
# Remove .html extensions
100
+
# /page.html /page
101
+
102
+
# Add trailing slash
103
+
# /about /about/
104
+
105
+
# Single-page app fallback (serve index.html for all paths)
106
+
# /* /index.html 200
107
+
108
+
# API proxy
109
+
# /api/* https://api.example.com/:splat 200
110
+
111
+
# ===================================
112
+
# CUSTOM ERROR PAGES
113
+
# ===================================
114
+
115
+
# Language-specific 404 pages
116
+
# /en/* /en/404.html 404
117
+
# /de/* /de/404.html 404
118
+
119
+
# Section-specific 404 pages
120
+
# /shop/* /shop/not-found.html 404
121
+
# /blog/* /blog/404.html 404
122
+
123
+
# ===================================
124
+
# NOTES
125
+
# ===================================
126
+
#
127
+
# - Rules are processed in order (first match wins)
128
+
# - More specific rules should come before general ones
129
+
# - Splats (*) can only be used at the end of a path
130
+
# - Query parameters are automatically preserved for 200, 301, 302
131
+
# - Trailing slashes are normalized (/ and no / are treated the same)
132
+
# - Default status code is 301 if not specified
133
+
#
134
+
+11
-1
hosting-service/src/index.ts
+11
-1
hosting-service/src/index.ts
···
4
4
import { logger } from './lib/observability';
5
5
import { mkdirSync, existsSync } from 'fs';
6
6
import { backfillCache } from './lib/backfill';
7
-
import { startDomainCacheCleanup, stopDomainCacheCleanup } from './lib/db';
7
+
import { startDomainCacheCleanup, stopDomainCacheCleanup, setCacheOnlyMode } from './lib/db';
8
8
9
9
const PORT = process.env.PORT ? parseInt(process.env.PORT) : 3001;
10
10
const CACHE_DIR = process.env.CACHE_DIR || './cache/sites';
···
13
13
const args = process.argv.slice(2);
14
14
const hasBackfillFlag = args.includes('--backfill');
15
15
const backfillOnStartup = hasBackfillFlag || process.env.BACKFILL_ON_STARTUP === 'true';
16
+
17
+
// Cache-only mode: service will only cache files locally, no DB writes
18
+
const hasCacheOnlyFlag = args.includes('--cache-only');
19
+
export const CACHE_ONLY_MODE = hasCacheOnlyFlag || process.env.CACHE_ONLY_MODE === 'true';
20
+
21
+
// Configure cache-only mode in database module
22
+
if (CACHE_ONLY_MODE) {
23
+
setCacheOnlyMode(true);
24
+
}
16
25
17
26
// Ensure cache directory exists
18
27
if (!existsSync(CACHE_DIR)) {
···
65
74
Health: http://localhost:${PORT}/health
66
75
Cache: ${CACHE_DIR}
67
76
Firehose: Connected to Firehose
77
+
Cache-Only: ${CACHE_ONLY_MODE ? 'ENABLED (no DB writes)' : 'DISABLED'}
68
78
`);
69
79
70
80
// Graceful shutdown
+16
hosting-service/src/lib/db.ts
+16
hosting-service/src/lib/db.ts
···
1
1
import postgres from 'postgres';
2
2
import { createHash } from 'crypto';
3
3
4
+
// Global cache-only mode flag (set by index.ts)
5
+
let cacheOnlyMode = false;
6
+
7
+
export function setCacheOnlyMode(enabled: boolean) {
8
+
cacheOnlyMode = enabled;
9
+
if (enabled) {
10
+
console.log('[DB] Cache-only mode enabled - database writes will be skipped');
11
+
}
12
+
}
13
+
4
14
const sql = postgres(
5
15
process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/wisp',
6
16
{
···
130
140
}
131
141
132
142
export async function upsertSite(did: string, rkey: string, displayName?: string) {
143
+
// Skip database writes in cache-only mode
144
+
if (cacheOnlyMode) {
145
+
console.log('[DB] Skipping upsertSite (cache-only mode)', { did, rkey });
146
+
return;
147
+
}
148
+
133
149
try {
134
150
// Only set display_name if provided (not undefined/null/empty)
135
151
const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
+2
hosting-service/src/lib/firehose.ts
+2
hosting-service/src/lib/firehose.ts
···
197
197
)
198
198
199
199
// Acquire distributed lock only for database write to prevent duplicate writes
200
+
// Note: upsertSite will check cache-only mode internally and skip if needed
200
201
const lockKey = `db:upsert:${did}:${site}`
201
202
const lockAcquired = await tryAcquireLock(lockKey)
202
203
···
214
215
215
216
try {
216
217
// Upsert site to database (only one instance does this)
218
+
// In cache-only mode, this will be a no-op
217
219
await upsertSite(did, site, fsRecord.site)
218
220
this.log(
219
221
'Successfully processed create/update (cached + DB updated)',
+215
hosting-service/src/lib/redirects.test.ts
+215
hosting-service/src/lib/redirects.test.ts
···
1
+
import { describe, it, expect } from 'bun:test'
2
+
import { parseRedirectsFile, matchRedirectRule } from './redirects';
3
+
4
+
describe('parseRedirectsFile', () => {
5
+
it('should parse simple redirects', () => {
6
+
const content = `
7
+
# Comment line
8
+
/old-path /new-path
9
+
/home / 301
10
+
`;
11
+
const rules = parseRedirectsFile(content);
12
+
expect(rules).toHaveLength(2);
13
+
expect(rules[0]).toMatchObject({
14
+
from: '/old-path',
15
+
to: '/new-path',
16
+
status: 301,
17
+
force: false,
18
+
});
19
+
expect(rules[1]).toMatchObject({
20
+
from: '/home',
21
+
to: '/',
22
+
status: 301,
23
+
force: false,
24
+
});
25
+
});
26
+
27
+
it('should parse redirects with different status codes', () => {
28
+
const content = `
29
+
/temp-redirect /target 302
30
+
/rewrite /content 200
31
+
/not-found /404 404
32
+
`;
33
+
const rules = parseRedirectsFile(content);
34
+
expect(rules).toHaveLength(3);
35
+
expect(rules[0]?.status).toBe(302);
36
+
expect(rules[1]?.status).toBe(200);
37
+
expect(rules[2]?.status).toBe(404);
38
+
});
39
+
40
+
it('should parse force redirects', () => {
41
+
const content = `/force-path /target 301!`;
42
+
const rules = parseRedirectsFile(content);
43
+
expect(rules[0]?.force).toBe(true);
44
+
expect(rules[0]?.status).toBe(301);
45
+
});
46
+
47
+
it('should parse splat redirects', () => {
48
+
const content = `/news/* /blog/:splat`;
49
+
const rules = parseRedirectsFile(content);
50
+
expect(rules[0]?.from).toBe('/news/*');
51
+
expect(rules[0]?.to).toBe('/blog/:splat');
52
+
});
53
+
54
+
it('should parse placeholder redirects', () => {
55
+
const content = `/blog/:year/:month/:day /posts/:year-:month-:day`;
56
+
const rules = parseRedirectsFile(content);
57
+
expect(rules[0]?.from).toBe('/blog/:year/:month/:day');
58
+
expect(rules[0]?.to).toBe('/posts/:year-:month-:day');
59
+
});
60
+
61
+
it('should parse country-based redirects', () => {
62
+
const content = `/ /anz 302 Country=au,nz`;
63
+
const rules = parseRedirectsFile(content);
64
+
expect(rules[0]?.conditions?.country).toEqual(['au', 'nz']);
65
+
});
66
+
67
+
it('should parse language-based redirects', () => {
68
+
const content = `/products /en/products 301 Language=en`;
69
+
const rules = parseRedirectsFile(content);
70
+
expect(rules[0]?.conditions?.language).toEqual(['en']);
71
+
});
72
+
73
+
it('should parse cookie-based redirects', () => {
74
+
const content = `/* /legacy/:splat 200 Cookie=is_legacy,my_cookie`;
75
+
const rules = parseRedirectsFile(content);
76
+
expect(rules[0]?.conditions?.cookie).toEqual(['is_legacy', 'my_cookie']);
77
+
});
78
+
});
79
+
80
+
describe('matchRedirectRule', () => {
81
+
it('should match exact paths', () => {
82
+
const rules = parseRedirectsFile('/old-path /new-path');
83
+
const match = matchRedirectRule('/old-path', rules);
84
+
expect(match).toBeTruthy();
85
+
expect(match?.targetPath).toBe('/new-path');
86
+
expect(match?.status).toBe(301);
87
+
});
88
+
89
+
it('should match paths with trailing slash', () => {
90
+
const rules = parseRedirectsFile('/old-path /new-path');
91
+
const match = matchRedirectRule('/old-path/', rules);
92
+
expect(match).toBeTruthy();
93
+
expect(match?.targetPath).toBe('/new-path');
94
+
});
95
+
96
+
it('should match splat patterns', () => {
97
+
const rules = parseRedirectsFile('/news/* /blog/:splat');
98
+
const match = matchRedirectRule('/news/2024/01/15/my-post', rules);
99
+
expect(match).toBeTruthy();
100
+
expect(match?.targetPath).toBe('/blog/2024/01/15/my-post');
101
+
});
102
+
103
+
it('should match placeholder patterns', () => {
104
+
const rules = parseRedirectsFile('/blog/:year/:month/:day /posts/:year-:month-:day');
105
+
const match = matchRedirectRule('/blog/2024/01/15', rules);
106
+
expect(match).toBeTruthy();
107
+
expect(match?.targetPath).toBe('/posts/2024-01-15');
108
+
});
109
+
110
+
it('should preserve query strings for 301/302 redirects', () => {
111
+
const rules = parseRedirectsFile('/old /new 301');
112
+
const match = matchRedirectRule('/old', rules, {
113
+
      queryParams: { foo: 'bar', baz: 'qux' },
    });
    expect(match?.targetPath).toContain('?');
    expect(match?.targetPath).toContain('foo=bar');
    expect(match?.targetPath).toContain('baz=qux');
  });

  it('should match based on query parameters', () => {
    const rules = parseRedirectsFile('/store id=:id /blog/:id 301');
    const match = matchRedirectRule('/store', rules, {
      queryParams: { id: 'my-post' },
    });
    expect(match).toBeTruthy();
    expect(match?.targetPath).toContain('/blog/my-post');
  });

  it('should not match when query params are missing', () => {
    const rules = parseRedirectsFile('/store id=:id /blog/:id 301');
    const match = matchRedirectRule('/store', rules, {
      queryParams: {},
    });
    expect(match).toBeNull();
  });

  it('should match based on country header', () => {
    const rules = parseRedirectsFile('/ /aus 302 Country=au');
    const match = matchRedirectRule('/', rules, {
      headers: { 'cf-ipcountry': 'AU' },
    });
    expect(match).toBeTruthy();
    expect(match?.targetPath).toBe('/aus');
  });

  it('should not match wrong country', () => {
    const rules = parseRedirectsFile('/ /aus 302 Country=au');
    const match = matchRedirectRule('/', rules, {
      headers: { 'cf-ipcountry': 'US' },
    });
    expect(match).toBeNull();
  });

  it('should match based on language header', () => {
    const rules = parseRedirectsFile('/products /en/products 301 Language=en');
    const match = matchRedirectRule('/products', rules, {
      headers: { 'accept-language': 'en-US,en;q=0.9' },
    });
    expect(match).toBeTruthy();
    expect(match?.targetPath).toBe('/en/products');
  });

  it('should match based on cookie presence', () => {
    const rules = parseRedirectsFile('/* /legacy/:splat 200 Cookie=is_legacy');
    const match = matchRedirectRule('/some-path', rules, {
      cookies: { is_legacy: 'true' },
    });
    expect(match).toBeTruthy();
    expect(match?.targetPath).toBe('/legacy/some-path');
  });

  it('should return first matching rule', () => {
    const content = `
/path /first
/path /second
`;
    const rules = parseRedirectsFile(content);
    const match = matchRedirectRule('/path', rules);
    expect(match?.targetPath).toBe('/first');
  });

  it('should match more specific rules before general ones', () => {
    const content = `
/jobs/customer-ninja /careers/support
/jobs/* /careers/:splat
`;
    const rules = parseRedirectsFile(content);

    const match1 = matchRedirectRule('/jobs/customer-ninja', rules);
    expect(match1?.targetPath).toBe('/careers/support');

    const match2 = matchRedirectRule('/jobs/developer', rules);
    expect(match2?.targetPath).toBe('/careers/developer');
  });

  it('should handle SPA routing pattern', () => {
    const rules = parseRedirectsFile('/* /index.html 200');

    // Should match any path
    const match1 = matchRedirectRule('/about', rules);
    expect(match1).toBeTruthy();
    expect(match1?.targetPath).toBe('/index.html');
    expect(match1?.status).toBe(200);

    const match2 = matchRedirectRule('/users/123/profile', rules);
    expect(match2).toBeTruthy();
    expect(match2?.targetPath).toBe('/index.html');
    expect(match2?.status).toBe(200);

    const match3 = matchRedirectRule('/', rules);
    expect(match3).toBeTruthy();
    expect(match3?.targetPath).toBe('/index.html');
  });
});
+413  hosting-service/src/lib/redirects.ts
···
import { readFile } from 'fs/promises';
import { existsSync } from 'fs';

export interface RedirectRule {
  from: string;
  to: string;
  status: number;
  force: boolean;
  conditions?: {
    country?: string[];
    language?: string[];
    role?: string[];
    cookie?: string[];
  };
  // For pattern matching
  fromPattern?: RegExp;
  fromParams?: string[]; // Named parameters from the pattern
  queryParams?: Record<string, string>; // Expected query parameters
}

export interface RedirectMatch {
  rule: RedirectRule;
  targetPath: string;
  status: number;
}

/**
 * Parse a _redirects file into an array of redirect rules
 */
export function parseRedirectsFile(content: string): RedirectRule[] {
  const lines = content.split('\n');
  const rules: RedirectRule[] = [];

  for (let lineNum = 0; lineNum < lines.length; lineNum++) {
    const lineRaw = lines[lineNum];
    if (!lineRaw) continue;

    const line = lineRaw.trim();

    // Skip empty lines and comments
    if (!line || line.startsWith('#')) {
      continue;
    }

    try {
      const rule = parseRedirectLine(line);
      if (rule && rule.fromPattern) {
        rules.push(rule);
      }
    } catch (err) {
      console.warn(`Failed to parse redirect rule on line ${lineNum + 1}: ${line}`, err);
    }
  }

  return rules;
}

/**
 * Parse a single redirect rule line
 * Format: /from [query_params] /to [status] [conditions]
 */
function parseRedirectLine(line: string): RedirectRule | null {
  // Split by whitespace, but respect quoted strings (though not commonly used)
  const parts = line.split(/\s+/);

  if (parts.length < 2) {
    return null;
  }

  let idx = 0;
  const from = parts[idx++];

  if (!from) {
    return null;
  }

  let status = 301; // Default status
  let force = false;
  const conditions: NonNullable<RedirectRule['conditions']> = {};
  const queryParams: Record<string, string> = {};

  // Parse query parameters that come before the destination path
  // They look like: key=:value (and don't start with /)
  while (idx < parts.length) {
    const part = parts[idx];
    if (!part) {
      idx++;
      continue;
    }

    // If it starts with / or http, it's the destination path
    if (part.startsWith('/') || part.startsWith('http://') || part.startsWith('https://')) {
      break;
    }

    // If it contains = and comes before the destination, it's a query param
    if (part.includes('=')) {
      const splitIndex = part.indexOf('=');
      const key = part.slice(0, splitIndex);
      const value = part.slice(splitIndex + 1);

      if (key && value) {
        queryParams[key] = value;
      }
      idx++;
    } else {
      // Not a query param, must be destination or something else
      break;
    }
  }

  // Next part should be the destination
  if (idx >= parts.length) {
    return null;
  }

  const to = parts[idx++];
  if (!to) {
    return null;
  }

  // Parse remaining parts for status code and conditions
  for (let i = idx; i < parts.length; i++) {
    const part = parts[i];

    if (!part) continue;

    // Check for status code (with optional ! for force)
    if (/^\d+!?$/.test(part)) {
      if (part.endsWith('!')) {
        force = true;
        status = parseInt(part.slice(0, -1));
      } else {
        status = parseInt(part);
      }
      continue;
    }

    // Check for condition parameters (Country=, Language=, Role=, Cookie=)
    if (part.includes('=')) {
      const splitIndex = part.indexOf('=');
      const key = part.slice(0, splitIndex);
      const value = part.slice(splitIndex + 1);

      if (!key || !value) continue;

      const keyLower = key.toLowerCase();

      if (keyLower === 'country') {
        conditions.country = value.split(',').map(v => v.trim().toLowerCase());
      } else if (keyLower === 'language') {
        conditions.language = value.split(',').map(v => v.trim().toLowerCase());
      } else if (keyLower === 'role') {
        conditions.role = value.split(',').map(v => v.trim());
      } else if (keyLower === 'cookie') {
        conditions.cookie = value.split(',').map(v => v.trim().toLowerCase());
      }
    }
  }

  // Parse the 'from' pattern
  const { pattern, params } = convertPathToRegex(from);

  return {
    from,
    to,
    status,
    force,
    conditions: Object.keys(conditions).length > 0 ? conditions : undefined,
    queryParams: Object.keys(queryParams).length > 0 ? queryParams : undefined,
    fromPattern: pattern,
    fromParams: params,
  };
}

/**
 * Convert a path pattern with placeholders and splats to a regex
 * Examples:
 *   /blog/:year/:month/:day -> captures year, month, day
 *   /news/* -> captures splat
 */
function convertPathToRegex(pattern: string): { pattern: RegExp; params: string[] } {
  const params: string[] = [];
  let regexStr = '^';

  // Split by query string if present
  const pathPart = pattern.split('?')[0] || pattern;

  // Escape special regex characters except * and :
  let escaped = pathPart.replace(/[.+^${}()|[\]\\]/g, '\\$&');

  // Replace :param with named capture groups
  escaped = escaped.replace(/:([a-zA-Z_][a-zA-Z0-9_]*)/g, (match, paramName) => {
    params.push(paramName);
    // Match path segment (everything except / and ?)
    return '([^/?]+)';
  });

  // Replace * with splat capture (matches everything including /)
  if (escaped.includes('*')) {
    escaped = escaped.replace(/\*/g, '(.*)');
    params.push('splat');
  }

  regexStr += escaped;

  // Make trailing slash optional
  if (!regexStr.endsWith('.*')) {
    regexStr += '/?';
  }

  regexStr += '$';

  return {
    pattern: new RegExp(regexStr),
    params,
  };
}

/**
 * Match a request path against redirect rules
 */
export function matchRedirectRule(
  requestPath: string,
  rules: RedirectRule[],
  context?: {
    queryParams?: Record<string, string>;
    headers?: Record<string, string>;
    cookies?: Record<string, string>;
  }
): RedirectMatch | null {
  // Normalize path: ensure leading slash, remove trailing slash (except for root)
  let normalizedPath = requestPath.startsWith('/') ? requestPath : `/${requestPath}`;

  for (const rule of rules) {
    // Check query parameter conditions first (if any)
    if (rule.queryParams) {
      // If rule requires query params but none provided, skip this rule
      if (!context?.queryParams) {
        continue;
      }

      const queryMatches = Object.entries(rule.queryParams).every(([key, value]) => {
        const actualValue = context.queryParams?.[key];
        return actualValue !== undefined;
      });

      if (!queryMatches) {
        continue;
      }
    }

    // Check conditional redirects (country, language, role, cookie)
    if (rule.conditions) {
      if (rule.conditions.country && context?.headers) {
        const cfCountry = context.headers['cf-ipcountry'];
        const xCountry = context.headers['x-country'];
        const country = (cfCountry?.toLowerCase() || xCountry?.toLowerCase());
        if (!country || !rule.conditions.country.includes(country)) {
          continue;
        }
      }

      if (rule.conditions.language && context?.headers) {
        const acceptLang = context.headers['accept-language'];
        if (!acceptLang) {
          continue;
        }
        // Parse accept-language header (simplified)
        const langs = acceptLang.split(',').map(l => {
          const langPart = l.split(';')[0];
          return langPart ? langPart.trim().toLowerCase() : '';
        }).filter(l => l !== '');
        const hasMatch = rule.conditions.language.some(lang =>
          langs.some(l => l === lang || l.startsWith(lang + '-'))
        );
        if (!hasMatch) {
          continue;
        }
      }

      if (rule.conditions.cookie && context?.cookies) {
        const hasCookie = rule.conditions.cookie.some(cookieName =>
          context.cookies && cookieName in context.cookies
        );
        if (!hasCookie) {
          continue;
        }
      }

      // Role-based redirects would need JWT verification - skip for now
      if (rule.conditions.role) {
        continue;
      }
    }

    // Match the path pattern
    const match = rule.fromPattern?.exec(normalizedPath);
    if (!match) {
      continue;
    }

    // Build the target path by replacing placeholders
    let targetPath = rule.to;

    // Replace captured parameters
    if (rule.fromParams && match.length > 1) {
      for (let i = 0; i < rule.fromParams.length; i++) {
        const paramName = rule.fromParams[i];
        const paramValue = match[i + 1];

        if (!paramName || !paramValue) continue;

        if (paramName === 'splat') {
          targetPath = targetPath.replace(':splat', paramValue);
        } else {
          targetPath = targetPath.replace(`:${paramName}`, paramValue);
        }
      }
    }

    // Handle query parameter replacements
    if (rule.queryParams && context?.queryParams) {
      for (const [key, placeholder] of Object.entries(rule.queryParams)) {
        const actualValue = context.queryParams[key];
        if (actualValue && placeholder && placeholder.startsWith(':')) {
          const paramName = placeholder.slice(1);
          if (paramName) {
            targetPath = targetPath.replace(`:${paramName}`, actualValue);
          }
        }
      }
    }

    // Preserve query string for 200, 301, 302 redirects (unless target already has one)
    if ([200, 301, 302].includes(rule.status) && context?.queryParams && !targetPath.includes('?')) {
      const queryString = Object.entries(context.queryParams)
        .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
        .join('&');
      if (queryString) {
        targetPath += `?${queryString}`;
      }
    }

    return {
      rule,
      targetPath,
      status: rule.status,
    };
  }

  return null;
}

/**
 * Load redirect rules from a cached site
 */
export async function loadRedirectRules(did: string, rkey: string): Promise<RedirectRule[]> {
  const CACHE_DIR = process.env.CACHE_DIR || './cache/sites';
  const redirectsPath = `${CACHE_DIR}/${did}/${rkey}/_redirects`;

  if (!existsSync(redirectsPath)) {
    return [];
  }

  try {
    const content = await readFile(redirectsPath, 'utf-8');
    return parseRedirectsFile(content);
  } catch (err) {
    console.error('Failed to load _redirects file', err);
    return [];
  }
}

/**
 * Parse cookies from Cookie header
 */
export function parseCookies(cookieHeader?: string): Record<string, string> {
  if (!cookieHeader) return {};

  const cookies: Record<string, string> = {};
  const parts = cookieHeader.split(';');

  for (const part of parts) {
    const [key, ...valueParts] = part.split('=');
    if (key && valueParts.length > 0) {
      cookies[key.trim()] = valueParts.join('=').trim();
    }
  }

  return cookies;
}

/**
 * Parse query string into object
 */
export function parseQueryString(url: string): Record<string, string> {
  const queryStart = url.indexOf('?');
  if (queryStart === -1) return {};

  const queryString = url.slice(queryStart + 1);
  const params: Record<string, string> = {};

  for (const pair of queryString.split('&')) {
    const [key, value] = pair.split('=');
    if (key) {
      params[decodeURIComponent(key)] = value ? decodeURIComponent(value) : '';
    }
  }

  return params;
}
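For a sense of how the exported helpers above compose, here is a minimal standalone sketch, mirroring what the tests and the server changes below do; the rule text and the request values are made up for illustration:

```
import { parseRedirectsFile, matchRedirectRule, parseCookies, parseQueryString } from './redirects';

// Hypothetical _redirects content
const rules = parseRedirectsFile(`
/old-blog/* /blog/:splat 301
/* /index.html 200
`);

// Hypothetical incoming request: /old-blog/2024/hello?ref=feed
const match = matchRedirectRule('/old-blog/2024/hello', rules, {
  queryParams: parseQueryString('/old-blog/2024/hello?ref=feed'),
  cookies: parseCookies('theme=dark'),
  headers: { 'cf-ipcountry': 'US' },
});

// -> 301 '/blog/2024/hello?ref=feed' (splat substituted, query string preserved)
console.log(match?.status, match?.targetPath);
```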
+168  -6  hosting-service/src/server.ts
···
 import { lookup } from 'mime-types';
 import { logger, observabilityMiddleware, observabilityErrorHandler, logCollector, errorTracker, metricsCollector } from './lib/observability';
 import { fileCache, metadataCache, rewrittenHtmlCache, getCacheKey, type FileMetadata } from './lib/cache';
+import { loadRedirectRules, matchRedirectRule, parseCookies, parseQueryString, type RedirectRule } from './lib/redirects';

 const BASE_HOST = process.env.BASE_HOST || 'wisp.place';

···
 }
 }

+// Cache for redirect rules (per site)
+const redirectRulesCache = new Map<string, RedirectRule[]>();
+
+/**
+ * Clear redirect rules cache for a specific site
+ * Should be called when a site is updated/recached
+ */
+export function clearRedirectRulesCache(did: string, rkey: string) {
+  const cacheKey = `${did}:${rkey}`;
+  redirectRulesCache.delete(cacheKey);
+}
+
 // Helper to serve files from cache
-async function serveFromCache(did: string, rkey: string, filePath: string) {
+async function serveFromCache(
+  did: string,
+  rkey: string,
+  filePath: string,
+  fullUrl?: string,
+  headers?: Record<string, string>
+) {
+  // Check for redirect rules first
+  const redirectCacheKey = `${did}:${rkey}`;
+  let redirectRules = redirectRulesCache.get(redirectCacheKey);
+
+  if (redirectRules === undefined) {
+    // Load rules for the first time
+    redirectRules = await loadRedirectRules(did, rkey);
+    redirectRulesCache.set(redirectCacheKey, redirectRules);
+  }
+
+  // Apply redirect rules if any exist
+  if (redirectRules.length > 0) {
+    const requestPath = '/' + (filePath || '');
+    const queryParams = fullUrl ? parseQueryString(fullUrl) : {};
+    const cookies = parseCookies(headers?.['cookie']);
+
+    const redirectMatch = matchRedirectRule(requestPath, redirectRules, {
+      queryParams,
+      headers,
+      cookies,
+    });
+
+    if (redirectMatch) {
+      const { targetPath, status } = redirectMatch;
+
+      // Handle different status codes
+      if (status === 200) {
+        // Rewrite: serve different content but keep URL the same
+        // Remove leading slash for internal path resolution
+        const rewritePath = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath;
+        return serveFileInternal(did, rkey, rewritePath);
+      } else if (status === 301 || status === 302) {
+        // External redirect: change the URL
+        return new Response(null, {
+          status,
+          headers: {
+            'Location': targetPath,
+            'Cache-Control': status === 301 ? 'public, max-age=31536000' : 'public, max-age=0',
+          },
+        });
+      } else if (status === 404) {
+        // Custom 404 page
+        const custom404Path = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath;
+        const response = await serveFileInternal(did, rkey, custom404Path);
+        // Override status to 404
+        return new Response(response.body, {
+          status: 404,
+          headers: response.headers,
+        });
+      }
+    }
+  }
+
+  // No redirect matched, serve normally
+  return serveFileInternal(did, rkey, filePath);
+}
+
+// Internal function to serve a file (used by both normal serving and rewrites)
+async function serveFileInternal(did: string, rkey: string, filePath: string) {
   // Default to index.html if path is empty or ends with /
   let requestPath = filePath || 'index.html';
   if (requestPath.endsWith('/')) {
···
   did: string,
   rkey: string,
   filePath: string,
-  basePath: string
+  basePath: string,
+  fullUrl?: string,
+  headers?: Record<string, string>
 ) {
+  // Check for redirect rules first
+  const redirectCacheKey = `${did}:${rkey}`;
+  let redirectRules = redirectRulesCache.get(redirectCacheKey);
+
+  if (redirectRules === undefined) {
+    // Load rules for the first time
+    redirectRules = await loadRedirectRules(did, rkey);
+    redirectRulesCache.set(redirectCacheKey, redirectRules);
+  }
+
+  // Apply redirect rules if any exist
+  if (redirectRules.length > 0) {
+    const requestPath = '/' + (filePath || '');
+    const queryParams = fullUrl ? parseQueryString(fullUrl) : {};
+    const cookies = parseCookies(headers?.['cookie']);
+
+    const redirectMatch = matchRedirectRule(requestPath, redirectRules, {
+      queryParams,
+      headers,
+      cookies,
+    });
+
+    if (redirectMatch) {
+      const { targetPath, status } = redirectMatch;
+
+      // Handle different status codes
+      if (status === 200) {
+        // Rewrite: serve different content but keep URL the same
+        const rewritePath = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath;
+        return serveFileInternalWithRewrite(did, rkey, rewritePath, basePath);
+      } else if (status === 301 || status === 302) {
+        // External redirect: change the URL
+        // For sites.wisp.place, we need to adjust the target path to include the base path
+        // unless it's an absolute URL
+        let redirectTarget = targetPath;
+        if (!targetPath.startsWith('http://') && !targetPath.startsWith('https://')) {
+          redirectTarget = basePath + (targetPath.startsWith('/') ? targetPath.slice(1) : targetPath);
+        }
+        return new Response(null, {
+          status,
+          headers: {
+            'Location': redirectTarget,
+            'Cache-Control': status === 301 ? 'public, max-age=31536000' : 'public, max-age=0',
+          },
+        });
+      } else if (status === 404) {
+        // Custom 404 page
+        const custom404Path = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath;
+        const response = await serveFileInternalWithRewrite(did, rkey, custom404Path, basePath);
+        // Override status to 404
+        return new Response(response.body, {
+          status: 404,
+          headers: response.headers,
+        });
+      }
+    }
+  }
+
+  // No redirect matched, serve normally
+  return serveFileInternalWithRewrite(did, rkey, filePath, basePath);
+}
+
+// Internal function to serve a file with rewriting
+async function serveFileInternalWithRewrite(did: string, rkey: string, filePath: string, basePath: string) {
   // Default to index.html if path is empty or ends with /
   let requestPath = filePath || 'index.html';
   if (requestPath.endsWith('/')) {
···

   try {
     await downloadAndCacheSite(did, rkey, siteData.record, pdsEndpoint, siteData.cid);
+    // Clear redirect rules cache since the site was updated
+    clearRedirectRulesCache(did, rkey);
     logger.info('Site cached successfully', { did, rkey });
     return true;
   } catch (err) {
···

   // Serve with HTML path rewriting to handle absolute paths
   const basePath = `/${identifier}/${site}/`;
-  return serveFromCacheWithRewrite(did, site, filePath, basePath);
+  const headers: Record<string, string> = {};
+  c.req.raw.headers.forEach((value, key) => {
+    headers[key.toLowerCase()] = value;
+  });
+  return serveFromCacheWithRewrite(did, site, filePath, basePath, c.req.url, headers);
 }

 // Check if this is a DNS hash subdomain
···
   return c.text('Site not found', 404);
   }

-  return serveFromCache(customDomain.did, rkey, path);
+  const headers: Record<string, string> = {};
+  c.req.raw.headers.forEach((value, key) => {
+    headers[key.toLowerCase()] = value;
+  });
+  return serveFromCache(customDomain.did, rkey, path, c.req.url, headers);
 }

 // Route 2: Registered subdomains - /*.wisp.place/*
···
   return c.text('Site not found', 404);
   }

-  return serveFromCache(domainInfo.did, rkey, path);
+  const headers: Record<string, string> = {};
+  c.req.raw.headers.forEach((value, key) => {
+    headers[key.toLowerCase()] = value;
+  });
+  return serveFromCache(domainInfo.did, rkey, path, c.req.url, headers);
 }

 // Route 1: Custom domains - /*
···
   return c.text('Site not found', 404);
   }

-  return serveFromCache(customDomain.did, rkey, path);
+  const headers: Record<string, string> = {};
+  c.req.raw.headers.forEach((value, key) => {
+    headers[key.toLowerCase()] = value;
+  });
+  return serveFromCache(customDomain.did, rkey, path, c.req.url, headers);
 });

 // Internal observability endpoints (for admin panel)
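The same header-flattening snippet is repeated before each serveFromCache call above. Since the Fetch API Headers object iterates with already-lowercased names, the routes could share one small helper instead; a minimal sketch, with a made-up helper name:

```
// Hypothetical helper: flatten a Fetch Headers object into the plain record
// that matchRedirectRule() expects (header names come out lowercase).
function headersToRecord(headers: Headers): Record<string, string> {
  return Object.fromEntries(headers.entries());
}

// e.g. inside a route handler:
// return serveFromCache(did, rkey, path, c.req.url, headersToRecord(c.req.raw.headers));
```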
+78  -14  public/editor/tabs/CLITab.tsx
···
 <CardHeader>
 <div className="flex items-center gap-2 mb-2">
 <CardTitle>Wisp CLI Tool</CardTitle>
-<Badge variant="secondary" className="text-xs">v0.1.0</Badge>
+<Badge variant="secondary" className="text-xs">v0.2.0</Badge>
 <Badge variant="outline" className="text-xs">Alpha</Badge>
 </div>
 <CardDescription>
···
 </div>

 <div className="space-y-3">
-<h3 className="text-sm font-semibold">Download CLI</h3>
+<h3 className="text-sm font-semibold">Features</h3>
+<ul className="text-sm text-muted-foreground space-y-2 list-disc list-inside">
+<li><strong>Deploy:</strong> Push static sites directly from your terminal</li>
+<li><strong>Pull:</strong> Download sites from the PDS for development or backup</li>
+<li><strong>Serve:</strong> Run a local server with real-time firehose updates</li>
+</ul>
+</div>
+
+<div className="space-y-3">
+<h3 className="text-sm font-semibold">Download v0.2.0</h3>
 <div className="grid gap-2">
 <div className="p-3 bg-muted/50 hover:bg-muted rounded-lg transition-colors border border-border">
 <a
-href="https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-macos-arm64"
+href="https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-aarch64-darwin"
 target="_blank"
 rel="noopener noreferrer"
 className="flex items-center justify-between mb-2"
···
 <ExternalLink className="w-4 h-4 text-muted-foreground" />
 </a>
 <div className="text-xs text-muted-foreground">
-<span className="font-mono">SHA256: 637e325d9668ca745e01493d80dfc72447ef0a889b313e28913ca65c94c7aaae</span>
+<span className="font-mono">SHA-1: a8c27ea41c5e2672bfecb3476ece1c801741d759</span>
 </div>
 </div>
 <div className="p-3 bg-muted/50 hover:bg-muted rounded-lg transition-colors border border-border">
···
 <ExternalLink className="w-4 h-4 text-muted-foreground" />
 </a>
 <div className="text-xs text-muted-foreground">
-<span className="font-mono">SHA256: 01561656b64826f95b39f13c65c97da8bcc63ecd9f4d7e4e369c8ba8c903c22a</span>
+<span className="font-mono">SHA-1: fd7ee689c7600fc953179ea755b0357c8481a622</span>
 </div>
 </div>
 <div className="p-3 bg-muted/50 hover:bg-muted rounded-lg transition-colors border border-border">
···
 <ExternalLink className="w-4 h-4 text-muted-foreground" />
 </a>
 <div className="text-xs text-muted-foreground">
-<span className="font-mono">SHA256: 1ff485b9bcf89bc5721a862863c4843cf4530cbcd2489cf200cb24a44f7865a2</span>
+<span className="font-mono">SHA-1: 8bca6992559e19e1d29ab3d2fcc6d09b28e5a485</span>
+</div>
+</div>
+<div className="p-3 bg-muted/50 hover:bg-muted rounded-lg transition-colors border border-border">
+<a
+href="https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-x86_64-windows.exe"
+target="_blank"
+rel="noopener noreferrer"
+className="flex items-center justify-between mb-2"
+>
+<span className="font-mono text-sm">Windows (x86_64)</span>
+<ExternalLink className="w-4 h-4 text-muted-foreground" />
+</a>
+<div className="text-xs text-muted-foreground">
+<span className="font-mono">SHA-1: 90ea3987a06597fa6c42e1df9009e9758e92dd54</span>
 </div>
 </div>
 </div>
 </div>

 <div className="space-y-3">
-<h3 className="text-sm font-semibold">Basic Usage</h3>
+<h3 className="text-sm font-semibold">Deploy a Site</h3>
 <CodeBlock
 code={`# Download and make executable
-curl -O https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-macos-arm64
-chmod +x wisp-cli-macos-arm64
+curl -O https://sites.wisp.place/nekomimi.pet/wisp-cli-binaries/wisp-cli-aarch64-darwin
+chmod +x wisp-cli-aarch64-darwin

-# Deploy your site (will use OAuth)
-./wisp-cli-macos-arm64 your-handle.bsky.social \\
+# Deploy your site
+./wisp-cli-aarch64-darwin deploy your-handle.bsky.social \\
 --path ./dist \\
--site my-site
+--site my-site \\
+--password your-app-password

 # Your site will be available at:
 # https://sites.wisp.place/your-handle/my-site`}
···
 </div>

 <div className="space-y-3">
+<h3 className="text-sm font-semibold">Pull a Site from PDS</h3>
+<p className="text-xs text-muted-foreground">
+Download a site from the PDS to your local machine (uses OAuth authentication):
+</p>
+<CodeBlock
+code={`# Pull a site to a specific directory
+wisp-cli pull your-handle.bsky.social \\
+--site my-site \\
+--output ./my-site
+
+# Pull to current directory
+wisp-cli pull your-handle.bsky.social \\
+--site my-site
+
+# Opens browser for OAuth authentication on first run`}
+language="bash"
+/>
+</div>
+
+<div className="space-y-3">
+<h3 className="text-sm font-semibold">Serve a Site Locally with Real-Time Updates</h3>
+<p className="text-xs text-muted-foreground">
+Run a local server that monitors the firehose for real-time updates (uses OAuth authentication):
+</p>
+<CodeBlock
+code={`# Serve on http://localhost:8080 (default)
+wisp-cli serve your-handle.bsky.social \\
+--site my-site
+
+# Serve on a custom port
+wisp-cli serve your-handle.bsky.social \\
+--site my-site \\
+--port 3000
+
+# Downloads site, serves it, and watches firehose for live updates!`}
+language="bash"
+/>
+</div>
+
+<div className="space-y-3">
 <h3 className="text-sm font-semibold">CI/CD with Tangled Spindle</h3>
 <p className="text-xs text-muted-foreground">
 Deploy automatically on every push using{' '}
···
 chmod +x wisp-cli

 # Deploy to Wisp
-./wisp-cli \\
+./wisp-cli deploy \\
 "$WISP_HANDLE" \\
 --path "$SITE_PATH" \\
 --site "$SITE_NAME" \\
···
 chmod +x wisp-cli

 # Deploy to Wisp
-./wisp-cli \\
+./wisp-cli deploy \\
 "$WISP_HANDLE" \\
 --path "$SITE_PATH" \\
 --site "$SITE_NAME" \\
+6  public/index.tsx
···
 window.location.href = '/editor'
 return
 }
+// If not authenticated, clear any stale cookies
+document.cookie = 'did=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT; SameSite=Lax'
 } catch (error) {
 console.error('Auth check failed:', error)
+// Clear cookies on error as well
+document.cookie = 'did=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT; SameSite=Lax'
 } finally {
 setCheckingAuth(false)
 }
···
 'Login failed:',
 error
 )
+// Clear any invalid cookies
+document.cookie = 'did=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT; SameSite=Lax'
 alert('Authentication failed')
 }
 }}
+2  -2  scripts/change-admin-password.ts
···
 // Change admin password
-import { adminAuth } from './src/lib/admin-auth'
-import { db } from './src/lib/db'
+import { adminAuth } from '../src/lib/admin-auth'
+import { db } from '../src/lib/db'
 import { randomBytes, createHash } from 'crypto'

 // Get username and new password from command line
+5  -5  src/index.ts
···
 },
 cookie: {
   secrets: cookieSecret,
-  sign: true
+  sign: ['did']
 }
 })
 // Observability middleware
···
 .onError(observabilityMiddleware('main-app').onError)
 .use(csrfProtection())
 .use(authRoutes(client, cookieSecret))
-.use(wispRoutes(client))
-.use(domainRoutes(client))
-.use(userRoutes(client))
-.use(siteRoutes(client))
+.use(wispRoutes(client, cookieSecret))
+.use(domainRoutes(client, cookieSecret))
+.use(userRoutes(client, cookieSecret))
+.use(siteRoutes(client, cookieSecret))
 .use(adminRoutes(cookieSecret))
 .use(
 await staticPlugin({
+42  -7  src/routes/admin.ts
···
 password: t.String()
 }),
 cookie: t.Cookie({
-admin_session: t.String()
+admin_session: t.Optional(t.String())
 }, {
 secrets: cookieSecret,
 sign: ['admin_session']
···
 // Get logs from hosting service
 let hostingLogs: any[] = []
 try {
-const hostingPort = process.env.HOSTING_PORT || '3001'
+const hostingServiceUrl = process.env.HOSTING_SERVICE_URL || `http://localhost:${process.env.HOSTING_PORT || '3001'}`
 const params = new URLSearchParams()
 if (query.level) params.append('level', query.level as string)
 if (query.service) params.append('service', query.service as string)
···
 if (query.eventType) params.append('eventType', query.eventType as string)
 params.append('limit', String(filter.limit || 100))

-const response = await fetch(`http://localhost:${hostingPort}/__internal__/observability/logs?${params}`)
+const response = await fetch(`${hostingServiceUrl}/__internal__/observability/logs?${params}`)
 if (response.ok) {
 const data = await response.json()
 hostingLogs = data.logs
···
 // Get errors from hosting service
 let hostingErrors: any[] = []
 try {
-const hostingPort = process.env.HOSTING_PORT || '3001'
+const hostingServiceUrl = process.env.HOSTING_SERVICE_URL || `http://localhost:${process.env.HOSTING_PORT || '3001'}`
 const params = new URLSearchParams()
 if (query.service) params.append('service', query.service as string)
 params.append('limit', String(filter.limit || 100))

-const response = await fetch(`http://localhost:${hostingPort}/__internal__/observability/errors?${params}`)
+const response = await fetch(`${hostingServiceUrl}/__internal__/observability/errors?${params}`)
 if (response.ok) {
 const data = await response.json()
 hostingErrors = data.errors
···
 }

 try {
-const hostingPort = process.env.HOSTING_PORT || '3001'
-const response = await fetch(`http://localhost:${hostingPort}/__internal__/observability/metrics?timeWindow=${timeWindow}`)
+const hostingServiceUrl = process.env.HOSTING_SERVICE_URL || `http://localhost:${process.env.HOSTING_PORT || '3001'}`
+const response = await fetch(`${hostingServiceUrl}/__internal__/observability/metrics?timeWindow=${timeWindow}`)
 if (response.ok) {
 const data = await response.json()
 hostingServiceStats = data.stats
···
 set.status = 500
 return {
 error: 'Failed to fetch database stats',
+message: error instanceof Error ? error.message : String(error)
+}
+}
+}, {
+cookie: t.Cookie({
+admin_session: t.Optional(t.String())
+}, {
+secrets: cookieSecret,
+sign: ['admin_session']
+})
+})
+
+// Get cache stats (protected)
+.get('/cache', async ({ cookie, set }) => {
+const check = requireAdmin({ cookie, set })
+if (check) return check
+
+try {
+const hostingServiceUrl = process.env.HOSTING_SERVICE_URL || `http://localhost:${process.env.HOSTING_PORT || '3001'}`
+const response = await fetch(`${hostingServiceUrl}/__internal__/observability/cache`)
+
+if (response.ok) {
+const data = await response.json()
+return data
+} else {
+set.status = 503
+return {
+error: 'Failed to fetch cache stats from hosting service',
+message: 'Hosting service unavailable'
+}
+}
+} catch (error) {
+set.status = 500
+return {
+error: 'Failed to fetch cache stats',
 message: error instanceof Error ? error.message : String(error)
 }
 }
+10  -22  src/routes/auth.ts
···
 import { authenticateRequest } from '../lib/wisp-auth'
 import { logger } from '../lib/observability'

-export const authRoutes = (client: NodeOAuthClient, cookieSecret: string) => new Elysia()
+export const authRoutes = (client: NodeOAuthClient, cookieSecret: string) => new Elysia({
+  cookie: {
+    secrets: cookieSecret,
+    sign: ['did']
+  }
+})
 .post('/api/auth/signin', async (c) => {
 let handle = 'unknown'
 try {
···

 if (!session) {
 logger.error('[Auth] OAuth callback failed: no session returned')
+c.cookie.did.remove()
 return c.redirect('/?error=auth_failed')
 }

···
 } catch (err) {
 // This catches state validation failures and other OAuth errors
 logger.error('[Auth] OAuth callback error', err)
+c.cookie.did.remove()
 return c.redirect('/?error=auth_failed')
 }
-}, {
-cookie: t.Cookie({
-did: t.Optional(t.String())
-}, {
-secrets: cookieSecret,
-sign: ['did']
-})
 })
 .post('/api/auth/logout', async (c) => {
 try {
···
 logger.error('[Auth] Logout error', err)
 return { error: 'Logout failed' }
 }
-}, {
-cookie: t.Cookie({
-did: t.Optional(t.String())
-}, {
-secrets: cookieSecret,
-sign: ['did']
-})
 })
 .get('/api/auth/status', async (c) => {
 try {
 const auth = await authenticateRequest(client, c.cookie)

 if (!auth) {
+c.cookie.did.remove()
 return { authenticated: false }
 }

···
 }
 } catch (err) {
 logger.error('[Auth] Status check error', err)
+c.cookie.did.remove()
 return { authenticated: false }
 }
-}, {
-cookie: t.Cookie({
-did: t.Optional(t.String())
-}, {
-secrets: cookieSecret,
-sign: ['did']
-})
 })
+8  -2  src/routes/domain.ts
···
 import { verifyCustomDomain } from '../lib/dns-verify'
 import { logger } from '../lib/logger'

-export const domainRoutes = (client: NodeOAuthClient) =>
-  new Elysia({ prefix: '/api/domain' })
+export const domainRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
+  new Elysia({
+    prefix: '/api/domain',
+    cookie: {
+      secrets: cookieSecret,
+      sign: ['did']
+    }
+  })
   // Public endpoints (no auth required)
   .get('/check', async ({ query }) => {
   try {
+8  -2  src/routes/site.ts
···
 import { deleteSite } from '../lib/db'
 import { logger } from '../lib/logger'

-export const siteRoutes = (client: NodeOAuthClient) =>
-  new Elysia({ prefix: '/api/site' })
+export const siteRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
+  new Elysia({
+    prefix: '/api/site',
+    cookie: {
+      secrets: cookieSecret,
+      sign: ['did']
+    }
+  })
   .derive(async ({ cookie }) => {
   const auth = await requireAuth(client, cookie)
   return { auth }
+9  -3  src/routes/user.ts
···
-import { Elysia } from 'elysia'
+import { Elysia, t } from 'elysia'
 import { requireAuth } from '../lib/wisp-auth'
 import { NodeOAuthClient } from '@atproto/oauth-client-node'
 import { Agent } from '@atproto/api'
···
 import { syncSitesFromPDS } from '../lib/sync-sites'
 import { logger } from '../lib/logger'

-export const userRoutes = (client: NodeOAuthClient) =>
-  new Elysia({ prefix: '/api/user' })
+export const userRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
+  new Elysia({
+    prefix: '/api/user',
+    cookie: {
+      secrets: cookieSecret,
+      sign: ['did']
+    }
+  })
   .derive(async ({ cookie }) => {
   const auth = await requireAuth(client, cookie)
   return { auth }
+8  -2  src/routes/wisp.ts
···
 return true;
 }

-export const wispRoutes = (client: NodeOAuthClient) =>
-  new Elysia({ prefix: '/wisp' })
+export const wispRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
+  new Elysia({
+    prefix: '/wisp',
+    cookie: {
+      secrets: cookieSecret,
+      sign: ['did']
+    }
+  })
   .derive(async ({ cookie }) => {
   const auth = await requireAuth(client, cookie)
   return { auth }