+3
-1
.gitignore
+221
PDSharp/Handlers/Auth.fs
···
1
+
namespace PDSharp.Handlers
2
+
3
+
open System
4
+
open System.Text.Json
5
+
open Microsoft.AspNetCore.Http
6
+
open Giraffe
7
+
open PDSharp.Core.Config
8
+
open PDSharp.Core.Auth
9
+
open PDSharp.Handlers
10
+
11
+
module Auth =
12
+
[<CLIMutable>]
13
+
type CreateAccountRequest = {
14
+
handle : string
15
+
email : string option
16
+
password : string
17
+
inviteCode : string option
18
+
}
19
+
20
+
[<CLIMutable>]
21
+
type CreateSessionRequest = { identifier : string; password : string }
22
+
23
+
type SessionResponse = {
24
+
accessJwt : string
25
+
refreshJwt : string
26
+
handle : string
27
+
did : string
28
+
email : string option
29
+
}
30
+
31
+
let private extractBearerToken (ctx : HttpContext) : string option =
32
+
match ctx.Request.Headers.TryGetValue("Authorization") with
33
+
| true, values ->
34
+
let header = values.ToString()
35
+
36
+
if header.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase) then
37
+
Some(header.Substring(7))
38
+
else
39
+
None
40
+
| _ -> None
41
+
42
+
let createAccountHandler : HttpHandler =
43
+
fun next ctx -> task {
44
+
let config = ctx.GetService<AppConfig>()
45
+
let accountStore = ctx.GetService<IAccountStore>()
46
+
let! body = ctx.ReadBodyFromRequestAsync()
47
+
48
+
let request =
49
+
JsonSerializer.Deserialize<CreateAccountRequest>(
50
+
body,
51
+
JsonSerializerOptions(PropertyNameCaseInsensitive = true)
52
+
)
53
+
54
+
if
55
+
String.IsNullOrWhiteSpace(request.handle)
56
+
|| String.IsNullOrWhiteSpace(request.password)
57
+
then
58
+
ctx.SetStatusCode 400
59
+
60
+
return!
61
+
json
62
+
{
63
+
error = "InvalidRequest"
64
+
message = "handle and password are required"
65
+
}
66
+
next
67
+
ctx
68
+
else
69
+
match! PDSharp.Core.Auth.createAccount accountStore request.handle request.password request.email with
70
+
| Result.Error msg ->
71
+
ctx.SetStatusCode 400
72
+
return! json { error = "AccountExists"; message = msg } next ctx
73
+
| Result.Ok(account : Account) ->
74
+
let accessJwt = PDSharp.Core.Auth.createAccessToken config.JwtSecret account.Did
75
+
let refreshJwt = PDSharp.Core.Auth.createRefreshToken config.JwtSecret account.Did
76
+
ctx.SetStatusCode 200
77
+
78
+
return!
79
+
json
80
+
{
81
+
accessJwt = accessJwt
82
+
refreshJwt = refreshJwt
83
+
handle = account.Handle
84
+
did = account.Did
85
+
email = account.Email
86
+
}
87
+
next
88
+
ctx
89
+
}
90
+
91
+
let createSessionHandler : HttpHandler =
92
+
fun next ctx -> task {
93
+
let config = ctx.GetService<AppConfig>()
94
+
let accountStore = ctx.GetService<IAccountStore>()
95
+
let! body = ctx.ReadBodyFromRequestAsync()
96
+
97
+
let request =
98
+
JsonSerializer.Deserialize<CreateSessionRequest>(
99
+
body,
100
+
JsonSerializerOptions(PropertyNameCaseInsensitive = true)
101
+
)
102
+
103
+
if
104
+
String.IsNullOrWhiteSpace(request.identifier)
105
+
|| String.IsNullOrWhiteSpace(request.password)
106
+
then
107
+
ctx.SetStatusCode 400
108
+
109
+
return!
110
+
json
111
+
{
112
+
error = "InvalidRequest"
113
+
message = "identifier and password are required"
114
+
}
115
+
next
116
+
ctx
117
+
else
118
+
let! accountOpt =
119
+
if request.identifier.StartsWith("did:") then
120
+
accountStore.GetAccountByDid request.identifier
121
+
else
122
+
accountStore.GetAccountByHandle request.identifier
123
+
124
+
match accountOpt with
125
+
| None ->
126
+
ctx.SetStatusCode 401
127
+
128
+
return!
129
+
json
130
+
{
131
+
error = "AuthenticationRequired"
132
+
message = "Invalid identifier or password"
133
+
}
134
+
next
135
+
ctx
136
+
| Some(account : Account) ->
137
+
if not (PDSharp.Core.Auth.verifyPassword request.password account.PasswordHash) then
138
+
ctx.SetStatusCode 401
139
+
140
+
return!
141
+
json
142
+
{
143
+
error = "AuthenticationRequired"
144
+
message = "Invalid identifier or password"
145
+
}
146
+
next
147
+
ctx
148
+
else
149
+
let accessJwt = PDSharp.Core.Auth.createAccessToken config.JwtSecret account.Did
150
+
let refreshJwt = PDSharp.Core.Auth.createRefreshToken config.JwtSecret account.Did
151
+
ctx.SetStatusCode 200
152
+
153
+
return!
154
+
json
155
+
{
156
+
accessJwt = accessJwt
157
+
refreshJwt = refreshJwt
158
+
handle = account.Handle
159
+
did = account.Did
160
+
email = account.Email
161
+
}
162
+
next
163
+
ctx
164
+
}
165
+
166
+
let refreshSessionHandler : HttpHandler =
167
+
fun next ctx -> task {
168
+
let config = ctx.GetService<AppConfig>()
169
+
let accountStore = ctx.GetService<IAccountStore>()
170
+
171
+
match extractBearerToken ctx with
172
+
| None ->
173
+
ctx.SetStatusCode 401
174
+
175
+
return!
176
+
json
177
+
{
178
+
error = "AuthenticationRequired"
179
+
message = "Missing or invalid Authorization header"
180
+
}
181
+
next
182
+
ctx
183
+
| Some token ->
184
+
match PDSharp.Core.Auth.validateToken config.JwtSecret token with
185
+
| PDSharp.Core.Auth.Invalid reason ->
186
+
ctx.SetStatusCode 401
187
+
return! json { error = "ExpiredToken"; message = reason } next ctx
188
+
| PDSharp.Core.Auth.Valid(did, tokenType, _) ->
189
+
if tokenType <> PDSharp.Core.Auth.Refresh then
190
+
ctx.SetStatusCode 400
191
+
192
+
return!
193
+
json
194
+
{
195
+
error = "InvalidRequest"
196
+
message = "Refresh token required"
197
+
}
198
+
next
199
+
ctx
200
+
else
201
+
match! accountStore.GetAccountByDid did with
202
+
| None ->
203
+
ctx.SetStatusCode 401
204
+
return! json { error = "AccountNotFound"; message = "Account not found" } next ctx
205
+
| Some account ->
206
+
let accessJwt = PDSharp.Core.Auth.createAccessToken config.JwtSecret account.Did
207
+
let refreshJwt = PDSharp.Core.Auth.createRefreshToken config.JwtSecret account.Did
208
+
ctx.SetStatusCode 200
209
+
210
+
return!
211
+
json
212
+
{
213
+
accessJwt = accessJwt
214
+
refreshJwt = refreshJwt
215
+
handle = account.Handle
216
+
did = account.Did
217
+
email = account.Email
218
+
}
219
+
next
220
+
ctx
221
+
}
+149
PDSharp/Handlers/Common.fs
···
1
+
namespace PDSharp.Handlers
2
+
3
+
open System
4
+
open System.Text.Json
5
+
open System.Collections.Concurrent
6
+
open System.Net.WebSockets
7
+
open PDSharp.Core
8
+
open PDSharp.Core.Models
9
+
open PDSharp.Core.BlockStore
10
+
open PDSharp.Core.Repository
11
+
open PDSharp.Core.Mst
12
+
open PDSharp.Core.SqliteStore
13
+
open PDSharp.Core.Crypto
14
+
open PDSharp.Core.Firehose
15
+
16
+
/// Repo state per DID: MST root, collections, and head commit CID
17
+
type RepoData = {
18
+
Did : string
19
+
Head : Cid option
20
+
Mst : Mst.MstNode
21
+
Collections : Map<string, string>
22
+
}
23
+
24
+
/// Manages active Firehose WebSocket subscribers
25
+
type FirehoseState() =
26
+
member val Subscribers = ConcurrentDictionary<string, WebSocket>() with get
27
+
28
+
/// Simple in-memory key store (TODO: Persist)
29
+
type SigningKeyStore() =
30
+
let mutable keys : Map<string, EcKeyPair> = Map.empty
31
+
let lockObj = obj ()
32
+
33
+
member _.GetOrCreateKey(did : string) =
34
+
lock lockObj (fun () ->
35
+
match Map.tryFind did keys with
36
+
| Some k -> k
37
+
| None ->
38
+
let k = Crypto.generateKey P256
39
+
keys <- Map.add did k keys
40
+
k)
41
+
42
+
[<CLIMutable>]
43
+
type ErrorResponse = { error : string; message : string }
44
+
45
+
module Persistence =
46
+
let nodeLoader (blockStore : IBlockStore) (cid : Cid) = async {
47
+
let! data = blockStore.Get cid
48
+
return data |> Option.map Mst.deserialize
49
+
}
50
+
51
+
let nodePersister (blockStore : IBlockStore) (node : MstNode) = async {
52
+
let bytes = Mst.serialize node
53
+
return! blockStore.Put bytes
54
+
}
55
+
56
+
let loadRepo (repoStore : IRepoStore) (blockStore : IBlockStore) (did : string) : Async<RepoData option> = async {
57
+
let! rowOpt = repoStore.GetRepo did
58
+
59
+
match rowOpt with
60
+
| None -> return None
61
+
| Some row ->
62
+
let! mstNode = async {
63
+
if String.IsNullOrEmpty row.mst_root_cid then
64
+
return None
65
+
else
66
+
match Cid.TryParse row.mst_root_cid with
67
+
| None -> return None
68
+
| Some rootCid -> return! nodeLoader blockStore rootCid
69
+
}
70
+
71
+
let mst = mstNode |> Option.defaultValue { Left = None; Entries = [] }
72
+
73
+
let collections =
74
+
try
75
+
JsonSerializer.Deserialize<Map<string, string>>(row.collections_json)
76
+
with _ ->
77
+
Map.empty
78
+
79
+
let head =
80
+
if String.IsNullOrEmpty row.head_cid then
81
+
None
82
+
else
83
+
Cid.TryParse row.head_cid
84
+
85
+
return
86
+
Some {
87
+
Did = did
88
+
Head = head
89
+
Mst = mst
90
+
Collections = collections
91
+
}
92
+
}
93
+
94
+
let saveRepo (repoStore : IRepoStore) (blockStore : IBlockStore) (repo : RepoData) (rev : string) : Async<unit> = async {
95
+
let! rootCid = nodePersister blockStore repo.Mst
96
+
97
+
let row : RepoRow = {
98
+
did = repo.Did
99
+
rev = rev
100
+
mst_root_cid = rootCid.ToString()
101
+
head_cid =
102
+
(match repo.Head with
103
+
| Some c -> c.ToString()
104
+
| None -> "")
105
+
collections_json = JsonSerializer.Serialize repo.Collections
106
+
}
107
+
108
+
do! repoStore.SaveRepo row
109
+
}
110
+
111
+
let signAndStoreCommit
112
+
(blockStore : IBlockStore)
113
+
(keyStore : SigningKeyStore)
114
+
(did : string)
115
+
(mstRootCid : Cid)
116
+
(rev : string)
117
+
(prev : Cid option)
118
+
=
119
+
async {
120
+
let key = keyStore.GetOrCreateKey did
121
+
122
+
let unsigned : UnsignedCommit = {
123
+
Did = did
124
+
Version = 3
125
+
Data = mstRootCid
126
+
Rev = rev
127
+
Prev = prev
128
+
}
129
+
130
+
let signed = signCommit key unsigned
131
+
let commitBytes = serializeCommit signed
132
+
let! commitCid = blockStore.Put(commitBytes)
133
+
return signed, commitCid
134
+
}
135
+
136
+
let broadcastEvent (firehose : FirehoseState) (event : CommitEvent) =
137
+
let eventBytes = Firehose.encodeEvent event
138
+
let segment = ArraySegment<byte>(eventBytes)
139
+
140
+
for kvp in firehose.Subscribers do
141
+
let ws = kvp.Value
142
+
143
+
if ws.State = WebSocketState.Open then
144
+
try
145
+
ws.SendAsync(segment, WebSocketMessageType.Binary, true, System.Threading.CancellationToken.None)
146
+
|> Async.AwaitTask
147
+
|> Async.RunSynchronously
148
+
with _ ->
149
+
firehose.Subscribers.TryRemove(kvp.Key) |> ignore
+32
PDSharp/Handlers/Health.fs
···
namespace PDSharp.Handlers

open System.Text.Json
open Microsoft.AspNetCore.Http
open Giraffe
open PDSharp.Core.Health
open PDSharp.Core.Config

module HealthHandler =
    /// PDS version (could be read from assembly info)
    let private version = "0.1.0"

    /// JSON serialization options with camelCase naming
    let private jsonOptions =
        JsonSerializerOptions(PropertyNamingPolicy = JsonNamingPolicy.CamelCase, WriteIndented = true)

    /// Health check handler for /xrpc/_health endpoint
    let healthHandler : HttpHandler =
        fun next ctx -> task {
            let config = ctx.GetService<AppConfig>()
            let healthState = ctx.GetService<HealthState>()
            let status = buildHealthStatus version healthState config.SqliteConnectionString "." // Check disk of current working directory

            if status.DatabaseStatus.IsHealthy then
                ctx.SetStatusCode 200
            else
                ctx.SetStatusCode 503

            let json = JsonSerializer.Serialize(status, jsonOptions)
            ctx.SetContentType "application/json"
            return! text json next ctx
        }
+261
PDSharp/Handlers/Repo.fs
···
1
+
namespace PDSharp.Handlers
2
+
3
+
open System
4
+
open System.Text
5
+
open System.Text.Json
6
+
open Microsoft.AspNetCore.Http
7
+
open Giraffe
8
+
open PDSharp.Core
9
+
open PDSharp.Core.Models
10
+
open PDSharp.Core.BlockStore
11
+
open PDSharp.Core.Repository
12
+
open PDSharp.Core.Mst
13
+
open PDSharp.Core.Lexicon
14
+
open PDSharp.Core.Car
15
+
open PDSharp.Core.Firehose
16
+
open PDSharp.Core.SqliteStore
17
+
open PDSharp.Handlers
18
+
19
+
module Repo =
20
+
[<CLIMutable>]
21
+
type CreateRecordRequest = {
22
+
repo : string
23
+
collection : string
24
+
record : JsonElement
25
+
rkey : string option
26
+
}
27
+
28
+
[<CLIMutable>]
29
+
type CreateRecordResponse = {
30
+
uri : string
31
+
cid : string
32
+
commit : {| rev : string; cid : string |}
33
+
}
34
+
35
+
[<CLIMutable>]
36
+
type GetRecordResponse = { uri : string; cid : string; value : JsonElement }
37
+
38
+
let createRecordHandler : HttpHandler =
39
+
fun next ctx -> task {
40
+
let blockStore = ctx.GetService<IBlockStore>()
41
+
let repoStore = ctx.GetService<IRepoStore>()
42
+
let keyStore = ctx.GetService<SigningKeyStore>()
43
+
let firehose = ctx.GetService<FirehoseState>()
44
+
45
+
let! body = ctx.ReadBodyFromRequestAsync()
46
+
47
+
let request =
48
+
JsonSerializer.Deserialize<CreateRecordRequest>(body, JsonSerializerOptions(PropertyNameCaseInsensitive = true))
49
+
50
+
match Lexicon.validate request.collection request.record with
51
+
| Lexicon.Error msg ->
52
+
ctx.SetStatusCode 400
53
+
return! json { error = "InvalidRequest"; message = msg } next ctx
54
+
| Lexicon.Ok ->
55
+
let did = request.repo
56
+
57
+
let rkey =
58
+
match request.rkey with
59
+
| Some r when not (String.IsNullOrWhiteSpace r) -> r
60
+
| _ -> Tid.generate ()
61
+
62
+
let recordJson = request.record.GetRawText()
63
+
let recordBytes = Encoding.UTF8.GetBytes(recordJson)
64
+
let! recordCid = blockStore.Put(recordBytes)
65
+
66
+
let! existingRepoOpt = Persistence.loadRepo repoStore blockStore did
67
+
68
+
let repoData =
69
+
match existingRepoOpt with
70
+
| Some r -> r
71
+
| None -> {
72
+
Did = did
73
+
Head = None
74
+
Mst = { Left = None; Entries = [] }
75
+
Collections = Map.empty
76
+
}
77
+
78
+
let mstKey = $"{request.collection}/{rkey}"
79
+
let loader = Persistence.nodeLoader blockStore
80
+
let persister = Persistence.nodePersister blockStore
81
+
82
+
let! newMstRoot = Mst.put loader persister repoData.Mst mstKey recordCid ""
83
+
84
+
let newRev = Tid.generate ()
85
+
let! mstRootCid = persister newMstRoot
86
+
87
+
let! (_, commitCid) = Persistence.signAndStoreCommit blockStore keyStore did mstRootCid newRev repoData.Head
88
+
89
+
let updatedRepo = {
90
+
Did = did
91
+
Mst = newMstRoot
92
+
Collections = Map.empty
93
+
Head = Some commitCid
94
+
}
95
+
96
+
do! Persistence.saveRepo repoStore blockStore updatedRepo newRev
97
+
98
+
let carBytes = Car.createCar [ commitCid ] [ (recordCid, recordBytes) ]
99
+
let event = Firehose.createCommitEvent did newRev commitCid carBytes
100
+
Persistence.broadcastEvent firehose event
101
+
102
+
let uri = $"at://{did}/{request.collection}/{rkey}"
103
+
ctx.SetStatusCode 200
104
+
105
+
return!
106
+
json
107
+
{|
108
+
uri = uri
109
+
cid = recordCid.ToString()
110
+
commit = {| rev = newRev; cid = commitCid.ToString() |}
111
+
|}
112
+
next
113
+
ctx
114
+
}
115
+
116
+
let getRecordHandler : HttpHandler =
117
+
fun next ctx -> task {
118
+
let repoStore = ctx.GetService<IRepoStore>()
119
+
let blockStore = ctx.GetService<IBlockStore>()
120
+
121
+
let repo = ctx.Request.Query.["repo"].ToString()
122
+
let collection = ctx.Request.Query.["collection"].ToString()
123
+
let rkey = ctx.Request.Query.["rkey"].ToString()
124
+
125
+
if
126
+
String.IsNullOrWhiteSpace(repo)
127
+
|| String.IsNullOrWhiteSpace(collection)
128
+
|| String.IsNullOrWhiteSpace(rkey)
129
+
then
130
+
ctx.SetStatusCode 400
131
+
132
+
return!
133
+
json
134
+
{
135
+
error = "InvalidRequest"
136
+
message = "Missing required params"
137
+
}
138
+
next
139
+
ctx
140
+
else
141
+
let! repoOpt = Persistence.loadRepo repoStore blockStore repo
142
+
143
+
match repoOpt with
144
+
| None ->
145
+
ctx.SetStatusCode 404
146
+
147
+
return!
148
+
json
149
+
{
150
+
error = "RepoNotFound"
151
+
message = $"Repository not found: {repo}"
152
+
}
153
+
next
154
+
ctx
155
+
| Some repoData ->
156
+
let mstKey = $"{collection}/{rkey}"
157
+
let loader = Persistence.nodeLoader blockStore
158
+
let! cidOpt = Mst.get loader repoData.Mst mstKey ""
159
+
160
+
match cidOpt with
161
+
| None ->
162
+
ctx.SetStatusCode 404
163
+
return! json { error = "RecordNotFound"; message = "Record not found" } next ctx
164
+
| Some recordCid ->
165
+
let! recordBytesOpt = blockStore.Get(recordCid)
166
+
167
+
match recordBytesOpt with
168
+
| None ->
169
+
ctx.SetStatusCode 500
170
+
return! json { error = "InternalError"; message = "Block missing" } next ctx
171
+
| Some recordBytes ->
172
+
let recordJson = Encoding.UTF8.GetString(recordBytes)
173
+
let uri = $"at://{repo}/{collection}/{rkey}"
174
+
let valueElement = JsonSerializer.Deserialize<JsonElement>(recordJson)
175
+
ctx.SetStatusCode 200
176
+
177
+
return!
178
+
json
179
+
{|
180
+
uri = uri
181
+
cid = recordCid.ToString()
182
+
value = valueElement
183
+
|}
184
+
next
185
+
ctx
186
+
}
187
+
188
+
let putRecordHandler : HttpHandler =
189
+
fun next ctx -> task {
190
+
let blockStore = ctx.GetService<IBlockStore>()
191
+
let repoStore = ctx.GetService<IRepoStore>()
192
+
let keyStore = ctx.GetService<SigningKeyStore>()
193
+
let firehose = ctx.GetService<FirehoseState>()
194
+
195
+
let! body = ctx.ReadBodyFromRequestAsync()
196
+
197
+
let request =
198
+
JsonSerializer.Deserialize<CreateRecordRequest>(body, JsonSerializerOptions(PropertyNameCaseInsensitive = true))
199
+
200
+
match Lexicon.validate request.collection request.record with
201
+
| Lexicon.Error msg ->
202
+
ctx.SetStatusCode 400
203
+
return! json { error = "InvalidRequest"; message = msg } next ctx
204
+
| Lexicon.Ok ->
205
+
match request.rkey with
206
+
| Some r when not (String.IsNullOrWhiteSpace r) ->
207
+
let did = request.repo
208
+
let recordJson = request.record.GetRawText()
209
+
let recordBytes = Encoding.UTF8.GetBytes(recordJson)
210
+
let! recordCid = blockStore.Put(recordBytes)
211
+
212
+
let! existingRepoOpt = Persistence.loadRepo repoStore blockStore did
213
+
214
+
let repoData =
215
+
match existingRepoOpt with
216
+
| Some r -> r
217
+
| None -> {
218
+
Did = did
219
+
Head = None
220
+
Mst = { Left = None; Entries = [] }
221
+
Collections = Map.empty
222
+
}
223
+
224
+
let mstKey = $"{request.collection}/{r}"
225
+
let loader = Persistence.nodeLoader blockStore
226
+
let persister = Persistence.nodePersister blockStore
227
+
228
+
let! newMstRoot = Mst.put loader persister repoData.Mst mstKey recordCid ""
229
+
let! mstRootCid = persister newMstRoot
230
+
231
+
let newRev = Tid.generate ()
232
+
let! (_, commitCid) = Persistence.signAndStoreCommit blockStore keyStore did mstRootCid newRev repoData.Head
233
+
234
+
let updatedRepo = {
235
+
Did = did
236
+
Mst = newMstRoot
237
+
Collections = Map.empty
238
+
Head = Some commitCid
239
+
}
240
+
241
+
do! Persistence.saveRepo repoStore blockStore updatedRepo newRev
242
+
243
+
let carBytes = Car.createCar [ commitCid ] [ (recordCid, recordBytes) ]
244
+
let event = Firehose.createCommitEvent did newRev commitCid carBytes
245
+
Persistence.broadcastEvent firehose event
246
+
247
+
ctx.SetStatusCode 200
248
+
249
+
return!
250
+
json
251
+
{|
252
+
uri = $"at://{did}/{request.collection}/{r}"
253
+
cid = recordCid.ToString()
254
+
commit = {| rev = newRev; cid = commitCid.ToString() |}
255
+
|}
256
+
next
257
+
ctx
258
+
| _ ->
259
+
ctx.SetStatusCode 400
260
+
return! json { error = "InvalidRequest"; message = "rkey is required" } next ctx
261
+
}
+66
PDSharp/Handlers/Server.fs
···
1
+
namespace PDSharp.Handlers
2
+
3
+
open Microsoft.AspNetCore.Http
4
+
open Giraffe
5
+
open PDSharp.Core.Config
6
+
7
+
// =========================================
8
+
// Server & Meta Handlers
9
+
// =========================================
10
+
11
+
module Server =
12
+
[<CLIMutable>]
13
+
type DescribeServerResponse = {
14
+
availableUserDomains : string list
15
+
did : string
16
+
inviteCodeRequired : bool
17
+
}
18
+
19
+
let describeServerHandler : HttpHandler =
20
+
fun next ctx ->
21
+
let config = ctx.GetService<AppConfig>()
22
+
23
+
let response = {
24
+
availableUserDomains = []
25
+
did = config.DidHost
26
+
inviteCodeRequired = true
27
+
}
28
+
29
+
json response next ctx
30
+
31
+
let indexHandler : HttpHandler =
32
+
fun next ctx ->
33
+
let html =
34
+
"""<html>
35
+
<head><title>PDSharp</title></head>
36
+
<body>
37
+
<pre>
38
+
888 888 8888888888 888 888
39
+
888 888 888 888 888
40
+
888 888 888 888888888888
41
+
8888b. 888888 88888b. 888d888 .d88b. 888888 .d88b. 88 8888888 888 888
42
+
"88b 888 888 "88b 888P" d88""88b 888 d88""88b 888888 888 888 888
43
+
.d888888 888 888 888 888 888 888 888 888 888 88 888 888888888888
44
+
888 888 Y88b. 888 d88P 888 Y88..88P Y88b. Y88..88P 888 888 888
45
+
"Y888888 "Y888 88888P" 888 "Y88P" "Y888 "Y88P" 888 888 888
46
+
888
47
+
888
48
+
888
49
+
50
+
51
+
This is an AT Protocol Personal Data Server (aka, an atproto PDS)
52
+
53
+
Most API routes are under /xrpc/
54
+
55
+
Code: https://github.com/bluesky-social/atproto
56
+
https://github.com/stormlightlabs/PDSharp
57
+
https://tangled.org/desertthunder.dev/PDSharp
58
+
Self-Host: https://github.com/bluesky-social/pds
59
+
Protocol: https://atproto.com
60
+
</pre>
61
+
</body>
62
+
</html>"""
63
+
64
+
ctx.SetContentType "text/html"
65
+
ctx.SetStatusCode 200
66
+
ctx.WriteStringAsync html
+137
PDSharp/Handlers/Sync.fs
···
1
+
namespace PDSharp.Handlers
2
+
3
+
open System
4
+
open System.Threading.Tasks
5
+
open System.Net.WebSockets
6
+
open Microsoft.AspNetCore.Http
7
+
open Giraffe
8
+
open PDSharp.Core
9
+
open PDSharp.Core.Models
10
+
open PDSharp.Core.BlockStore
11
+
open PDSharp.Core.Repository
12
+
open PDSharp.Core.Car
13
+
open PDSharp.Core.BlobStore
14
+
open PDSharp.Core.SqliteStore
15
+
open PDSharp.Handlers
16
+
17
+
module Sync =
18
+
let getRepoHandler : HttpHandler =
19
+
fun next ctx -> task {
20
+
let repoStore = ctx.GetService<IRepoStore>()
21
+
let blockStore = ctx.GetService<IBlockStore>()
22
+
let did = ctx.Request.Query.["did"].ToString()
23
+
24
+
if String.IsNullOrWhiteSpace(did) then
25
+
ctx.SetStatusCode 400
26
+
return! json { error = "InvalidRequest"; message = "Missing did" } next ctx
27
+
else
28
+
let! repoOpt = Persistence.loadRepo repoStore blockStore did
29
+
30
+
match repoOpt with
31
+
| None ->
32
+
ctx.SetStatusCode 404
33
+
return! json { error = "RepoNotFound"; message = "Repository not found" } next ctx
34
+
| Some repoData ->
35
+
match repoData.Head with
36
+
| None ->
37
+
ctx.SetStatusCode 404
38
+
39
+
return!
40
+
json
41
+
{
42
+
error = "RepoNotFound"
43
+
message = "Repository has no commits"
44
+
}
45
+
next
46
+
ctx
47
+
| Some headCid ->
48
+
let! allBlocks = blockStore.GetAllCidsAndData()
49
+
let carBytes = Car.createCar [ headCid ] allBlocks
50
+
ctx.SetContentType "application/vnd.ipld.car"
51
+
ctx.SetStatusCode 200
52
+
return! ctx.WriteBytesAsync carBytes
53
+
}
54
+
55
+
let getBlocksHandler : HttpHandler =
56
+
fun next ctx -> task {
57
+
let repoStore = ctx.GetService<IRepoStore>()
58
+
let blockStore = ctx.GetService<IBlockStore>()
59
+
let did = ctx.Request.Query.["did"].ToString()
60
+
let cidsParam = ctx.Request.Query.["cids"].ToString()
61
+
62
+
if String.IsNullOrWhiteSpace did || String.IsNullOrWhiteSpace cidsParam then
63
+
ctx.SetStatusCode 400
64
+
return! json { error = "InvalidRequest"; message = "Missing parameters" } next ctx
65
+
else
66
+
let! repoOpt = Persistence.loadRepo repoStore blockStore did
67
+
68
+
match repoOpt with
69
+
| None ->
70
+
ctx.SetStatusCode 404
71
+
return! json { error = "RepoNotFound"; message = "Repository not found" } next ctx
72
+
| Some _ ->
73
+
let cidStrs = cidsParam.Split(',') |> Array.map (fun s -> s.Trim())
74
+
let parsedCids = cidStrs |> Array.choose Cid.TryParse |> Array.toList
75
+
76
+
let! blocks =
77
+
parsedCids
78
+
|> List.map (fun cid -> async {
79
+
let! dataOpt = blockStore.Get cid
80
+
return dataOpt |> Option.map (fun d -> (cid, d))
81
+
})
82
+
|> Async.Sequential
83
+
84
+
let foundBlocks = blocks |> Array.choose id |> Array.toList
85
+
let roots = if foundBlocks.IsEmpty then [] else [ fst foundBlocks.Head ]
86
+
let carBytes = Car.createCar roots foundBlocks
87
+
ctx.SetContentType "application/vnd.ipld.car"
88
+
ctx.SetStatusCode 200
89
+
return! ctx.WriteBytesAsync carBytes
90
+
}
91
+
92
+
let getBlobHandler : HttpHandler =
93
+
fun next ctx -> task {
94
+
let blobStore = ctx.GetService<IBlobStore>()
95
+
let did = ctx.Request.Query.["did"].ToString()
96
+
let cidStr = ctx.Request.Query.["cid"].ToString()
97
+
98
+
if String.IsNullOrWhiteSpace(did) || String.IsNullOrWhiteSpace(cidStr) then
99
+
ctx.SetStatusCode 400
100
+
return! json { error = "InvalidRequest"; message = "Missing parameters" } next ctx
101
+
else
102
+
match Cid.TryParse cidStr with
103
+
| None ->
104
+
ctx.SetStatusCode 400
105
+
return! json { error = "InvalidCid"; message = "Invalid CID" } next ctx
106
+
| Some cid ->
107
+
let! blobOpt = blobStore.Get cid
108
+
109
+
match blobOpt with
110
+
| Some blob ->
111
+
ctx.SetContentType "application/octet-stream"
112
+
ctx.SetStatusCode 200
113
+
return! ctx.WriteBytesAsync blob
114
+
| None ->
115
+
ctx.SetStatusCode 404
116
+
return! json { error = "NotFound"; message = "Blob not found" } next ctx
117
+
}
118
+
119
+
let subscribeReposHandler : HttpHandler =
120
+
fun next ctx -> task {
121
+
if ctx.WebSockets.IsWebSocketRequest then
122
+
let firehose = ctx.GetService<FirehoseState>()
123
+
let! ws = ctx.WebSockets.AcceptWebSocketAsync()
124
+
let id = Guid.NewGuid().ToString()
125
+
firehose.Subscribers.TryAdd(id, ws) |> ignore
126
+
127
+
try
128
+
while ws.State = WebSocketState.Open do
129
+
do! Task.Delay 1000
130
+
finally
131
+
firehose.Subscribers.TryRemove(id) |> ignore
132
+
133
+
return Some ctx
134
+
else
135
+
ctx.SetStatusCode 400
136
+
return! text "WebSocket upgrade required" next ctx
137
+
}
+7
-1
PDSharp/PDSharp.fsproj
···
    </PropertyGroup>

    <ItemGroup>
-     <Compile Include="Program.fs" />
+     <Compile Include="Handlers/Common.fs" />
+     <Compile Include="Handlers/Server.fs" />
+     <Compile Include="Handlers/Auth.fs" />
+     <Compile Include="Handlers/Repo.fs" />
+     <Compile Include="Handlers/Sync.fs" />
+     <Compile Include="Handlers/Health.fs" />
+     <Compile Include="Program.fs" />
    </ItemGroup>

    <ItemGroup>
+90
-26
PDSharp/Program.fs
···
  open System
- open System.IO
  open Microsoft.AspNetCore.Builder
  open Microsoft.AspNetCore.Hosting
  open Microsoft.Extensions.Hosting
  open Microsoft.Extensions.DependencyInjection
- open Microsoft.Extensions.Configuration
  open Giraffe
- open PDSharp.Core.Models
+ open PDSharp.Core
+ open PDSharp.Core.Auth
+ open PDSharp.Core.BlockStore
+ open PDSharp.Core.SqliteStore
+ open PDSharp.Core.BlobStore
  open PDSharp.Core.Config
+ open PDSharp.Core.Health
+ open PDSharp.Handlers

- module App =
+ let getConfig () =
+     let env (k : string) (def : string) =
+         match Environment.GetEnvironmentVariable k with
+         | null -> def
+         | v -> v

-     let describeServerHandler : HttpHandler =
-         fun next ctx ->
-             let config = ctx.GetService<AppConfig>()
+     let publicUrl = env "PDSHARP_PublicUrl" "http://localhost:5000"
+     let dbPath = env "PDSHARP_DbPath" "pdsharp.db"

-             // TODO: add to config
-             let response = {
-                 availableUserDomains = []
-                 did = config.DidHost
-                 inviteCodeRequired = true
+     let disableWalAutoCheckpoint =
+         env "PDSHARP_SQLITE_DISABLE_WAL_AUTO_CHECKPOINT" "false" |> bool.Parse
+
+     let blobStoreConfig =
+         match env "PDSHARP_BLOBSTORE_TYPE" "disk" with
+         | "s3" ->
+             S3 {
+                 Bucket = env "PDSHARP_S3_BUCKET" "pdsharp-blobs"
+                 Region = env "PDSHARP_S3_REGION" "us-east-1"
+                 AccessKey = Option.ofObj (Environment.GetEnvironmentVariable "PDSHARP_S3_ACCESS_KEY")
+                 SecretKey = Option.ofObj (Environment.GetEnvironmentVariable "PDSHARP_S3_SECRET_KEY")
+                 ServiceUrl = Option.ofObj (Environment.GetEnvironmentVariable "PDSHARP_S3_SERVICE_URL")
+                 ForcePathStyle = env "PDSHARP_S3_FORCE_PATH_STYLE" "false" |> bool.Parse
              }
+         | _ -> Disk "blobs"

-         json response next ctx
+     {
+         PublicUrl = publicUrl
+         DidHost = env "PDSHARP_DidHost" "did:web:localhost"
+         JwtSecret = env "PDSHARP_JwtSecret" "development-secret-do-not-use-in-prod"
+         SqliteConnectionString = $"Data Source={dbPath}"
+         DisableWalAutoCheckpoint = disableWalAutoCheckpoint
+         BlobStore = blobStoreConfig
+     }

-     let webApp =
+ let config = getConfig ()
+
+ SqliteStore.initialize config
+
+ module App =
+     let appRouter =
          choose [
-             route "/xrpc/com.atproto.server.describeServer" >=> describeServerHandler
+             GET
+             >=> choose [
+                 route "/" >=> Server.indexHandler
+                 route "/xrpc/com.atproto.server.describeServer" >=> Server.describeServerHandler
+                 route "/xrpc/_health" >=> HealthHandler.healthHandler
+             ]
+             POST
+             >=> route "/xrpc/com.atproto.server.createAccount"
+             >=> Auth.createAccountHandler
+             POST
+             >=> route "/xrpc/com.atproto.server.createSession"
+             >=> Auth.createSessionHandler
+             POST
+             >=> route "/xrpc/com.atproto.server.refreshSession"
+             >=> Auth.refreshSessionHandler
+             POST
+             >=> route "/xrpc/com.atproto.repo.createRecord"
+             >=> Repo.createRecordHandler
+             GET >=> route "/xrpc/com.atproto.repo.getRecord" >=> Repo.getRecordHandler
+             POST >=> route "/xrpc/com.atproto.repo.putRecord" >=> Repo.putRecordHandler
+             GET >=> route "/xrpc/com.atproto.sync.getRepo" >=> Sync.getRepoHandler
+             GET >=> route "/xrpc/com.atproto.sync.getBlocks" >=> Sync.getBlocksHandler
+             GET >=> route "/xrpc/com.atproto.sync.getBlob" >=> Sync.getBlobHandler
+             GET
+             >=> route "/xrpc/com.atproto.sync.subscribeRepos"
+             >=> Sync.subscribeReposHandler
              route "/" >=> text "PDSharp PDS is running."
              RequestErrors.NOT_FOUND "Not Found"
          ]

-     let configureApp (app : IApplicationBuilder) = app.UseGiraffe webApp
+     let webApp (app : IApplicationBuilder) =
+         app.UseWebSockets() |> ignore
+         app.UseGiraffe appRouter

      let configureServices (config : AppConfig) (services : IServiceCollection) =
          services.AddGiraffe() |> ignore
          services.AddSingleton<AppConfig>(config) |> ignore

+         let blockStore = new SqliteBlockStore(config.SqliteConnectionString)
+         let accountStore = new SqliteAccountStore(config.SqliteConnectionString)
+         let repoStore = new SqliteRepoStore(config.SqliteConnectionString)
+
+         services.AddSingleton<IBlockStore> blockStore |> ignore
+         services.AddSingleton<IAccountStore> accountStore |> ignore
+         services.AddSingleton<IRepoStore> repoStore |> ignore
+
+         let blobStore : IBlobStore =
+             match config.BlobStore with
+             | Disk path -> new DiskBlobStore(path) :> IBlobStore
+             | S3 s3Config -> new S3BlobStore(s3Config) :> IBlobStore
+
+         services.AddSingleton<IBlobStore> blobStore |> ignore
+         services.AddSingleton<FirehoseState>(new FirehoseState()) |> ignore
+         services.AddSingleton<SigningKeyStore>(new SigningKeyStore()) |> ignore
+         services.AddSingleton<HealthState>(new HealthState()) |> ignore
+
      [<EntryPoint>]
      let main args =
-         let configBuilder =
-             ConfigurationBuilder()
-                 .SetBasePath(AppContext.BaseDirectory)
-                 .AddJsonFile("appsettings.json", optional = false, reloadOnChange = true)
-                 .AddEnvironmentVariables(prefix = "PDSHARP_")
-                 .Build()
-
-         let appConfig = configBuilder.Get<AppConfig>()
-
          Host
              .CreateDefaultBuilder(args)
              .ConfigureWebHostDefaults(fun webHostBuilder ->
-                 webHostBuilder.Configure(configureApp).ConfigureServices(configureServices appConfig)
+                 webHostBuilder.Configure(webApp).ConfigureServices(configureServices config)
                  |> ignore)
              .Build()
              .Run()
+2
-1
PDSharp/appsettings.json
+41
PDSharp.Core/AtUri.fs
···
namespace PDSharp.Core

open System
open System.Text.RegularExpressions

/// AT-URI parsing and validation
module AtUri =
    /// Represents an AT Protocol URI: at://did/collection/rkey
    type AtUri = { Did : string; Collection : string; Rkey : string }

    let private didPattern = @"^did:[a-z]+:[a-zA-Z0-9._:%-]+$"
    let private nsidPattern = @"^[a-z][a-z0-9]*(\.[a-z][a-z0-9]*)+$"
    let private rkeyPattern = @"^[a-zA-Z0-9._~-]+$"

    /// Parse an AT-URI string into components
    let parse (uri : string) : Result<AtUri, string> =
        if not (uri.StartsWith("at://")) then
            Error "AT-URI must start with at://"
        else
            let path = uri.Substring(5)
            let parts = path.Split('/')

            if parts.Length < 3 then
                Error "AT-URI must have format at://did/collection/rkey"
            else
                let did = parts.[0]
                let collection = parts.[1]
                let rkey = parts.[2]

                if not (Regex.IsMatch(did, didPattern)) then
                    Error $"Invalid DID format: {did}"
                elif not (Regex.IsMatch(collection, nsidPattern)) then
                    Error $"Invalid collection NSID: {collection}"
                elif not (Regex.IsMatch(rkey, rkeyPattern)) then
                    Error $"Invalid rkey format: {rkey}"
                else
                    Ok { Did = did; Collection = collection; Rkey = rkey }

    /// Convert AtUri back to string
    let toString (uri : AtUri) : string =
        $"at://{uri.Did}/{uri.Collection}/{uri.Rkey}"
+218
PDSharp.Core/Auth.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Text
5
+
open Org.BouncyCastle.Crypto.Digests
6
+
open Org.BouncyCastle.Crypto.Macs
7
+
open Org.BouncyCastle.Crypto.Parameters
8
+
open Org.BouncyCastle.Security
9
+
10
+
/// Authentication module for sessions and accounts
11
+
/// TODO: Migrate account storage from in-memory to SQLite/Postgres for production
12
+
module Auth =
13
+
/// Hash a password with a random salt using SHA-256
14
+
///
15
+
/// Returns: base64(salt)$base64(hash)
16
+
let hashPassword (password : string) : string =
17
+
let salt = Array.zeroCreate<byte> 16
18
+
SecureRandom().NextBytes(salt)
19
+
20
+
let passwordBytes = Encoding.UTF8.GetBytes(password)
21
+
let toHash = Array.append salt passwordBytes
22
+
23
+
let digest = Sha256Digest()
24
+
digest.BlockUpdate(toHash, 0, toHash.Length)
25
+
let hash = Array.zeroCreate<byte> (digest.GetDigestSize())
26
+
digest.DoFinal(hash, 0) |> ignore
27
+
28
+
$"{Convert.ToBase64String(salt)}${Convert.ToBase64String(hash)}"
29
+
30
+
/// Verify a password against a stored hash
31
+
let verifyPassword (password : string) (storedHash : string) : bool =
32
+
let parts = storedHash.Split('$')
33
+
34
+
if parts.Length <> 2 then
35
+
false
36
+
else
37
+
try
38
+
let salt = Convert.FromBase64String(parts.[0])
39
+
let expectedHash = Convert.FromBase64String(parts.[1])
40
+
41
+
let passwordBytes = Encoding.UTF8.GetBytes(password)
42
+
let toHash = Array.append salt passwordBytes
43
+
44
+
let digest = Sha256Digest()
45
+
digest.BlockUpdate(toHash, 0, toHash.Length)
46
+
let actualHash = Array.zeroCreate<byte> (digest.GetDigestSize())
47
+
digest.DoFinal(actualHash, 0) |> ignore
48
+
49
+
actualHash = expectedHash
50
+
with _ ->
51
+
false
52
+
53
+
let private base64UrlEncode (bytes : byte[]) : string =
54
+
Convert.ToBase64String(bytes).Replace('+', '-').Replace('/', '_').TrimEnd('=')
55
+
56
+
let private base64UrlDecode (str : string) : byte[] =
57
+
let padded =
58
+
match str.Length % 4 with
59
+
| 2 -> str + "=="
60
+
| 3 -> str + "="
61
+
| _ -> str
62
+
63
+
Convert.FromBase64String(padded.Replace('-', '+').Replace('_', '/'))
64
+
65
+
let private hmacSha256 (secret : byte[]) (data : byte[]) : byte[] =
66
+
let hmac = HMac(Sha256Digest())
67
+
hmac.Init(KeyParameter(secret))
68
+
hmac.BlockUpdate(data, 0, data.Length)
69
+
let result = Array.zeroCreate<byte> (hmac.GetMacSize())
70
+
hmac.DoFinal(result, 0) |> ignore
71
+
result
72
+
73
+
/// Token type for domain separation per AT Protocol spec
74
+
type TokenType =
75
+
| Access // typ: at+jwt
76
+
| Refresh // typ: refresh+jwt
77
+
78
+
/// Create a JWT token
79
+
let createToken (secret : string) (tokenType : TokenType) (did : string) (expiresIn : TimeSpan) : string =
80
+
let now = DateTimeOffset.UtcNow
81
+
let exp = now.Add(expiresIn)
82
+
83
+
let typ =
84
+
match tokenType with
85
+
| Access -> "at+jwt"
86
+
| Refresh -> "refresh+jwt"
87
+
88
+
let jti = Guid.NewGuid().ToString("N")
89
+
90
+
let header = $"""{{ "alg": "HS256", "typ": "{typ}" }}"""
91
+
let headerB64 = base64UrlEncode (Encoding.UTF8.GetBytes(header))
92
+
93
+
let payload =
94
+
$"""{{ "sub": "{did}", "iat": {now.ToUnixTimeSeconds()}, "exp": {exp.ToUnixTimeSeconds()}, "jti": "{jti}" }}"""
95
+
96
+
let payloadB64 = base64UrlEncode (Encoding.UTF8.GetBytes(payload))
97
+
98
+
let signingInput = $"{headerB64}.{payloadB64}"
99
+
let secretBytes = Encoding.UTF8.GetBytes(secret)
100
+
let signature = hmacSha256 secretBytes (Encoding.UTF8.GetBytes(signingInput))
101
+
let signatureB64 = base64UrlEncode signature
102
+
103
+
$"{headerB64}.{payloadB64}.{signatureB64}"
104
+
105
+
/// Create an access token (short-lived)
106
+
let createAccessToken (secret : string) (did : string) : string =
107
+
createToken secret Access did (TimeSpan.FromMinutes(15.0))
108
+
109
+
/// Create a refresh token (longer-lived)
110
+
let createRefreshToken (secret : string) (did : string) : string =
111
+
createToken secret Refresh did (TimeSpan.FromDays(7.0))
112
+
113
+
/// Validation result
114
+
type TokenValidation =
115
+
| Valid of did : string * tokenType : TokenType * exp : int64
116
+
| Invalid of reason : string
117
+
118
+
/// Validate a JWT token and extract claims
119
+
let validateToken (secret : string) (token : string) : TokenValidation =
120
+
let parts = token.Split('.')
121
+
122
+
if parts.Length <> 3 then
123
+
Invalid "Invalid token format"
124
+
else
125
+
try
126
+
let headerB64, payloadB64, signatureB64 = parts.[0], parts.[1], parts.[2]
127
+
128
+
let signingInput = $"{headerB64}.{payloadB64}"
129
+
let secretBytes = Encoding.UTF8.GetBytes(secret)
130
+
let expectedSig = hmacSha256 secretBytes (Encoding.UTF8.GetBytes(signingInput))
131
+
let actualSig = base64UrlDecode signatureB64
132
+
133
+
if expectedSig <> actualSig then
134
+
Invalid "Invalid signature"
135
+
else
136
+
let payloadJson = Encoding.UTF8.GetString(base64UrlDecode payloadB64)
137
+
let headerJson = Encoding.UTF8.GetString(base64UrlDecode headerB64)
138
+
139
+
let typMatch =
140
+
System.Text.RegularExpressions.Regex.Match(headerJson, "\"typ\"\\s*:\\s*\"([^\"]+)\"")
141
+
142
+
let tokenType =
143
+
if typMatch.Success then
144
+
match typMatch.Groups.[1].Value with
145
+
| "at+jwt" -> Access
146
+
| "refresh+jwt" -> Refresh
147
+
| _ -> Access
148
+
else
149
+
Access
150
+
151
+
let subMatch =
152
+
System.Text.RegularExpressions.Regex.Match(payloadJson, "\"sub\"\\s*:\\s*\"([^\"]+)\"")
153
+
154
+
let expMatch =
155
+
System.Text.RegularExpressions.Regex.Match(payloadJson, "\"exp\"\\s*:\\s*([0-9]+)")
156
+
157
+
if not subMatch.Success || not expMatch.Success then
158
+
Invalid "Missing claims"
159
+
else
160
+
let did = subMatch.Groups.[1].Value
161
+
let exp = Int64.Parse(expMatch.Groups.[1].Value)
162
+
let now = DateTimeOffset.UtcNow.ToUnixTimeSeconds()
163
+
164
+
if now > exp then
165
+
Invalid "Token expired"
166
+
else
167
+
Valid(did, tokenType, exp)
168
+
with ex ->
169
+
Invalid $"Parse error: {ex.Message}"
170
+
171
+
/// Account record
172
+
type Account = {
173
+
Did : string
174
+
Handle : string
175
+
PasswordHash : string
176
+
Email : string option
177
+
CreatedAt : DateTimeOffset
178
+
}
179
+
180
+
/// Interface for account persistence
181
+
type IAccountStore =
182
+
abstract member CreateAccount : Account -> Async<Result<unit, string>>
183
+
abstract member GetAccountByHandle : string -> Async<Account option>
184
+
abstract member GetAccountByDid : string -> Async<Account option>
185
+
186
+
/// Create a new account
187
+
let createAccount
188
+
(store : IAccountStore)
189
+
(handle : string)
190
+
(password : string)
191
+
(email : string option)
192
+
: Async<Result<Account, string>> =
193
+
async {
194
+
let! existingHandle = store.GetAccountByHandle handle
195
+
196
+
match existingHandle with
197
+
| Some _ -> return Error "Handle already taken"
198
+
| None ->
199
+
let did = $"did:web:{handle}"
200
+
let! existingDid = store.GetAccountByDid did
201
+
202
+
match existingDid with
203
+
| Some _ -> return Error "Account already exists"
204
+
| None ->
205
+
let account = {
206
+
Did = did
207
+
Handle = handle
208
+
PasswordHash = hashPassword password
209
+
Email = email
210
+
CreatedAt = DateTimeOffset.UtcNow
211
+
}
212
+
213
+
let! result = store.CreateAccount account
214
+
215
+
match result with
216
+
| Ok() -> return Ok account
217
+
| Error e -> return Error e
218
+
}
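A minimal usage sketch (not part of the diff; the secret and DID below are throwaway values, not the server's real configuration):

open PDSharp.Core

// Mint an access/refresh pair and validate the refresh token.
let sessionTokenExample () =
    let secret = "example-secret"
    let did = "did:web:alice.example"
    let accessJwt = Auth.createAccessToken secret did    // typ "at+jwt", ~15 minutes
    let refreshJwt = Auth.createRefreshToken secret did  // typ "refresh+jwt", ~7 days
    match Auth.validateToken secret refreshJwt with
    | Auth.Valid (sub, Auth.Refresh, exp) -> printfn "refresh token for %s, exp %d" sub exp
    | Auth.Valid (sub, _, _) -> printfn "unexpected token type for %s" sub
    | Auth.Invalid reason -> printfn "rejected: %s" reason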
+124
PDSharp.Core/BlobStore.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System.IO
4
+
open Amazon.S3
5
+
open Amazon.S3.Model
6
+
open PDSharp.Core.Config
7
+
8
+
/// Interface for binary large object (blob) storage
9
+
type IBlobStore =
10
+
/// Store a blob by CID
11
+
abstract member Put : Cid * byte[] -> Async<unit>
12
+
/// Retrieve a blob by CID
13
+
abstract member Get : Cid -> Async<byte[] option>
14
+
/// Check if a blob exists (optional optimization)
15
+
abstract member Has : Cid -> Async<bool>
16
+
/// Delete a blob by CID
17
+
abstract member Delete : Cid -> Async<unit>
18
+
19
+
module BlobStore =
20
+
21
+
/// File-system based blob store
22
+
type DiskBlobStore(encodedRootPath : string) =
23
+
let rootPath =
24
+
if Path.IsPathRooted encodedRootPath then
25
+
encodedRootPath
26
+
else
27
+
Path.Combine(Directory.GetCurrentDirectory(), encodedRootPath)
28
+
29
+
do
30
+
if not (Directory.Exists rootPath) then
31
+
Directory.CreateDirectory(rootPath) |> ignore
32
+
33
+
let getPath (cid : Cid) = Path.Combine(rootPath, cid.ToString())
34
+
35
+
interface IBlobStore with
36
+
member _.Put(cid, data) = async {
37
+
let path = getPath cid
38
+
39
+
if not (File.Exists path) then
40
+
do! File.WriteAllBytesAsync(path, data) |> Async.AwaitTask
41
+
}
42
+
43
+
member _.Get(cid) = async {
44
+
let path = getPath cid
45
+
46
+
if File.Exists path then
47
+
let! data = File.ReadAllBytesAsync(path) |> Async.AwaitTask
48
+
return Some data
49
+
else
50
+
return None
51
+
}
52
+
53
+
member _.Has(cid) = async { return File.Exists(getPath cid) }
54
+
55
+
member _.Delete(cid) = async {
56
+
let path = getPath cid
57
+
58
+
if File.Exists path then
59
+
File.Delete path
60
+
}
61
+
62
+
/// S3-based blob store
63
+
type S3BlobStore(config : S3Config) =
64
+
let client =
65
+
let clientConfig =
66
+
AmazonS3Config(RegionEndpoint = Amazon.RegionEndpoint.GetBySystemName config.Region)
67
+
68
+
match config.ServiceUrl with
69
+
| Some url -> clientConfig.ServiceURL <- url
70
+
| None -> ()
71
+
72
+
clientConfig.ForcePathStyle <- config.ForcePathStyle
73
+
74
+
match config.AccessKey, config.SecretKey with
75
+
| Some access, Some secret -> new AmazonS3Client(access, secret, clientConfig)
76
+
| _ -> new AmazonS3Client(clientConfig)
77
+
78
+
let bucket = config.Bucket
79
+
80
+
interface IBlobStore with
81
+
member _.Put(cid, data) = async {
82
+
let request = PutObjectRequest()
83
+
request.BucketName <- bucket
84
+
request.Key <- cid.ToString()
85
+
use ms = new MemoryStream(data)
86
+
request.InputStream <- ms
87
+
let! _ = client.PutObjectAsync(request) |> Async.AwaitTask
88
+
()
89
+
}
90
+
91
+
member _.Get(cid) = async {
92
+
try
93
+
let request = GetObjectRequest()
94
+
request.BucketName <- bucket
95
+
request.Key <- cid.ToString()
96
+
97
+
use! response = client.GetObjectAsync(request) |> Async.AwaitTask
98
+
use ms = new MemoryStream()
99
+
do! response.ResponseStream.CopyToAsync(ms) |> Async.AwaitTask
100
+
return Some(ms.ToArray())
101
+
with
102
+
| :? AmazonS3Exception as ex when ex.StatusCode = System.Net.HttpStatusCode.NotFound -> return None
103
+
| _ -> return None
104
+
}
105
+
106
+
member _.Has(cid) = async {
107
+
try
108
+
let request = GetObjectMetadataRequest()
109
+
request.BucketName <- bucket
110
+
request.Key <- cid.ToString()
111
+
let! _ = client.GetObjectMetadataAsync(request) |> Async.AwaitTask
112
+
return true
113
+
with
114
+
| :? AmazonS3Exception as ex when ex.StatusCode = System.Net.HttpStatusCode.NotFound -> return false
115
+
| _ -> return false
116
+
}
117
+
118
+
member _.Delete(cid) = async {
119
+
let request = DeleteObjectRequest()
120
+
request.BucketName <- bucket
121
+
request.Key <- cid.ToString()
122
+
let! _ = client.DeleteObjectAsync(request) |> Async.AwaitTask
123
+
()
124
+
}
+50
PDSharp.Core/BlockStore.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System.Collections.Concurrent
4
+
5
+
/// Block storage interface for CID → byte[] mappings
6
+
module BlockStore =
7
+
8
+
/// Interface for content-addressed block storage
9
+
type IBlockStore =
10
+
abstract member Get : Cid -> Async<byte[] option>
11
+
abstract member Put : byte[] -> Async<Cid>
12
+
abstract member Has : Cid -> Async<bool>
13
+
abstract member GetAllCidsAndData : unit -> Async<(Cid * byte[]) list>
14
+
15
+
/// In-memory implementation of IBlockStore for testing
16
+
type MemoryBlockStore() =
17
+
let store = ConcurrentDictionary<string, (Cid * byte[])>()
18
+
19
+
let cidKey (cid : Cid) =
20
+
System.Convert.ToBase64String(cid.Bytes)
21
+
22
+
interface IBlockStore with
23
+
member _.Get(cid : Cid) = async {
24
+
let key = cidKey cid
25
+
26
+
match store.TryGetValue(key) with
27
+
| true, (_, data) -> return Some data
28
+
| false, _ -> return None
29
+
}
30
+
31
+
member _.Put(data : byte[]) = async {
32
+
let hash = Crypto.sha256 data
33
+
let cid = Cid.FromHash hash
34
+
let key = cidKey cid
35
+
store.[key] <- (cid, data)
36
+
return cid
37
+
}
38
+
39
+
member _.Has(cid : Cid) = async {
40
+
let key = cidKey cid
41
+
return store.ContainsKey(key)
42
+
}
43
+
44
+
member _.GetAllCidsAndData() = async { return store.Values |> Seq.toList }
45
+
46
+
/// Get the number of blocks stored (for testing)
47
+
member _.Count = store.Count
48
+
49
+
/// Clear all blocks (for testing)
50
+
member _.Clear() = store.Clear()
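A minimal usage sketch (not part of the diff), exercising the in-memory store through the IBlockStore interface:

open PDSharp.Core
open PDSharp.Core.BlockStore

// Put bytes, get a sha2-256-derived CID back, and read the same bytes out again.
let blockRoundTrip () = async {
    let store = MemoryBlockStore() :> IBlockStore
    let! cid = store.Put(System.Text.Encoding.UTF8.GetBytes "hello")
    let! bytes = store.Get cid
    return bytes |> Option.map System.Text.Encoding.UTF8.GetString   // Some "hello"
}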
+85
PDSharp.Core/Car.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.IO
5
+
6
+
/// CARv1 (Content Addressable aRchives) writer module
7
+
/// Implements the CAR format per https://ipld.io/specs/transport/car/carv1/
8
+
module Car =
9
+
/// Encode an unsigned integer as LEB128 varint
10
+
let encodeVarint (value : int) : byte[] =
11
+
if value < 0 then
12
+
failwith "Varint value must be non-negative"
13
+
elif value = 0 then
14
+
[| 0uy |]
15
+
else
16
+
use ms = new MemoryStream()
17
+
let mutable v = value
18
+
19
+
while v > 0 do
20
+
let mutable b = byte (v &&& 0x7F)
21
+
v <- v >>> 7
22
+
23
+
if v > 0 then
24
+
b <- b ||| 0x80uy
25
+
26
+
ms.WriteByte(b)
27
+
28
+
ms.ToArray()
29
+
30
+
/// Create CAR header as DAG-CBOR encoded bytes
31
+
/// Header format: { version: 1, roots: [CID, ...] }
32
+
let createHeader (roots : Cid list) : byte[] =
33
+
let headerMap =
34
+
Map.ofList [ ("roots", box (roots |> List.map box)); ("version", box 1) ]
35
+
36
+
DagCbor.encode headerMap
37
+
38
+
/// Encode a single block section: varint(len) | CID bytes | block data
39
+
let encodeBlock (cid : Cid) (data : byte[]) : byte[] =
40
+
let cidBytes = cid.Bytes
41
+
let sectionLen = cidBytes.Length + data.Length
42
+
let varintBytes = encodeVarint sectionLen
43
+
44
+
use ms = new MemoryStream()
45
+
ms.Write(varintBytes, 0, varintBytes.Length)
46
+
ms.Write(cidBytes, 0, cidBytes.Length)
47
+
ms.Write(data, 0, data.Length)
48
+
ms.ToArray()
49
+
50
+
/// Create a complete CARv1 file from roots and blocks
51
+
/// CAR format: [varint | header] [varint | CID | block]...
52
+
let createCar (roots : Cid list) (blocks : (Cid * byte[]) seq) : byte[] =
53
+
use ms = new MemoryStream()
54
+
55
+
let headerBytes = createHeader roots
56
+
let headerVarint = encodeVarint headerBytes.Length
57
+
ms.Write(headerVarint, 0, headerVarint.Length)
58
+
ms.Write(headerBytes, 0, headerBytes.Length)
59
+
60
+
for cid, data in blocks do
61
+
let blockSection = encodeBlock cid data
62
+
ms.Write(blockSection, 0, blockSection.Length)
63
+
64
+
ms.ToArray()
65
+
66
+
/// Create a CAR from a single root with an async block fetcher
67
+
let createCarAsync (roots : Cid list) (getBlock : Cid -> Async<byte[] option>) (allCids : Cid seq) = async {
68
+
use ms = new MemoryStream()
69
+
70
+
let headerBytes = createHeader roots
71
+
let headerVarint = encodeVarint headerBytes.Length
72
+
ms.Write(headerVarint, 0, headerVarint.Length)
73
+
ms.Write(headerBytes, 0, headerBytes.Length)
74
+
75
+
for cid in allCids do
76
+
let! dataOpt = getBlock cid
77
+
78
+
match dataOpt with
79
+
| Some data ->
80
+
let blockSection = encodeBlock cid data
81
+
ms.Write(blockSection, 0, blockSection.Length)
82
+
| None -> ()
83
+
84
+
return ms.ToArray()
85
+
}
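A minimal usage sketch (not part of the diff; it reuses MemoryBlockStore from BlockStore.fs and a made-up JSON payload):

open PDSharp.Core
open PDSharp.Core.BlockStore

// Wrap a single block in a CARv1 archive: varint-prefixed header { version; roots },
// followed by one varint | CID | data section.
let singleBlockCar () = async {
    let store = MemoryBlockStore() :> IBlockStore
    let data = System.Text.Encoding.UTF8.GetBytes "{\"text\":\"hello\"}"
    let! cid = store.Put data
    return Car.createCar [ cid ] [ (cid, data) ]
}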
+111
PDSharp.Core/Cid.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Text
5
+
6
+
/// Minimal Base32 (RFC 4648 Lowercase)
7
+
module Base32Encoding =
8
+
let private alphabet = "abcdefghijklmnopqrstuvwxyz234567"
9
+
10
+
let ToString (data : byte[]) : string =
11
+
if data.Length = 0 then
12
+
""
13
+
else
14
+
let mutable i = 0
15
+
let mutable index = 0
16
+
let mutable digit = 0
17
+
let mutable currByte = 0
18
+
let mutable nextByte = 0
19
+
let sb = StringBuilder((data.Length + 7) * 8 / 5)
20
+
21
+
while i < data.Length do
22
+
currByte <- (int data.[i]) &&& 0xFF
23
+
24
+
if index > 3 then
25
+
if (i + 1) < data.Length then
26
+
nextByte <- (int data.[i + 1]) &&& 0xFF
27
+
else
28
+
nextByte <- 0
29
+
30
+
digit <- currByte &&& (0xFF >>> index)
31
+
index <- (index + 5) % 8
32
+
digit <- digit <<< index
33
+
digit <- digit ||| (nextByte >>> (8 - index))
34
+
i <- i + 1
35
+
else
36
+
digit <- currByte >>> 8 - (index + 5) &&& 0x1F
37
+
index <- (index + 5) % 8
38
+
39
+
if index = 0 then
40
+
i <- i + 1
41
+
42
+
sb.Append(alphabet.[digit]) |> ignore
43
+
44
+
sb.ToString()
45
+
46
+
let FromString (s : string) : byte[] option =
47
+
if String.IsNullOrEmpty s then
48
+
Some [||]
49
+
else
50
+
try
51
+
let bits = s.Length * 5
52
+
let bytes = Array.zeroCreate<byte> (bits / 8)
53
+
let mutable buffer = 0
54
+
let mutable bitsInBuffer = 0
55
+
let mutable byteIndex = 0
56
+
57
+
for c in s do
58
+
let idx = alphabet.IndexOf(Char.ToLowerInvariant c)
59
+
60
+
if idx < 0 then
61
+
failwith "Invalid base32 character"
62
+
63
+
buffer <- buffer <<< 5 ||| idx
64
+
bitsInBuffer <- bitsInBuffer + 5
65
+
66
+
if bitsInBuffer >= 8 then
67
+
bitsInBuffer <- bitsInBuffer - 8
68
+
69
+
if byteIndex < bytes.Length then
70
+
bytes.[byteIndex] <- byte ((buffer >>> bitsInBuffer) &&& 0xFF)
71
+
byteIndex <- byteIndex + 1
72
+
73
+
Some bytes
74
+
with _ ->
75
+
None
76
+
77
+
/// Basic CID implementation for AT Protocol (CIDv1 + dag-cbor + sha2-256)
78
+
///
79
+
/// Constants for ATProto defaults:
80
+
/// - Version 1 (0x01)
81
+
/// - Codec: dag-cbor (0x71)
82
+
/// - Hash: sha2-256 (0x12) - Length 32 (0x20)
83
+
[<Struct>]
84
+
type Cid =
85
+
val Bytes : byte[]
86
+
new(bytes : byte[]) = { Bytes = bytes }
87
+
88
+
static member FromHash(hash : byte[]) =
89
+
if hash.Length <> 32 then
90
+
failwith "Hash must be 32 bytes (sha2-256)"
91
+
92
+
let cidBytes = Array.zeroCreate<byte> 36
93
+
cidBytes.[0] <- 0x01uy
94
+
cidBytes.[1] <- 0x71uy
95
+
cidBytes.[2] <- 0x12uy
96
+
cidBytes.[3] <- 0x20uy
97
+
Array.Copy(hash, 0, cidBytes, 4, 32)
98
+
Cid cidBytes
99
+
100
+
static member TryParse(s : string) : Cid option =
101
+
if String.IsNullOrWhiteSpace s then
102
+
None
103
+
elif s.StartsWith("b") then
104
+
match Base32Encoding.FromString(s.Substring(1)) with
105
+
| Some bytes when bytes.Length = 36 -> Some(Cid bytes)
106
+
| _ -> None
107
+
else
108
+
None
109
+
110
+
override this.ToString() =
111
+
"b" + Base32Encoding.ToString(this.Bytes)
+25
-1
PDSharp.Core/Config.fs
···
  namespace PDSharp.Core

  module Config =
-     type AppConfig = { PublicUrl : string; DidHost : string }
+     type AppConfig = {
+         PublicUrl : string
+         DidHost : string
+         /// HS256 signing key for session tokens
+         JwtSecret : string
+         /// Connection string for SQLite
+         SqliteConnectionString : string
+         /// Disable SQLite WAL auto-checkpoint (for Litestream compatibility)
+         DisableWalAutoCheckpoint : bool
+         /// Blob storage configuration
+         BlobStore : BlobStoreConfig
+     }
+
+     and BlobStoreConfig =
+         | Disk of path : string
+         | S3 of S3Config
+
+     and S3Config = {
+         Bucket : string
+         Region : string
+         AccessKey : string option
+         SecretKey : string option
+         ServiceUrl : string option
+         ForcePathStyle : bool
+     }
+108
PDSharp.Core/Crypto.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Text
5
+
open Org.BouncyCastle.Crypto.Digests
6
+
open Org.BouncyCastle.Crypto.Signers
7
+
open Org.BouncyCastle.Crypto.Parameters
8
+
open Org.BouncyCastle.Math
9
+
open Org.BouncyCastle.Asn1.X9
10
+
open Org.BouncyCastle.Crypto.Generators
11
+
open Org.BouncyCastle.Security
12
+
open Org.BouncyCastle.Asn1.Sec
13
+
14
+
module Crypto =
15
+
let sha256 (data : byte[]) : byte[] =
16
+
let digest = Sha256Digest()
17
+
digest.BlockUpdate(data, 0, data.Length)
18
+
let size = digest.GetDigestSize()
19
+
let result = Array.zeroCreate<byte> size
20
+
digest.DoFinal(result, 0) |> ignore
21
+
result
22
+
23
+
let sha256Str (input : string) : byte[] = sha256 (Encoding.UTF8.GetBytes(input))
24
+
25
+
type Curve =
26
+
| P256
27
+
| K256
28
+
29
+
let getCurveParams (curve : Curve) =
30
+
match curve with
31
+
| P256 -> ECNamedCurveTable.GetByName("secp256r1")
32
+
| K256 -> ECNamedCurveTable.GetByName("secp256k1")
33
+
34
+
let getDomainParams (curve : Curve) =
35
+
let ecP = getCurveParams curve
36
+
ECDomainParameters(ecP.Curve, ecP.G, ecP.N, ecP.H, ecP.GetSeed())
37
+
38
+
type EcKeyPair = {
39
+
PrivateKey : ECPrivateKeyParameters option
40
+
PublicKey : ECPublicKeyParameters
41
+
Curve : Curve
42
+
}
43
+
44
+
let generateKey (curve : Curve) : EcKeyPair =
45
+
let domainParams = getDomainParams curve
46
+
let genParam = ECKeyGenerationParameters(domainParams, SecureRandom())
47
+
let generator = ECKeyPairGenerator()
48
+
generator.Init(genParam)
49
+
let pair = generator.GenerateKeyPair()
50
+
51
+
{
52
+
PrivateKey = Some(pair.Private :?> ECPrivateKeyParameters)
53
+
PublicKey = (pair.Public :?> ECPublicKeyParameters)
54
+
Curve = curve
55
+
}
56
+
57
+
let enforceLowS (s : BigInteger) (n : BigInteger) : BigInteger =
58
+
let halfN = n.ShiftRight(1)
59
+
if s.CompareTo(halfN) > 0 then n.Subtract(s) else s
60
+
61
+
let sign (key : EcKeyPair) (digest : byte[]) : byte[] =
62
+
match key.PrivateKey with
63
+
| None -> failwith "Private key required for signing"
64
+
| Some privParams ->
65
+
let signer = ECDsaSigner()
66
+
signer.Init(true, privParams)
67
+
let inputs = digest
68
+
let signature = signer.GenerateSignature(inputs)
69
+
let r = signature.[0]
70
+
let s = signature.[1]
71
+
72
+
let n = privParams.Parameters.N
73
+
let canonicalS = enforceLowS s n
74
+
75
+
let to32Bytes (bi : BigInteger) =
76
+
let bytes = bi.ToByteArrayUnsigned()
77
+
78
+
if bytes.Length > 32 then
79
+
failwith "Signature component too large"
80
+
81
+
let padded = Array.zeroCreate<byte> 32
82
+
Array.Copy(bytes, 0, padded, 32 - bytes.Length, bytes.Length)
83
+
padded
84
+
85
+
let rBytes = to32Bytes r
86
+
let sBytes = to32Bytes canonicalS
87
+
Array.append rBytes sBytes
88
+
89
+
let verify (key : EcKeyPair) (digest : byte[]) (signature : byte[]) : bool =
90
+
if signature.Length <> 64 then
91
+
false
92
+
else
93
+
let rBytes = Array.sub signature 0 32
94
+
let sBytes = Array.sub signature 32 32
95
+
96
+
let r = BigInteger(1, rBytes)
97
+
let s = BigInteger(1, sBytes)
98
+
99
+
let domainParams = key.PublicKey.Parameters
100
+
let n = domainParams.N
101
+
let halfN = n.ShiftRight(1)
102
+
103
+
if s.CompareTo(halfN) > 0 then
104
+
false
105
+
else
106
+
let signer = ECDsaSigner()
107
+
signer.Init(false, key.PublicKey)
108
+
signer.VerifySignature(digest, r, s)
+83
PDSharp.Core/DagCbor.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Collections.Generic
5
+
open System.Formats.Cbor
6
+
open System.IO
7
+
open System.Text
8
+
9
+
module DagCbor =
10
+
type SortKey = { Length : int; Bytes : byte[] }
11
+
12
+
let private getSortKey (key : string) =
13
+
let bytes = Encoding.UTF8.GetBytes(key)
14
+
{ Length = bytes.Length; Bytes = bytes }
15
+
16
+
let private compareKeys (a : string) (b : string) =
17
+
let ka = getSortKey a
18
+
let kb = getSortKey b
19
+
20
+
if ka.Length <> kb.Length then
21
+
ka.Length.CompareTo kb.Length
22
+
else
23
+
let mutable res = 0
24
+
let mutable i = 0
25
+
26
+
while res = 0 && i < ka.Bytes.Length do
27
+
res <- ka.Bytes.[i].CompareTo(kb.Bytes.[i])
28
+
i <- i + 1
29
+
30
+
res
31
+
32
+
let rec private writeItem (writer : CborWriter) (item : obj) =
33
+
match item with
34
+
| null -> writer.WriteNull()
35
+
| :? bool as b -> writer.WriteBoolean(b)
36
+
| :? int as i -> writer.WriteInt32(i)
37
+
| :? int64 as l -> writer.WriteInt64(l)
38
+
| :? string as s -> writer.WriteTextString(s)
39
+
| :? (byte[]) as b -> writer.WriteByteString(b)
40
+
| :? Cid as c ->
41
+
let tag = LanguagePrimitives.EnumOfValue<uint64, CborTag>(42UL)
42
+
writer.WriteTag(tag)
43
+
let rawCid = c.Bytes
44
+
let linkBytes = Array.zeroCreate<byte> (rawCid.Length + 1)
45
+
linkBytes.[0] <- 0x00uy
46
+
Array.Copy(rawCid, 0, linkBytes, 1, rawCid.Length)
47
+
writer.WriteByteString(linkBytes)
48
+
49
+
| :? Map<string, obj> as m ->
50
+
let keys = m |> Map.toList |> List.map fst |> List.sortWith compareKeys
51
+
writer.WriteStartMap(keys.Length)
52
+
53
+
for k in keys do
54
+
writer.WriteTextString(k)
55
+
writeItem writer (m.[k])
56
+
57
+
writer.WriteEndMap()
58
+
59
+
| :? IDictionary<string, obj> as d ->
60
+
let keys = d.Keys |> Seq.toList |> List.sortWith compareKeys
61
+
writer.WriteStartMap(d.Count)
62
+
63
+
for k in keys do
64
+
writer.WriteTextString(k)
65
+
writeItem writer (d.[k])
66
+
67
+
writer.WriteEndMap()
68
+
69
+
| :? seq<obj> as l ->
70
+
let arr = l |> Seq.toArray
71
+
writer.WriteStartArray(arr.Length)
72
+
73
+
for x in arr do
74
+
writeItem writer x
75
+
76
+
writer.WriteEndArray()
77
+
78
+
| _ -> failwith $"Unsupported type for DAG-CBOR: {item.GetType().Name}"
79
+
80
+
let encode (data : obj) : byte[] =
81
+
let writer = new CborWriter(CborConformanceMode.Strict, false, false)
82
+
writeItem writer data
83
+
writer.Encode()
+73
PDSharp.Core/DidResolver.fs
+73
PDSharp.Core/DidResolver.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Net.Http
5
+
open System.Text.Json
6
+
open System.Text.Json.Serialization
7
+
8
+
module DidResolver =
9
+
type VerificationMethod = {
10
+
[<JsonPropertyName("id")>]
11
+
Id : string
12
+
[<JsonPropertyName("type")>]
13
+
Type : string
14
+
[<JsonPropertyName("controller")>]
15
+
Controller : string
16
+
[<JsonPropertyName("publicKeyMultibase")>]
17
+
PublicKeyMultibase : string option
18
+
}
19
+
20
+
type DidDocument = {
21
+
[<JsonPropertyName("id")>]
22
+
Id : string
23
+
[<JsonPropertyName("verificationMethod")>]
24
+
VerificationMethod : VerificationMethod list
25
+
}
26
+
27
+
let private httpClient = new HttpClient()
28
+
29
+
let private fetchJson<'T> (url : string) : Async<'T option> = async {
30
+
try
31
+
let! response = httpClient.GetAsync url |> Async.AwaitTask
32
+
33
+
if response.IsSuccessStatusCode then
34
+
let! stream = response.Content.ReadAsStreamAsync() |> Async.AwaitTask
35
+
let options = JsonSerializerOptions(PropertyNameCaseInsensitive = true)
36
+
let! doc = JsonSerializer.DeserializeAsync<'T>(stream, options).AsTask() |> Async.AwaitTask
37
+
return Some doc
38
+
else
39
+
return None
40
+
with _ ->
41
+
return None
42
+
}
43
+
44
+
let resolveDidWeb (did : string) : Async<DidDocument option> = async {
45
+
let parts = did.Split(':')
46
+
47
+
if parts.Length < 3 then
48
+
return None
49
+
else
50
+
let domain = parts.[2]
51
+
52
+
let url =
53
+
if domain = "localhost" then
54
+
"http://localhost:5000/.well-known/did.json"
55
+
else
56
+
$"https://{domain}/.well-known/did.json"
57
+
58
+
return! fetchJson<DidDocument> url
59
+
}
60
+
61
+
let resolveDidPlc (did : string) : Async<DidDocument option> = async {
62
+
let url = $"https://plc.directory/{did}"
63
+
return! fetchJson<DidDocument> url
64
+
}
65
+
66
+
let resolve (did : string) : Async<DidDocument option> = async {
67
+
if did.StartsWith("did:web:") then
68
+
return! resolveDidWeb did
69
+
elif did.StartsWith("did:plc:") then
70
+
return! resolveDidPlc did
71
+
else
72
+
return None
73
+
}
+56
PDSharp.Core/Firehose.fs
+56
PDSharp.Core/Firehose.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Threading
5
+
6
+
/// Event stream (firehose) for com.atproto.sync.subscribeRepos
7
+
module Firehose =
8
+
9
+
/// Commit event sent to subscribers
10
+
type CommitEvent = {
11
+
Seq : int64
12
+
Did : string
13
+
Rev : string
14
+
Commit : Cid
15
+
Blocks : byte[]
16
+
Time : DateTimeOffset
17
+
}
18
+
19
+
/// Mutable sequence counter for firehose events
20
+
let private seqCounter = ref 0L
21
+
22
+
/// Get the next sequence number (thread-safe, monotonic)
23
+
let nextSeq () : int64 = Interlocked.Increment(seqCounter)
24
+
25
+
/// Get current sequence without incrementing (for cursor resume)
26
+
let currentSeq () : int64 = seqCounter.Value
27
+
28
+
/// Create a commit event for a repository write
29
+
let createCommitEvent (did : string) (rev : string) (commitCid : Cid) (carBytes : byte[]) : CommitEvent = {
30
+
Seq = nextSeq ()
31
+
Did = did
32
+
Rev = rev
33
+
Commit = commitCid
34
+
Blocks = carBytes
35
+
Time = DateTimeOffset.UtcNow
36
+
}
37
+
38
+
/// Encode a commit event to DAG-CBOR bytes for WebSocket transmission
39
+
/// Format follows AT Protocol #commit message structure
40
+
let encodeEvent (event : CommitEvent) : byte[] =
41
+
let eventMap : Map<string, obj> =
42
+
Map.ofList [
43
+
"$type", box "com.atproto.sync.subscribeRepos#commit"
44
+
"seq", box event.Seq
45
+
"did", box event.Did
46
+
"rev", box event.Rev
47
+
"commit", box event.Commit
48
+
"blocks", box event.Blocks
49
+
"time", box (event.Time.ToString("o"))
50
+
]
51
+
52
+
DagCbor.encode eventMap
53
+
54
+
/// Reset sequence counter (for testing)
55
+
let resetSeq () =
56
+
Interlocked.Exchange(seqCounter, 0L) |> ignore
+157
PDSharp.Core/Health.fs
+157
PDSharp.Core/Health.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.IO
5
+
open System.Runtime.InteropServices
6
+
7
+
/// Health monitoring module for guardrails and uptime checks
8
+
module Health =
9
+
10
+
/// Health status response
11
+
type HealthStatus = {
12
+
/// Version of the PDS
13
+
Version : string
14
+
/// Uptime in seconds
15
+
UptimeSeconds : int64
16
+
/// Server start time in ISO8601
17
+
StartTime : string
18
+
/// Database status
19
+
DatabaseStatus : DatabaseStatus
20
+
/// Disk usage information
21
+
DiskUsage : DiskUsage option
22
+
/// Backup status
23
+
BackupStatus : BackupStatus option
24
+
}
25
+
26
+
/// Database connectivity status
27
+
and DatabaseStatus = {
28
+
/// Whether the database is reachable
29
+
IsHealthy : bool
30
+
/// Optional error message
31
+
Message : string option
32
+
}
33
+
34
+
/// Disk usage metrics
35
+
and DiskUsage = {
36
+
/// Total disk space in bytes
37
+
TotalBytes : int64
38
+
/// Free disk space in bytes
39
+
FreeBytes : int64
40
+
/// Used disk space in bytes
41
+
UsedBytes : int64
42
+
/// Percentage of disk used
43
+
UsedPercent : float
44
+
/// Whether disk pressure is critical (>90%)
45
+
IsCritical : bool
46
+
}
47
+
48
+
/// Backup status tracking
49
+
and BackupStatus = {
50
+
/// Timestamp of last successful backup
51
+
LastBackupTime : DateTimeOffset option
52
+
/// Age of last backup in hours
53
+
BackupAgeHours : float option
54
+
/// Whether backup is too old (>24 hours)
55
+
IsStale : bool
56
+
}
57
+
58
+
/// Get disk usage for a given path
59
+
let getDiskUsage (path : string) : DiskUsage option =
60
+
try
61
+
let driveInfo =
62
+
if RuntimeInformation.IsOSPlatform OSPlatform.Windows then
63
+
let driveLetter = Path.GetPathRoot path
64
+
DriveInfo driveLetter
65
+
else
66
+
DriveInfo(if Directory.Exists path then path else "/")
67
+
68
+
if driveInfo.IsReady then
69
+
let total = driveInfo.TotalSize
70
+
let free = driveInfo.TotalFreeSpace
71
+
let used = total - free
72
+
let usedPercent = float used / float total * 100.0
73
+
74
+
Some {
75
+
TotalBytes = total
76
+
FreeBytes = free
77
+
UsedBytes = used
78
+
UsedPercent = Math.Round(usedPercent, 2)
79
+
IsCritical = usedPercent >= 90.0
80
+
}
81
+
else
82
+
None
83
+
with _ ->
84
+
None
85
+
86
+
87
+
88
+
/// Check if a SQLite database file is accessible
89
+
let checkDatabaseHealth (connectionString : string) : DatabaseStatus =
90
+
try
91
+
let dbPath =
92
+
connectionString.Split ';'
93
+
|> Array.tryFind (fun s -> s.Trim().StartsWith("Data Source=", StringComparison.OrdinalIgnoreCase))
94
+
|> Option.map (fun s -> s.Split('=').[1].Trim())
95
+
96
+
match dbPath with
97
+
| Some path when File.Exists path -> { IsHealthy = true; Message = None }
98
+
| Some path -> {
99
+
IsHealthy = false
100
+
Message = Some $"Database file not found: {path}"
101
+
}
102
+
| None -> {
103
+
IsHealthy = false
104
+
Message = Some "Could not parse connection string"
105
+
}
106
+
with ex -> { IsHealthy = false; Message = Some ex.Message }
107
+
108
+
/// Calculate backup status from last backup time
109
+
let getBackupStatus (lastBackupTime : DateTimeOffset option) : BackupStatus =
110
+
match lastBackupTime with
111
+
| Some time ->
112
+
let age = DateTimeOffset.UtcNow - time
113
+
let ageHours = age.TotalHours
114
+
115
+
{
116
+
LastBackupTime = Some time
117
+
BackupAgeHours = Some(Math.Round(ageHours, 2))
118
+
IsStale = ageHours > 24.0
119
+
}
120
+
| None -> {
121
+
LastBackupTime = None
122
+
BackupAgeHours = None
123
+
IsStale = true
124
+
}
125
+
126
+
/// Mutable state for tracking server state
127
+
type HealthState() =
128
+
let mutable startTime = DateTimeOffset.UtcNow
129
+
let mutable lastBackupTime : DateTimeOffset option = None
130
+
131
+
member _.StartTime = startTime
132
+
member _.SetStartTime(time : DateTimeOffset) = startTime <- time
133
+
member _.LastBackupTime = lastBackupTime
134
+
135
+
member _.RecordBackup() =
136
+
lastBackupTime <- Some DateTimeOffset.UtcNow
137
+
138
+
member _.RecordBackup(time : DateTimeOffset) = lastBackupTime <- Some time
139
+
140
+
member _.GetUptime() : int64 =
141
+
int64 (DateTimeOffset.UtcNow - startTime).TotalSeconds
142
+
143
+
/// Build a complete health status
144
+
let buildHealthStatus
145
+
(version : string)
146
+
(healthState : HealthState)
147
+
(connectionString : string)
148
+
(dataPath : string)
149
+
: HealthStatus =
150
+
{
151
+
Version = version
152
+
UptimeSeconds = healthState.GetUptime()
153
+
StartTime = healthState.StartTime.ToString("o")
154
+
DatabaseStatus = checkDatabaseHealth connectionString
155
+
DiskUsage = getDiskUsage dataPath
156
+
BackupStatus = Some(getBackupStatus healthState.LastBackupTime)
157
+
}
+131
PDSharp.Core/Lexicon.fs
+131
PDSharp.Core/Lexicon.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Text.Json
5
+
6
+
module Lexicon =
7
+
type LexiconResult =
8
+
| Ok
9
+
| Error of string
10
+
11
+
module Validation =
12
+
let private getProperty (p : string) (element : JsonElement) =
13
+
match element.TryGetProperty(p) with
14
+
| true, prop -> Some prop
15
+
| _ -> None
16
+
17
+
let private getString (p : string) (element : JsonElement) =
18
+
match getProperty p element with
19
+
| Some prop when prop.ValueKind = JsonValueKind.String -> Some(prop.GetString())
20
+
| _ -> None
21
+
22
+
let private validateStringField
23
+
(element : JsonElement)
24
+
(fieldName : string)
25
+
(maxLength : int option)
26
+
(required : bool)
27
+
: LexiconResult =
28
+
match getProperty fieldName element with
29
+
| Some prop ->
30
+
if prop.ValueKind <> JsonValueKind.String then
31
+
Error $"Field '{fieldName}' must be a string"
32
+
else
33
+
match maxLength with
34
+
| Some maxLen when prop.GetString().Length > maxLen ->
35
+
Error $"Field '{fieldName}' exceeds maximum length of {maxLen}"
36
+
| _ -> Ok
37
+
| None ->
38
+
if required then
39
+
Error $"Missing required field '{fieldName}'"
40
+
else
41
+
Ok
42
+
43
+
let private validateIsoDate (element : JsonElement) (fieldName : string) (required : bool) : LexiconResult =
44
+
match getProperty fieldName element with
45
+
| Some prop ->
46
+
if prop.ValueKind <> JsonValueKind.String then
47
+
Error $"Field '{fieldName}' must be a string"
48
+
else
49
+
let s = prop.GetString()
50
+
let mutable dt = DateTimeOffset.MinValue
51
+
52
+
if DateTimeOffset.TryParse(s, &dt) then
53
+
Ok
54
+
else
55
+
Error $"Field '{fieldName}' must be a valid ISO 8601 date string"
56
+
| None ->
57
+
if required then
58
+
Error $"Missing required field '{fieldName}'"
59
+
else
60
+
Ok
61
+
62
+
let private validateRef (element : JsonElement) (fieldName : string) (required : bool) : LexiconResult =
63
+
match getProperty fieldName element with
64
+
| Some prop ->
65
+
if prop.ValueKind <> JsonValueKind.Object then
66
+
Error $"Field '{fieldName}' must be an object"
67
+
else
68
+
match validateStringField prop "uri" None true, validateStringField prop "cid" None true with
69
+
| Ok, Ok -> Ok
70
+
| Error e, _ -> Error $"Field '{fieldName}': {e}"
71
+
| _, Error e -> Error $"Field '{fieldName}': {e}"
72
+
| None ->
73
+
if required then
74
+
Error $"Missing required field '{fieldName}'"
75
+
else
76
+
Ok
77
+
78
+
let validatePost (record : JsonElement) : LexiconResult =
79
+
let textCheck = validateStringField record "text" (Some 3000) true
80
+
let dateCheck = validateIsoDate record "createdAt" true
81
+
82
+
match textCheck, dateCheck with
83
+
| Ok, Ok -> Ok
84
+
| Error e, _ -> Error e
85
+
| _, Error e -> Error e
86
+
87
+
let validateLike (record : JsonElement) : LexiconResult =
88
+
let subjectCheck = validateRef record "subject" true
89
+
let dateCheck = validateIsoDate record "createdAt" true
90
+
91
+
match subjectCheck, dateCheck with
92
+
| Ok, Ok -> Ok
93
+
| Error e, _ -> Error e
94
+
| _, Error e -> Error e
95
+
96
+
let validateRepost (record : JsonElement) : LexiconResult =
97
+
let subjectCheck = validateRef record "subject" true
98
+
let dateCheck = validateIsoDate record "createdAt" true
99
+
100
+
match subjectCheck, dateCheck with
101
+
| Ok, Ok -> Ok
102
+
| Error e, _ -> Error e
103
+
| _, Error e -> Error e
104
+
105
+
let validateFollow (record : JsonElement) : LexiconResult =
106
+
let subjectCheck = validateStringField record "subject" None true
107
+
let dateCheck = validateIsoDate record "createdAt" true
108
+
109
+
match subjectCheck, dateCheck with
110
+
| Ok, Ok -> Ok
111
+
| Error e, _ -> Error e
112
+
| _, Error e -> Error e
113
+
114
+
let validateProfile (record : JsonElement) : LexiconResult =
115
+
let nameCheck = validateStringField record "displayName" (Some 640) false
116
+
let descCheck = validateStringField record "description" (Some 2560) false
117
+
118
+
match nameCheck, descCheck with
119
+
| Ok, Ok -> Ok
120
+
| Error e, _ -> Error e
121
+
| _, Error e -> Error e
122
+
123
+
/// Unknown records are valid but unvalidated.
124
+
let validate (collection : string) (record : JsonElement) : LexiconResult =
125
+
match collection with
126
+
| "app.bsky.feed.post" -> Validation.validatePost record
127
+
| "app.bsky.feed.like" -> Validation.validateLike record
128
+
| "app.bsky.feed.repost" -> Validation.validateRepost record
129
+
| "app.bsky.graph.follow" -> Validation.validateFollow record
130
+
| "app.bsky.actor.profile" -> Validation.validateProfile record
131
+
| _ -> Ok
+812
PDSharp.Core/Mst.fs
+812
PDSharp.Core/Mst.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.Collections.Generic
5
+
open System.Formats.Cbor
6
+
7
+
module Mst =
8
+
type MstEntry = {
9
+
PrefixLen : int
10
+
KeySuffix : string
11
+
Value : Cid
12
+
Tree : Cid option
13
+
}
14
+
15
+
type MstNode = { Left : Cid option; Entries : MstEntry list }
16
+
17
+
/// Layer Calculation
18
+
let layer (key : string) : int =
19
+
let hash = Crypto.sha256Str key
20
+
let mutable zeros = 0
21
+
let mutable i = 0
22
+
let mutable found = false
23
+
24
+
while i < hash.Length && not found do
25
+
let b = hash.[i]
26
+
27
+
if b = 0uy then
28
+
zeros <- zeros + 8
29
+
i <- i + 1
30
+
else
31
+
let mutable mask = 0x80uy
32
+
33
+
while b &&& mask = 0uy do
34
+
zeros <- zeros + 1
35
+
mask <- mask >>> 1
36
+
37
+
found <- true
38
+
39
+
zeros / 2
40
+
41
+
let entryToCborObj (e : MstEntry) : obj =
42
+
let arr = Array.zeroCreate<obj> 4
43
+
arr.[0] <- e.PrefixLen :> obj
44
+
arr.[1] <- e.KeySuffix :> obj
45
+
arr.[2] <- e.Value :> obj
46
+
47
+
arr.[3] <-
48
+
match e.Tree with
49
+
| Some c -> c :> obj
50
+
| None -> null
51
+
52
+
arr :> obj
53
+
54
+
let nodeToCborObj (node : MstNode) : obj =
55
+
let arr = Array.zeroCreate<obj> 2
56
+
57
+
arr.[0] <-
58
+
match node.Left with
59
+
| Some c -> c :> obj
60
+
| None -> null
61
+
62
+
let entriesArr = node.Entries |> List.map entryToCborObj |> Seq.toArray
63
+
arr.[1] <- entriesArr :> obj
64
+
arr :> obj
65
+
66
+
let serialize (node : MstNode) : byte[] = nodeToCborObj node |> DagCbor.encode
67
+
68
+
let readCid (reader : CborReader) =
69
+
let tag = reader.ReadTag()
70
+
let tag42 = LanguagePrimitives.EnumOfValue<uint64, CborTag>(42UL)
71
+
72
+
if tag <> tag42 then
73
+
failwith "Expected CID tag 42"
74
+
75
+
let bytes = reader.ReadByteString()
76
+
77
+
if bytes.Length > 0 && bytes.[0] = 0x00uy then
78
+
let raw = Array.zeroCreate<byte> (bytes.Length - 1)
79
+
Array.Copy(bytes, 1, raw, 0, raw.Length)
80
+
Cid raw
81
+
else
82
+
Cid bytes
83
+
84
+
let deserialize (data : byte[]) : MstNode =
85
+
let reader = new CborReader(data.AsMemory(), CborConformanceMode.Strict)
86
+
let len = reader.ReadStartArray()
87
+
88
+
if len.HasValue && len.Value <> 2 then
89
+
failwith "MST node must be array of length 2"
90
+
91
+
let left =
92
+
match reader.PeekState() with
93
+
| CborReaderState.Null ->
94
+
reader.ReadNull()
95
+
None
96
+
| _ -> Some(readCid reader)
97
+
98
+
let entriesLen = reader.ReadStartArray()
99
+
let mutable entries = []
100
+
101
+
let count =
102
+
if entriesLen.HasValue then
103
+
entriesLen.Value
104
+
else
105
+
Int32.MaxValue
106
+
107
+
let mutable i = 0
108
+
109
+
while i < count && reader.PeekState() <> CborReaderState.EndArray do
110
+
let entryLen = reader.ReadStartArray()
111
+
112
+
if entryLen.HasValue && entryLen.Value <> 4 then
113
+
failwith "MST entry must be array of length 4"
114
+
115
+
let p = reader.ReadInt32()
116
+
let k = reader.ReadTextString()
117
+
let v = readCid reader
118
+
119
+
let t =
120
+
match reader.PeekState() with
121
+
| CborReaderState.Null ->
122
+
reader.ReadNull()
123
+
None
124
+
| _ -> Some(readCid reader)
125
+
126
+
reader.ReadEndArray()
127
+
128
+
entries <- entries @ [ { PrefixLen = p; KeySuffix = k; Value = v; Tree = t } ]
129
+
i <- i + 1
130
+
131
+
reader.ReadEndArray()
132
+
reader.ReadEndArray()
133
+
{ Left = left; Entries = entries }
134
+
135
+
let compareKeys (a : string) (b : string) : int =
136
+
let bytesA = System.Text.Encoding.UTF8.GetBytes(a)
137
+
let bytesB = System.Text.Encoding.UTF8.GetBytes(b)
138
+
let len = min bytesA.Length bytesB.Length
139
+
let mutable res = 0
140
+
let mutable i = 0
141
+
142
+
while res = 0 && i < len do
143
+
res <- bytesA.[i].CompareTo(bytesB.[i])
144
+
i <- i + 1
145
+
146
+
if res <> 0 then
147
+
res
148
+
else
149
+
bytesA.Length.CompareTo bytesB.Length
150
+
151
+
type NodeLoader = Cid -> Async<MstNode option>
152
+
type NodePersister = MstNode -> Async<Cid>
153
+
154
+
let storeNode (persister : NodePersister) (node : MstNode) : Async<Cid> = persister node
155
+
156
+
let rec get (loader : NodeLoader) (node : MstNode) (key : string) (prevKey : string) : Async<Cid option> = async {
157
+
let mutable currentKey = prevKey
158
+
let mutable foundEntry : MstEntry option = None
159
+
let mutable nextTree : Cid option = node.Left
160
+
let mutable stop = false
161
+
let mutable i = 0
162
+
163
+
while not stop && i < node.Entries.Length do
164
+
let e = node.Entries.[i]
165
+
166
+
let prefix =
167
+
if e.PrefixLen > currentKey.Length then
168
+
currentKey
169
+
else
170
+
currentKey.Substring(0, e.PrefixLen)
171
+
172
+
let fullKey = prefix + e.KeySuffix
173
+
let cmp = compareKeys key fullKey
174
+
175
+
if cmp = 0 then
176
+
foundEntry <- Some e
177
+
stop <- true
178
+
elif cmp < 0 then
179
+
stop <- true
180
+
else
181
+
nextTree <- e.Tree
182
+
currentKey <- fullKey
183
+
i <- i + 1
184
+
185
+
match foundEntry with
186
+
| Some e -> return Some e.Value
187
+
| None ->
188
+
match nextTree with
189
+
| None -> return None
190
+
| Some cid ->
191
+
let! child = loader cid
192
+
193
+
match child with
194
+
| Some cNode -> return! get loader cNode key currentKey
195
+
| None -> return None
196
+
}
197
+
198
+
let sharedPrefixLen (a : string) (b : string) =
199
+
let len = min a.Length b.Length
200
+
let mutable i = 0
201
+
202
+
while i < len && a.[i] = b.[i] do
203
+
i <- i + 1
204
+
205
+
i
206
+
207
+
let nodeLayer (node : MstNode) (prevKey : string) =
208
+
match node.Entries with
209
+
| [] -> -1
210
+
| first :: _ ->
211
+
let prefix =
212
+
if first.PrefixLen > prevKey.Length then
213
+
prevKey
214
+
else
215
+
prevKey.Substring(0, first.PrefixLen)
216
+
217
+
let fullKey = prefix + first.KeySuffix
218
+
layer fullKey
219
+
220
+
/// Splits a node around a key (which is assumed to be higher layer than any in the node).
221
+
///
222
+
/// Returns (LeftNode, RightNode).
223
+
let rec split
224
+
(loader : NodeLoader)
225
+
(persister : NodePersister)
226
+
(node : MstNode)
227
+
(key : string)
228
+
(prevKey : string)
229
+
: Async<MstNode * MstNode> =
230
+
async {
231
+
let mutable splitIdx = -1
232
+
let mutable found = false
233
+
let mutable currentKey = prevKey
234
+
let mutable i = 0
235
+
let entries = node.Entries
236
+
let mutable splitPrevKey = prevKey
237
+
238
+
while i < entries.Length && not found do
239
+
let e = entries.[i]
240
+
241
+
let prefix =
242
+
if e.PrefixLen > currentKey.Length then
243
+
currentKey
244
+
else
245
+
currentKey.Substring(0, e.PrefixLen)
246
+
247
+
let fullKey = prefix + e.KeySuffix
248
+
249
+
if compareKeys key fullKey < 0 then
250
+
splitIdx <- i
251
+
found <- true
252
+
else
253
+
currentKey <- fullKey
254
+
splitPrevKey <- currentKey
255
+
i <- i + 1
256
+
257
+
let childCidToSplit =
258
+
if found then
259
+
if splitIdx = 0 then
260
+
node.Left
261
+
else
262
+
entries.[splitIdx - 1].Tree
263
+
else if entries.Length = 0 then
264
+
node.Left
265
+
else
266
+
entries.[entries.Length - 1].Tree
267
+
268
+
let! (lChild, rChild) = async {
269
+
match childCidToSplit with
270
+
| None -> return ({ Left = None; Entries = [] }, { Left = None; Entries = [] })
271
+
| Some cid ->
272
+
let! childNodeOpt = loader cid
273
+
274
+
match childNodeOpt with
275
+
| None -> return ({ Left = None; Entries = [] }, { Left = None; Entries = [] })
276
+
| Some childNode -> return! split loader persister childNode key currentKey
277
+
}
278
+
279
+
let persistOrNone (n : MstNode) = async {
280
+
if n.Entries.IsEmpty && n.Left.IsNone then
281
+
return None
282
+
else
283
+
let! c = persister n
284
+
return Some c
285
+
}
286
+
287
+
let! lCid = persistOrNone lChild
288
+
let! rCid = persistOrNone rChild
289
+
let leftEntries = if found then entries |> List.take splitIdx else entries
290
+
291
+
let newLeftEntries =
292
+
if leftEntries.IsEmpty then
293
+
[]
294
+
else
295
+
let last = leftEntries.[leftEntries.Length - 1]
296
+
let newLast = { last with Tree = lCid }
297
+
(leftEntries |> List.take (leftEntries.Length - 1)) @ [ newLast ]
298
+
299
+
let leftNode =
300
+
if leftEntries.IsEmpty then
301
+
{ Left = lCid; Entries = [] }
302
+
else
303
+
{ Left = node.Left; Entries = newLeftEntries }
304
+
305
+
let rightEntries = if found then entries |> List.skip splitIdx else []
306
+
307
+
let newRightEntries =
308
+
match rightEntries with
309
+
| [] -> []
310
+
| first :: rest ->
311
+
let firstFullKey =
312
+
let prefix =
313
+
if first.PrefixLen > currentKey.Length then
314
+
currentKey
315
+
else
316
+
currentKey.Substring(0, first.PrefixLen)
317
+
318
+
prefix + first.KeySuffix
319
+
320
+
let newP = sharedPrefixLen key firstFullKey
321
+
let newSuffix = firstFullKey.Substring(newP)
322
+
let newFirst = { first with PrefixLen = newP; KeySuffix = newSuffix }
323
+
newFirst :: rest
324
+
325
+
let rightNode = { Left = rCid; Entries = newRightEntries }
326
+
return (leftNode, rightNode)
327
+
}
328
+
329
+
let rec put
330
+
(loader : NodeLoader)
331
+
(persister : NodePersister)
332
+
(node : MstNode)
333
+
(key : string)
334
+
(value : Cid)
335
+
(prevKey : string)
336
+
: Async<MstNode> =
337
+
async {
338
+
let kLayer = layer key
339
+
340
+
let nLayer =
341
+
match node.Entries with
342
+
| [] -> -1
343
+
| first :: _ ->
344
+
let prefix =
345
+
if first.PrefixLen > prevKey.Length then
346
+
prevKey
347
+
else
348
+
prevKey.Substring(0, first.PrefixLen)
349
+
350
+
let fullKey = prefix + first.KeySuffix
351
+
layer fullKey
352
+
353
+
if kLayer > nLayer then
354
+
let! (lNode, rNode) = split loader persister node key prevKey
355
+
356
+
let persistOrNone (n : MstNode) = async {
357
+
if n.Entries.IsEmpty && n.Left.IsNone then
358
+
return None
359
+
else
360
+
let! c = persister n
361
+
return Some c
362
+
}
363
+
364
+
let! lCid = persistOrNone lNode
365
+
let! rCid = persistOrNone rNode
366
+
367
+
let p = sharedPrefixLen prevKey key
368
+
let suffix = key.Substring(p)
369
+
370
+
return {
371
+
Left = lCid
372
+
Entries = [
373
+
{
374
+
PrefixLen = p
375
+
KeySuffix = suffix
376
+
Value = value
377
+
Tree = rCid
378
+
}
379
+
]
380
+
}
381
+
382
+
elif kLayer < nLayer then
383
+
let mutable nextCid = node.Left
384
+
let mutable currentKey = prevKey
385
+
let mutable found = false
386
+
let mutable i = 0
387
+
388
+
let entries = node.Entries
389
+
let mutable childIdx = -1
390
+
391
+
while i < entries.Length && not found do
392
+
let e = entries.[i]
393
+
394
+
let prefix =
395
+
if e.PrefixLen > currentKey.Length then
396
+
currentKey
397
+
else
398
+
currentKey.Substring(0, e.PrefixLen)
399
+
400
+
let fullKey = prefix + e.KeySuffix
401
+
402
+
if compareKeys key fullKey < 0 then
403
+
found <- true
404
+
else
405
+
childIdx <- i
406
+
nextCid <- e.Tree
407
+
currentKey <- fullKey
408
+
i <- i + 1
409
+
410
+
let! childNode =
411
+
match nextCid with
412
+
| Some c -> loader c
413
+
| None -> async { return Some { Left = None; Entries = [] } }
414
+
415
+
match childNode with
416
+
| None -> return failwith "Failed to load child node"
417
+
| Some cn ->
418
+
let! newChildNode = put loader persister cn key value currentKey
419
+
let! newChildCid = persister newChildNode
420
+
421
+
if childIdx = -1 then
422
+
return { node with Left = Some newChildCid }
423
+
else
424
+
return {
425
+
node with
426
+
Entries =
427
+
entries
428
+
|> List.mapi (fun idx x ->
429
+
if idx = childIdx then
430
+
{ entries.[childIdx] with Tree = Some newChildCid }
431
+
else
432
+
x)
433
+
}
434
+
435
+
else
436
+
let mutable insertIdx = -1
437
+
let mutable found = false
438
+
let mutable currentKey = prevKey
439
+
let mutable i = 0
440
+
let mutable targetChildCid = None
441
+
let mutable childPrevKey = prevKey
442
+
443
+
let fullKeysCache = new List<string>()
444
+
445
+
while i < node.Entries.Length && not found do
446
+
let e = node.Entries.[i]
447
+
448
+
let prefix =
449
+
if e.PrefixLen > currentKey.Length then
450
+
currentKey
451
+
else
452
+
currentKey.Substring(0, e.PrefixLen)
453
+
454
+
let fullKey = prefix + e.KeySuffix
455
+
fullKeysCache.Add(fullKey)
456
+
457
+
let cmp = compareKeys key fullKey
458
+
459
+
if cmp = 0 then
460
+
insertIdx <- i
461
+
found <- true
462
+
elif cmp < 0 then
463
+
insertIdx <- i
464
+
targetChildCid <- if i = 0 then node.Left else node.Entries.[i - 1].Tree
465
+
childPrevKey <- currentKey
466
+
found <- true
467
+
else
468
+
currentKey <- fullKey
469
+
i <- i + 1
470
+
471
+
if not found then
472
+
insertIdx <- node.Entries.Length
473
+
474
+
targetChildCid <-
475
+
if node.Entries.Length = 0 then
476
+
node.Left
477
+
else
478
+
node.Entries.[node.Entries.Length - 1].Tree
479
+
480
+
childPrevKey <- currentKey
481
+
482
+
let entries = node.Entries
483
+
484
+
if found && compareKeys key fullKeysCache.[insertIdx] = 0 then
485
+
let e = entries.[insertIdx]
486
+
let newE = { e with Value = value }
487
+
488
+
let newEntries =
489
+
entries |> List.mapi (fun idx x -> if idx = insertIdx then newE else x)
490
+
491
+
return { node with Entries = newEntries }
492
+
else
493
+
let! (lChild, rChild) = async {
494
+
match targetChildCid with
495
+
| None -> return { Left = None; Entries = [] }, { Left = None; Entries = [] }
496
+
| Some cid ->
497
+
let! cOpt = loader cid
498
+
499
+
match cOpt with
500
+
| None -> return { Left = None; Entries = [] }, { Left = None; Entries = [] }
501
+
| Some c -> return! split loader persister c key childPrevKey
502
+
}
503
+
504
+
let persistOrNone (n : MstNode) = async {
505
+
if n.Entries.IsEmpty && n.Left.IsNone then
506
+
return None
507
+
else
508
+
let! c = persister n
509
+
return Some c
510
+
}
511
+
512
+
let! lCid = persistOrNone lChild
513
+
let! rCid = persistOrNone rChild
514
+
515
+
let beforeEntries =
516
+
if insertIdx = 0 then
517
+
[]
518
+
else
519
+
let prevE = entries.[insertIdx - 1]
520
+
let newPrevE = { prevE with Tree = lCid }
521
+
(entries |> List.take (insertIdx - 1)) @ [ newPrevE ]
522
+
523
+
let newLeft = if insertIdx = 0 then lCid else node.Left
524
+
let getFullKey idx = fullKeysCache.[idx]
525
+
let p = sharedPrefixLen childPrevKey key
526
+
let suffix = key.Substring(p)
527
+
528
+
let newEntry = {
529
+
PrefixLen = p
530
+
KeySuffix = suffix
531
+
Value = value
532
+
Tree = rCid
533
+
}
534
+
535
+
let afterEntries =
536
+
if insertIdx >= entries.Length then
537
+
[]
538
+
else
539
+
let first = entries.[insertIdx]
540
+
let firstFullKey = getFullKey insertIdx
541
+
542
+
let newP = sharedPrefixLen key firstFullKey
543
+
let newS = firstFullKey.Substring(newP)
544
+
let newFirst = { first with PrefixLen = newP; KeySuffix = newS }
545
+
546
+
[ newFirst ] @ (entries |> List.skip (insertIdx + 1))
547
+
548
+
let newEntries = beforeEntries @ [ newEntry ] @ afterEntries
549
+
550
+
return { Left = newLeft; Entries = newEntries }
551
+
}
552
+
553
+
// --- Merge Operation ---
554
+
let rec merge
555
+
(loader : NodeLoader)
556
+
(persister : NodePersister)
557
+
(leftCid : Cid option)
558
+
(rightCid : Cid option)
559
+
(prevKey : string)
560
+
: Async<MstNode> =
561
+
async {
562
+
match leftCid, rightCid with
563
+
| None, None -> return { Left = None; Entries = [] }
564
+
| Some l, None ->
565
+
let! n = loader l
566
+
return n |> Option.defaultValue { Left = None; Entries = [] }
567
+
| None, Some r ->
568
+
let! n = loader r
569
+
return n |> Option.defaultValue { Left = None; Entries = [] }
570
+
| Some l, Some r ->
571
+
let! lNodeOpt = loader l
572
+
let! rNodeOpt = loader r
573
+
let lNode = lNodeOpt |> Option.defaultValue { Left = None; Entries = [] }
574
+
let rNode = rNodeOpt |> Option.defaultValue { Left = None; Entries = [] }
575
+
576
+
let lLayer = nodeLayer lNode prevKey
577
+
let mutable current = prevKey
578
+
579
+
for e in lNode.Entries do
580
+
let p =
581
+
if e.PrefixLen > current.Length then
582
+
current
583
+
else
584
+
current.Substring(0, e.PrefixLen)
585
+
586
+
current <- p + e.KeySuffix
587
+
588
+
let rightPrevKey = current
589
+
590
+
let realLLayer = nodeLayer lNode prevKey
591
+
let realRLayer = nodeLayer rNode rightPrevKey
592
+
593
+
if realLLayer > realRLayer then
594
+
match lNode.Entries with
595
+
| [] -> return! merge loader persister lNode.Left rightCid prevKey
596
+
| entries ->
597
+
let lastIdx = entries.Length - 1
598
+
let lastEntry = entries.[lastIdx]
599
+
600
+
let! mergedChild = merge loader persister lastEntry.Tree rightCid rightPrevKey
601
+
let! mergedCid = persister mergedChild
602
+
603
+
let newEntry = { lastEntry with Tree = Some mergedCid }
604
+
let newEntries = (entries |> List.take lastIdx) @ [ newEntry ]
605
+
return { lNode with Entries = newEntries }
606
+
607
+
elif realRLayer > realLLayer then
608
+
match rNode.Entries with
609
+
| [] -> return! merge loader persister leftCid rNode.Left prevKey
610
+
| _ ->
611
+
let! mergedChild = merge loader persister leftCid rNode.Left prevKey
612
+
let! mergedCid = persister mergedChild
613
+
614
+
return { rNode with Left = Some mergedCid }
615
+
616
+
else
617
+
let boundaryL =
618
+
match lNode.Entries with
619
+
| [] -> lNode.Left
620
+
| es -> es.[es.Length - 1].Tree
621
+
622
+
let boundaryR = rNode.Left
623
+
624
+
let! mergedBoundaryNode = merge loader persister boundaryL boundaryR rightPrevKey
625
+
let! mergedBoundaryCid = persister mergedBoundaryNode
626
+
627
+
let newEntries =
628
+
match lNode.Entries with
629
+
| [] -> rNode.Entries
630
+
| lEntries ->
631
+
let lastIdx = lEntries.Length - 1
632
+
let lastE = lEntries.[lastIdx]
633
+
let newLastE = { lastE with Tree = Some mergedBoundaryCid }
634
+
(lEntries |> List.take lastIdx) @ [ newLastE ] @ rNode.Entries
635
+
636
+
let newLeft =
637
+
if lNode.Entries.IsEmpty then
638
+
Some mergedBoundaryCid
639
+
else
640
+
lNode.Left
641
+
642
+
return { Left = newLeft; Entries = newEntries }
643
+
}
644
+
645
+
let rec delete
646
+
(loader : NodeLoader)
647
+
(persister : NodePersister)
648
+
(node : MstNode)
649
+
(key : string)
650
+
(prevKey : string)
651
+
: Async<MstNode option> =
652
+
async {
653
+
let mutable currentKey = prevKey
654
+
let mutable foundIdx = -1
655
+
let mutable nextTreeIdx = -1
656
+
let mutable i = 0
657
+
let mutable found = false
658
+
659
+
while i < node.Entries.Length && not found do
660
+
let e = node.Entries.[i]
661
+
662
+
let prefix =
663
+
if e.PrefixLen > currentKey.Length then
664
+
currentKey
665
+
else
666
+
currentKey.Substring(0, e.PrefixLen)
667
+
668
+
let fullKey = prefix + e.KeySuffix
669
+
670
+
let cmp = compareKeys key fullKey
671
+
672
+
if cmp = 0 then
673
+
foundIdx <- i
674
+
found <- true
675
+
elif cmp < 0 then
676
+
found <- true
677
+
nextTreeIdx <- i - 1
678
+
else
679
+
currentKey <- fullKey
680
+
i <- i + 1
681
+
682
+
if not found then
683
+
nextTreeIdx <- node.Entries.Length - 1
684
+
685
+
if foundIdx <> -1 then
686
+
let e = node.Entries.[foundIdx]
687
+
688
+
let leftChildCid =
689
+
if foundIdx = 0 then
690
+
node.Left
691
+
else
692
+
node.Entries.[foundIdx - 1].Tree
693
+
694
+
let rightChildCid = e.Tree
695
+
696
+
let mergePrevKey = if foundIdx = 0 then prevKey else currentKey
697
+
698
+
let! mergedChildNode = merge loader persister leftChildCid rightChildCid mergePrevKey
699
+
let! mergedChildCid = persister mergedChildNode
700
+
701
+
let newEntries =
702
+
if foundIdx = 0 then
703
+
let rest = node.Entries |> List.skip 1
704
+
705
+
match rest with
706
+
| [] -> []
707
+
| first :: rs ->
708
+
let firstFullKey =
709
+
let oldP =
710
+
if first.PrefixLen > key.Length then
711
+
key
712
+
else
713
+
key.Substring(0, first.PrefixLen)
714
+
715
+
oldP + first.KeySuffix
716
+
717
+
let newP = sharedPrefixLen prevKey firstFullKey
718
+
let newSuffix = firstFullKey.Substring(newP)
719
+
let newFirst = { first with PrefixLen = newP; KeySuffix = newSuffix }
720
+
newFirst :: rs
721
+
else
722
+
let before = node.Entries |> List.take (foundIdx - 1)
723
+
let prevE = node.Entries.[foundIdx - 1]
724
+
let newPrevE = { prevE with Tree = Some mergedChildCid }
725
+
726
+
let rest = node.Entries |> List.skip (foundIdx + 1)
727
+
728
+
let newRest =
729
+
match rest with
730
+
| [] -> []
731
+
| first :: rs ->
732
+
let firstFullKey =
733
+
let oldP =
734
+
if first.PrefixLen > key.Length then
735
+
key
736
+
else
737
+
key.Substring(0, first.PrefixLen)
738
+
739
+
oldP + first.KeySuffix
740
+
741
+
let newP = sharedPrefixLen mergePrevKey firstFullKey
742
+
let newSuffix = firstFullKey.Substring(newP)
743
+
let newFirst = { first with PrefixLen = newP; KeySuffix = newSuffix }
744
+
newFirst :: rs
745
+
746
+
before @ [ newPrevE ] @ newRest
747
+
748
+
let newLeft = if foundIdx = 0 then Some mergedChildCid else node.Left
749
+
750
+
let newNode = { Left = newLeft; Entries = newEntries }
751
+
752
+
if newNode.Entries.IsEmpty && newNode.Left.IsNone then
753
+
return None
754
+
else
755
+
return Some newNode
756
+
757
+
else
758
+
let childCid =
759
+
if nextTreeIdx = -1 then
760
+
node.Left
761
+
else
762
+
node.Entries.[nextTreeIdx].Tree
763
+
764
+
match childCid with
765
+
| None -> return Some node
766
+
| Some cid ->
767
+
let! childNodeOpt = loader cid
768
+
769
+
match childNodeOpt with
770
+
| None -> return Some node
771
+
| Some childNode ->
772
+
let! newChildOpt = delete loader persister childNode key currentKey
773
+
774
+
match newChildOpt with
775
+
| None ->
776
+
if nextTreeIdx = -1 then
777
+
return Some { node with Left = None }
778
+
else
779
+
let prevE = node.Entries.[nextTreeIdx]
780
+
let newPrevE = { prevE with Tree = None }
781
+
782
+
let newEntries =
783
+
node.Entries
784
+
|> List.mapi (fun idx x -> if idx = nextTreeIdx then newPrevE else x)
785
+
786
+
return Some { node with Entries = newEntries }
787
+
| Some newChild ->
788
+
let! newChildCid = persister newChild
789
+
790
+
if nextTreeIdx = -1 then
791
+
return Some { node with Left = Some newChildCid }
792
+
else
793
+
let prevE = node.Entries.[nextTreeIdx]
794
+
let newPrevE = { prevE with Tree = Some newChildCid }
795
+
796
+
let newEntries =
797
+
node.Entries
798
+
|> List.mapi (fun idx x -> if idx = nextTreeIdx then newPrevE else x)
799
+
800
+
return Some { node with Entries = newEntries }
801
+
}
802
+
803
+
let fromEntries (loader : NodeLoader) (persister : NodePersister) (entries : (string * Cid) list) : Async<MstNode> = async {
804
+
let mutable root = { Left = None; Entries = [] }
805
+
let sortedEntries = entries |> List.sortBy fst
806
+
807
+
for k, v in sortedEntries do
808
+
let! newRoot = put loader persister root k v ""
809
+
root <- newRoot
810
+
811
+
return root
812
+
}
+24
-3
PDSharp.Core/PDSharp.Core.fsproj
+24
-3
PDSharp.Core/PDSharp.Core.fsproj
···
1
1
<Project Sdk="Microsoft.NET.Sdk">
2
-
3
2
<PropertyGroup>
4
3
<TargetFramework>net9.0</TargetFramework>
5
4
<GenerateDocumentationFile>true</GenerateDocumentationFile>
6
5
</PropertyGroup>
7
6
8
7
<ItemGroup>
9
-
<Compile Include="Config.fs"/>
10
-
<Compile Include="Library.fs"/>
8
+
<Compile Include="Config.fs" />
9
+
<Compile Include="Cid.fs" />
10
+
<Compile Include="DagCbor.fs" />
11
+
<Compile Include="Crypto.fs" />
12
+
<Compile Include="Mst.fs" />
13
+
<Compile Include="BlockStore.fs" />
14
+
<Compile Include="BlobStore.fs" />
15
+
<Compile Include="Car.fs" />
16
+
<Compile Include="AtUri.fs" />
17
+
<Compile Include="Repository.fs" />
18
+
<Compile Include="Auth.fs" />
19
+
<Compile Include="SqliteStore.fs" />
20
+
<Compile Include="Firehose.fs" />
21
+
<Compile Include="DidResolver.fs" />
22
+
<Compile Include="Lexicon.fs" />
23
+
<Compile Include="Health.fs" />
24
+
<Compile Include="Library.fs" />
11
25
</ItemGroup>
12
26
27
+
<ItemGroup>
28
+
<PackageReference Include="AWSSDK.S3" Version="4.0.16" />
29
+
<PackageReference Include="BouncyCastle.Cryptography" Version="2.6.2" />
30
+
<PackageReference Include="Dapper" Version="2.1.66" />
31
+
<PackageReference Include="Microsoft.Data.Sqlite" Version="10.0.1" />
32
+
<PackageReference Include="System.Formats.Cbor" Version="10.0.1" />
33
+
</ItemGroup>
13
34
</Project>
+123
PDSharp.Core/Repository.fs
+123
PDSharp.Core/Repository.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
5
+
/// Repository commit signing and management
6
+
module Repository =
7
+
/// TID (Timestamp ID) generation for revision IDs
8
+
module Tid =
9
+
let private chars = "234567abcdefghijklmnopqrstuvwxyz"
10
+
let private clockIdBits = 10
11
+
let private timestampBits = 53
12
+
13
+
/// Generate a random clock ID component
14
+
let private randomClockId () =
15
+
let rng = Random()
16
+
rng.Next(1 <<< clockIdBits)
17
+
18
+
/// Encode a number to base32 sortable string
19
+
let private encode (value : int64) (length : int) =
20
+
let mutable v = value
21
+
let arr = Array.zeroCreate<char> length
22
+
23
+
for i in (length - 1) .. -1 .. 0 do
24
+
arr.[i] <- chars.[int (v &&& 0x1FL)]
25
+
v <- v >>> 5
26
+
27
+
String(arr)
28
+
29
+
/// Generate a new TID based on current timestamp
30
+
let generate () : string =
31
+
let timestamp = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds()
32
+
let clockId = randomClockId ()
33
+
let combined = (timestamp <<< clockIdBits) ||| int64 clockId
34
+
encode combined 13
35
+
36
+
/// Unsigned commit record (before signing)
37
+
type UnsignedCommit = {
38
+
Did : string
39
+
Version : int
40
+
Data : Cid
41
+
Rev : string
42
+
Prev : Cid option
43
+
}
44
+
45
+
/// Signed commit record
46
+
type SignedCommit = {
47
+
Did : string
48
+
Version : int
49
+
Data : Cid
50
+
Rev : string
51
+
Prev : Cid option
52
+
Sig : byte[]
53
+
}
54
+
55
+
/// Convert unsigned commit to CBOR-encodable map
56
+
let private unsignedToCborMap (commit : UnsignedCommit) : Map<string, obj> =
57
+
let baseMap =
58
+
Map.ofList [
59
+
("did", box commit.Did)
60
+
("version", box commit.Version)
61
+
("data", box commit.Data)
62
+
("rev", box commit.Rev)
63
+
]
64
+
65
+
match commit.Prev with
66
+
| Some prev -> baseMap |> Map.add "prev" (box prev)
67
+
| None -> baseMap
68
+
69
+
/// Sign an unsigned commit
70
+
let signCommit (key : Crypto.EcKeyPair) (commit : UnsignedCommit) : SignedCommit =
71
+
let cborMap = unsignedToCborMap commit
72
+
let cborBytes = DagCbor.encode cborMap
73
+
let hash = Crypto.sha256 cborBytes
74
+
let signature = Crypto.sign key hash
75
+
76
+
{
77
+
Did = commit.Did
78
+
Version = commit.Version
79
+
Data = commit.Data
80
+
Rev = commit.Rev
81
+
Prev = commit.Prev
82
+
Sig = signature
83
+
}
84
+
85
+
/// Verify a signed commit's signature
86
+
let verifyCommit (key : Crypto.EcKeyPair) (commit : SignedCommit) : bool =
87
+
let unsigned = {
88
+
Did = commit.Did
89
+
Version = commit.Version
90
+
Data = commit.Data
91
+
Rev = commit.Rev
92
+
Prev = commit.Prev
93
+
}
94
+
95
+
let cborMap = unsignedToCborMap unsigned
96
+
let cborBytes = DagCbor.encode cborMap
97
+
let hash = Crypto.sha256 cborBytes
98
+
Crypto.verify key hash commit.Sig
99
+
100
+
/// Convert signed commit to CBOR-encodable map
101
+
let signedToCborMap (commit : SignedCommit) : Map<string, obj> =
102
+
let baseMap =
103
+
Map.ofList [
104
+
("did", box commit.Did)
105
+
("version", box commit.Version)
106
+
("data", box commit.Data)
107
+
("rev", box commit.Rev)
108
+
("sig", box commit.Sig)
109
+
]
110
+
111
+
match commit.Prev with
112
+
| Some prev -> baseMap |> Map.add "prev" (box prev)
113
+
| None -> baseMap
114
+
115
+
/// Serialize a signed commit to DAG-CBOR bytes
116
+
let serializeCommit (commit : SignedCommit) : byte[] =
117
+
signedToCborMap commit |> DagCbor.encode
118
+
119
+
/// Get CID for a signed commit
120
+
let commitCid (commit : SignedCommit) : Cid =
121
+
let bytes = serializeCommit commit
122
+
let hash = Crypto.sha256 bytes
123
+
Cid.FromHash hash
+256
PDSharp.Core/SqliteStore.fs
+256
PDSharp.Core/SqliteStore.fs
···
1
+
namespace PDSharp.Core
2
+
3
+
open System
4
+
open System.IO
5
+
open Microsoft.Data.Sqlite
6
+
open Dapper
7
+
open PDSharp.Core.BlockStore
8
+
open PDSharp.Core.Auth
9
+
open System.Threading.Tasks
10
+
open PDSharp.Core.Config
11
+
12
+
/// SQLite persistence layer
13
+
module SqliteStore =
14
+
15
+
/// Initialize the database schema
16
+
let initialize (config : AppConfig) =
17
+
use conn = new SqliteConnection(config.SqliteConnectionString)
18
+
19
+
conn.Open()
20
+
conn.Execute("PRAGMA journal_mode=WAL;") |> ignore
21
+
22
+
if config.DisableWalAutoCheckpoint then
23
+
conn.Execute("PRAGMA wal_autocheckpoint=0;") |> ignore
24
+
25
+
// TODO: fast, slightly less safe. Keep default (FULL) for now.
26
+
// conn.Execute("PRAGMA synchronous=NORMAL;") |> ignore
27
+
28
+
conn.Execute(
29
+
"""
30
+
CREATE TABLE IF NOT EXISTS blocks (
31
+
cid TEXT PRIMARY KEY,
32
+
data BLOB NOT NULL
33
+
);
34
+
"""
35
+
)
36
+
|> ignore
37
+
38
+
39
+
conn.Execute(
40
+
"""
41
+
CREATE TABLE IF NOT EXISTS accounts (
42
+
did TEXT PRIMARY KEY,
43
+
handle TEXT NOT NULL UNIQUE,
44
+
password_hash TEXT NOT NULL,
45
+
email TEXT,
46
+
created_at TEXT NOT NULL
47
+
);
48
+
"""
49
+
)
50
+
|> ignore
51
+
52
+
conn.Execute(
53
+
"""
54
+
CREATE TABLE IF NOT EXISTS repos (
55
+
did TEXT PRIMARY KEY,
56
+
rev TEXT NOT NULL,
57
+
mst_root_cid TEXT NOT NULL,
58
+
head_cid TEXT,
59
+
collections_json TEXT -- Just store serialized collection map for now
60
+
);
61
+
"""
62
+
)
63
+
|> ignore
64
+
65
+
conn.Execute(
66
+
"""
67
+
CREATE TABLE IF NOT EXISTS signing_keys (
68
+
did TEXT PRIMARY KEY,
69
+
k TEXT NOT NULL -- Hex encoded private key D
70
+
);
71
+
"""
72
+
)
73
+
|> ignore
74
+
75
+
/// DTOs for Sqlite Mapping
76
+
type RepoRow = {
77
+
did : string
78
+
rev : string
79
+
mst_root_cid : string
80
+
head_cid : string
81
+
collections_json : string
82
+
}
83
+
84
+
type BlockRow = { cid : string; data : byte[] }
85
+
86
+
/// DTO for account rows with nullable email
87
+
[<CLIMutable>]
88
+
type AccountRow = {
89
+
did : string
90
+
handle : string
91
+
password_hash : string
92
+
email : string // Nullable in DB, null becomes null here
93
+
created_at : string
94
+
}
95
+
96
+
let private toAccount (row : AccountRow) : Account = {
97
+
Did = row.did
98
+
Handle = row.handle
99
+
PasswordHash = row.password_hash
100
+
Email = if isNull row.email then None else Some row.email
101
+
CreatedAt = DateTimeOffset.Parse row.created_at
102
+
}
103
+
104
+
type IRepoStore =
105
+
abstract member GetRepo : string -> Async<RepoRow option>
106
+
abstract member SaveRepo : RepoRow -> Async<unit>
107
+
108
+
type SqliteBlockStore(connectionString : string) =
109
+
interface IBlockStore with
110
+
member _.Put(data : byte[]) = async {
111
+
let hash = Crypto.sha256 data
112
+
let cid = Cid.FromHash hash
113
+
let cidStr = cid.ToString()
114
+
115
+
use conn = new SqliteConnection(connectionString)
116
+
117
+
let! _ =
118
+
conn.ExecuteAsync(
119
+
"INSERT OR IGNORE INTO blocks (cid, data) VALUES (@cid, @data)",
120
+
{| cid = cidStr; data = data |}
121
+
)
122
+
|> Async.AwaitTask
123
+
124
+
return cid
125
+
}
126
+
127
+
member _.Get(cid : Cid) = async {
128
+
use conn = new SqliteConnection(connectionString)
129
+
130
+
let! result =
131
+
conn.QuerySingleOrDefaultAsync<byte[]>("SELECT data FROM blocks WHERE cid = @cid", {| cid = cid.ToString() |})
132
+
|> Async.AwaitTask
133
+
134
+
if isNull result then return None else return Some result
135
+
}
136
+
137
+
member _.Has(cid : Cid) = async {
138
+
use conn = new SqliteConnection(connectionString)
139
+
140
+
let! count =
141
+
conn.ExecuteScalarAsync<int>("SELECT COUNT(1) FROM blocks WHERE cid = @cid", {| cid = cid.ToString() |})
142
+
|> Async.AwaitTask
143
+
144
+
return count > 0
145
+
}
146
+
147
+
member _.GetAllCidsAndData() = async {
148
+
use conn = new SqliteConnection(connectionString)
149
+
let! rows = conn.QueryAsync<BlockRow>("SELECT cid, data FROM blocks") |> Async.AwaitTask
150
+
151
+
return
152
+
rows
153
+
|> Seq.map (fun r -> (r.cid, r.data))
154
+
|> Seq.choose (fun (cidStr, data) ->
155
+
match Cid.TryParse cidStr with
156
+
| Some c -> Some(c, data)
157
+
| None -> None)
158
+
|> Seq.toList
159
+
}
160
+
161
+
type SqliteAccountStore(connectionString : string) =
162
+
interface IAccountStore with
163
+
member _.CreateAccount(account : Account) = async {
164
+
use conn = new SqliteConnection(connectionString)
165
+
166
+
try
167
+
let emailValue = account.Email |> Option.toObj
168
+
let createdAtStr = account.CreatedAt.ToString "o"
169
+
170
+
let! _ =
171
+
conn.ExecuteAsync(
172
+
"""
173
+
INSERT INTO accounts (did, handle, password_hash, email, created_at)
174
+
VALUES (@Did, @Handle, @PasswordHash, @Email, @CreatedAt)
175
+
""",
176
+
{|
177
+
Did = account.Did
178
+
Handle = account.Handle
179
+
PasswordHash = account.PasswordHash
180
+
Email = emailValue
181
+
CreatedAt = createdAtStr
182
+
|}
183
+
)
184
+
|> Async.AwaitTask
185
+
186
+
return Ok()
187
+
with
188
+
| :? SqliteException as ex when ex.SqliteErrorCode = 19 -> // Constraint violation
189
+
return Error "Account already exists (handle or DID taken)"
190
+
| ex -> return Error ex.Message
191
+
}
192
+
193
+
member _.GetAccountByHandle(handle : string) = async {
194
+
use conn = new SqliteConnection(connectionString)
195
+
196
+
let! result =
197
+
conn.QuerySingleOrDefaultAsync<AccountRow>(
198
+
"SELECT * FROM accounts WHERE handle = @handle",
199
+
{| handle = handle |}
200
+
)
201
+
|> Async.AwaitTask
202
+
203
+
if isNull (box result) then
204
+
return None
205
+
else
206
+
return Some(toAccount result)
207
+
}
208
+
209
+
member _.GetAccountByDid(did : string) = async {
210
+
use conn = new SqliteConnection(connectionString)
211
+
212
+
let! result =
213
+
conn.QuerySingleOrDefaultAsync<AccountRow>("SELECT * FROM accounts WHERE did = @did", {| did = did |})
214
+
|> Async.AwaitTask
215
+
216
+
if isNull (box result) then
217
+
return None
218
+
else
219
+
return Some(toAccount result)
220
+
}
221
+
222
+
type SqliteRepoStore(connectionString : string) =
223
+
interface IRepoStore with
224
+
member _.GetRepo(did : string) : Async<RepoRow option> = async {
225
+
use conn = new SqliteConnection(connectionString)
226
+
227
+
let! result =
228
+
conn.QuerySingleOrDefaultAsync<RepoRow>("SELECT * FROM repos WHERE did = @did", {| did = did |})
229
+
|> Async.AwaitTask
230
+
231
+
if isNull (box result) then
232
+
return None
233
+
else
234
+
return Some result
235
+
}
236
+
237
+
member _.SaveRepo(repo : RepoRow) : Async<unit> = async {
238
+
use conn = new SqliteConnection(connectionString)
239
+
240
+
let! _ =
241
+
conn.ExecuteAsync(
242
+
"""
243
+
INSERT INTO repos (did, rev, mst_root_cid, head_cid, collections_json)
244
+
VALUES (@did, @rev, @mst_root_cid, @head_cid, @collections_json)
245
+
ON CONFLICT(did) DO UPDATE SET
246
+
rev = @rev,
247
+
mst_root_cid = @mst_root_cid,
248
+
head_cid = @head_cid,
249
+
collections_json = @collections_json
250
+
""",
251
+
repo
252
+
)
253
+
|> Async.AwaitTask
254
+
255
+
()
256
+
}
+77
PDSharp.Docs/auth.md
+77
PDSharp.Docs/auth.md
···
1
+
# AT Protocol Session & Account Authentication
2
+
3
+
## Session Authentication (Legacy Bearer JWT)
4
+
5
+
Based on the [XRPC Spec](https://atproto.com/specs/xrpc#authentication):
6
+
7
+
### Token Types
8
+
9
+
| Token | JWT `typ` Header | Lifetime | Purpose |
10
+
| ------------- | ---------------- | --------------------------- | ------------------------------ |
11
+
| Access Token | `at+jwt` | Short (~2min refresh cycle) | Authenticate most API requests |
12
+
| Refresh Token | `refresh+jwt` | Longer (~2 months) | Obtain new access tokens |
13
+
14
+
### Endpoints
15
+
16
+
- **`createSession`**: Login with identifier (handle/email) + password → returns `{accessJwt, refreshJwt, handle, did}`
17
+
- **`refreshSession`**: Uses refresh JWT in Bearer header → returns new `{accessJwt, refreshJwt, handle, did}`
18
+
- **`createAccount`**: Register new account → returns session tokens + creates DID
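
A minimal F# sketch of calling `createSession` over XRPC (the handle, password, and base URL below are placeholders; the PDS is assumed to listen on `http://localhost:5000` as elsewhere in this repo):

```fsharp
open System.Net.Http
open System.Text
open System.Text.Json

// Sketch: log in against a running PDS and return the raw session JSON.
let createSession (baseUrl : string) (identifier : string) (password : string) = async {
    use client = new HttpClient()
    let payload = JsonSerializer.Serialize {| identifier = identifier; password = password |}
    use content = new StringContent(payload, Encoding.UTF8, "application/json")

    let! response =
        client.PostAsync($"{baseUrl}/xrpc/com.atproto.server.createSession", content)
        |> Async.AwaitTask

    let! body = response.Content.ReadAsStringAsync() |> Async.AwaitTask
    return response.IsSuccessStatusCode, body
}

// createSession "http://localhost:5000" "alice.example.com" "hunter2" |> Async.RunSynchronously
```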
19
+
20
+
### JWT Claims (Server-Generated)
21
+
22
+
Servers should implement **domain separation** using the `typ` header field:
23
+
24
+
- Access: `typ: at+jwt` (per [RFC 9068](https://www.rfc-editor.org/rfc/rfc9068.html))
25
+
- Refresh: `typ: refresh+jwt`
26
+
27
+
Standard JWT claims: `sub` (DID), `iat`, `exp`, `jti` (nonce)
28
+
29
+
### Configuration Required
30
+
31
+
Yes, JWT signing requires a **secret key** for HMAC-SHA256 (HS256). This should be:
32
+
33
+
- Loaded from configuration/environment variable (e.g., `PDS_JWT_SECRET`)
34
+
- At least 32 bytes of cryptographically random data
35
+
- Never hardcoded or committed to source control
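
As a point of reference, an HS256 token with the claims above can be minted with nothing beyond the BCL. This is a minimal sketch, not the repo's actual token code (which lives in `PDSharp.Core.Auth`); the secret handling and lifetime are illustrative:

```fsharp
open System
open System.Security.Cryptography
open System.Text
open System.Text.Json

let base64Url (bytes : byte[]) =
    Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_')

// Build "<header>.<payload>.<signature>" with typ: at+jwt and the claims listed above.
let mintAccessToken (secret : string) (did : string) (lifetime : TimeSpan) =
    let header = JsonSerializer.Serialize {| alg = "HS256"; typ = "at+jwt" |}
    let now = DateTimeOffset.UtcNow

    let payload =
        JsonSerializer.Serialize
            {|
                sub = did
                iat = now.ToUnixTimeSeconds()
                exp = now.Add(lifetime).ToUnixTimeSeconds()
                jti = Guid.NewGuid().ToString("N")
            |}

    let signingInput =
        base64Url (Encoding.UTF8.GetBytes header) + "." + base64Url (Encoding.UTF8.GetBytes payload)

    use hmac = new HMACSHA256(Encoding.UTF8.GetBytes secret)
    signingInput + "." + base64Url (hmac.ComputeHash(Encoding.UTF8.GetBytes signingInput))
```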
36
+
37
+
## Account Storage
38
+
39
+
### Reference PDS Approach
40
+
41
+
The Bluesky reference PDS uses:
42
+
43
+
- **SQLite database per user** (recent architecture)
44
+
- `account.sqlite` contains: handle, email, DID, password hash
45
+
- Accounts indexed by DID (primary) and handle (unique)
46
+
47
+
## App Passwords
48
+
49
+
App passwords are a security feature allowing restricted access:
50
+
51
+
- Format: `xxxx-xxxx-xxxx-xxxx`
52
+
- Created/revoked independently from main password
53
+
- Grants limited permissions (no auth settings changes)
54
+
55
+
## Inter-Service Auth (Different from Session Auth)
56
+
57
+
Service-to-service requests use a different mechanism:
58
+
59
+
- Uses **asymmetric signing** (ES256/ES256K) with account's signing key
60
+
- Short-lived tokens (~60sec)
61
+
- Validated against DID document
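
A sketch of what minting such a token could look like on top of this repo's `Crypto` module (the claim names, lifetime, and ES256K signature layout follow the service-auth convention but should be checked against the spec before use):

```fsharp
open System
open System.Text
open System.Text.Json
open PDSharp.Core

let base64Url (bytes : byte[]) =
    Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_')

// Short-lived service token signed with the account's repo signing key.
let mintServiceToken (key : Crypto.EcKeyPair) (issuerDid : string) (audienceDid : string) =
    let header = JsonSerializer.Serialize {| alg = "ES256K"; typ = "JWT" |}
    let exp = DateTimeOffset.UtcNow.AddSeconds(60.0).ToUnixTimeSeconds()
    let payload = JsonSerializer.Serialize {| iss = issuerDid; aud = audienceDid; exp = exp |}

    let signingInput =
        base64Url (Encoding.UTF8.GetBytes header) + "." + base64Url (Encoding.UTF8.GetBytes payload)

    // ECDSA over SHA-256 of the signing input, emitted in the raw 64-byte r||s form.
    let signature = Crypto.sign key (Crypto.sha256 (Encoding.ASCII.GetBytes signingInput))
    signingInput + "." + base64Url signature
```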
62
+
63
+
## Summary: Implementation Decisions
64
+
65
+
| Aspect | Decision | Rationale |
66
+
| --------------- | -------------------------- | ------------------------------------ |
67
+
| Token signing | HS256 (symmetric) | Simpler, standard for session tokens |
68
+
| Secret storage | Config/env var | Required for security |
69
+
| Account storage | In-memory (initial) | Matches existing patterns |
70
+
| Password hash | SHA-256 + salt | Uses existing Crypto.fs |
71
+
| Token lifetimes | Access: 15min, Refresh: 7d | Conservative defaults |
72
+
73
+
## References
74
+
75
+
- [XRPC Authentication Spec](https://atproto.com/specs/xrpc#authentication)
76
+
- [RFC 9068 - JWT Access Tokens](https://www.rfc-editor.org/rfc/rfc9068.html)
77
+
- [Bluesky PDS GitHub](https://github.com/bluesky-social/pds)
+50
PDSharp.Docs/car.md
+50
PDSharp.Docs/car.md
···
1
+
# CAR Format Implementation Notes
2
+
3
+
The **Content Addressable aRchives (CAR)** format is used to store content-addressable objects (IPLD blocks) as a sequence of bytes.
4
+
It is the standard format for repository export (`sync.getRepo`) and block transfer (`sync.getBlocks`) in the AT Protocol.
5
+
6
+
## 1. Format Overview (v1)
7
+
8
+
A CAR file consists of a **Header** followed by a sequence of **Data** sections.
9
+
10
+
```text
11
+
|--------- Header --------| |--------------- Data Section 1 ---------------| |--------------- Data Section 2 ---------------| ...
12
+
[ varint | DAG-CBOR block ] [ varint | CID bytes | Block Data bytes ] [ varint | CID bytes | Block Data bytes ] ...
13
+
```
14
+
15
+
### LEB128 Varints
16
+
17
+
All length prefixes in CAR are encoded as **unsigned LEB128 (UVarint)** integers.
18
+
19
+
- Used to prefix the Header block.
20
+
- Used to prefix each Data section.
21
+
22
+
## 2. Header
23
+
24
+
The header is a single DAG-CBOR encoded block describing the archive.
25
+
26
+
**Encoding:**
27
+
28
+
1. Construct the CBOR map: `{ "version": 1, "roots": [<cid>, ...] }`.
29
+
2. Encode as DAG-CBOR bytes.
30
+
3. Prefix with the length of those bytes (as UVarint).
31
+
32
+
## 3. Data Sections
33
+
34
+
Following the header, the file contains a concatenated sequence of data sections. Each section represents one IPLD block.
35
+
36
+
```text
37
+
[ Section Length (UVarint) ] [ CID (raw bytes) ] [ Binary Data ]
38
+
```
39
+
40
+
- **Section Length**: The total length of the *CID bytes* + *Binary Data*.
41
+
- **CID**: The raw binary representation of the block's CID (usually CIDv1 + DAG-CBOR + SHA2-256).
42
+
- **Binary Data**: The actual content of the block.
43
+
44
+
The Section Length *includes* the length of the CID.
45
+
46
+
This is slightly different from some other framing formats where length might only cover the payload.
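
The framing above is small enough to sketch directly. Assuming this repo's `Cid` type (raw bytes via `Bytes`) and `DagCbor.encode`, a minimal writer looks like this; error handling and streaming concerns are omitted:

```fsharp
open System.IO
open PDSharp.Core

// Unsigned LEB128 length prefix.
let writeUVarint (stream : Stream) (value : uint64) =
    let mutable v = value
    let mutable go = true
    while go do
        let b = byte (v &&& 0x7FUL)
        v <- v >>> 7
        if v = 0UL then
            stream.WriteByte b
            go <- false
        else
            stream.WriteByte(b ||| 0x80uy)

// Header: { "version": 1, "roots": [...] } as DAG-CBOR, prefixed by its own length.
let writeHeader (stream : Stream) (roots : Cid list) =
    let headerMap : Map<string, obj> =
        Map.ofList [ "version", box 1; "roots", box (roots |> List.map box) ]
    let headerBytes = DagCbor.encode headerMap
    writeUVarint stream (uint64 headerBytes.Length)
    stream.Write(headerBytes, 0, headerBytes.Length)

// Data section: the length prefix covers the CID bytes *and* the block payload.
let writeSection (stream : Stream) (cid : Cid) (blockData : byte[]) =
    let cidBytes = cid.Bytes
    writeUVarint stream (uint64 (cidBytes.Length + blockData.Length))
    stream.Write(cidBytes, 0, cidBytes.Length)
    stream.Write(blockData, 0, blockData.Length)
```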
47
+
48
+
## 4. References
49
+
50
+
- [IPLD CARv1 Specification](https://ipld.io/specs/transport/car/carv1/)
+46
PDSharp.Docs/cbor.md
+46
PDSharp.Docs/cbor.md
···
1
+
# DAG-CBOR Implementation Notes
2
+
3
+
DAG-CBOR is the canonical data serialization format for the AT Protocol.
4
+
It is a strict subset of CBOR (RFC 8949) with specific rules for determinism and linking.
5
+
6
+
## 1. Canonicalization Rules
7
+
8
+
To ensure consistent Content IDs (CIDs) for the same data, specific canonicalization rules must be followed during encoding.
9
+
10
+
### Map Key Sorting
11
+
12
+
Maps must be sorted by keys. The sorting order is **NOT** standard lexicographical order.
13
+
14
+
1. **Length**: Shorter keys come first.
15
+
2. **Bytes**: Keys of the same length are sorted lexicographically by their UTF-8 byte representation.
16
+
17
+
**Example:**
18
+
19
+
- `"a"` (len 1) comes before `"aa"` (len 2).
20
+
- `"b"` (len 1) comes before `"aa"` (len 2).
21
+
- `"a"` comes before `"b"`.
22
+
23
+
### Integer Encoding
24
+
25
+
Integers must be encoded using the smallest possible representation.
26
+
27
+
`System.Formats.Cbor` (in Strict mode) generally handles this, but care must be taken to treat `int`, `int64`, and `uint64` consistently.
28
+
29
+
## 2. Content Addressing (CIDs)
30
+
31
+
Links to other nodes (CIDs) are encoded using **CBOR Tag 42**.
32
+
33
+
### Format
34
+
35
+
1. **Tag**: `42` (Major type 6, value 42).
36
+
2. **Payload**: A byte string containing:
37
+
- The `0x00` byte (Multibase identity prefix, required by IPLD specs for binary CID inclusion).
38
+
- The raw bytes of the CID.
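
As a sketch, writing a single link with `System.Formats.Cbor` might look like this (the function name is illustrative; PDSharp's DAG-CBOR encoder may do this differently internally):

```fsharp
open System.Formats.Cbor

/// Sketch: encode one CID link as CBOR tag 42 over (0x00 ++ raw CID bytes).
let encodeCidLink (rawCidBytes : byte[]) : byte[] =
    let writer = CborWriter(CborConformanceMode.Strict)
    writer.WriteTag(LanguagePrimitives.EnumOfValue 42UL)           // tag 42 = IPLD link
    writer.WriteByteString(Array.append [| 0x00uy |] rawCidBytes)  // identity multibase prefix + CID
    writer.Encode()
```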
39
+
40
+
## 3. Known Gotchas
41
+
42
+
- **Float vs Int**:
43
+
AT Protocol generally discourages floats where integers suffice.
44
+
F# types must be matched carefully to avoid encoding `2.0` instead of `2`.
45
+
- **String Encoding**:
46
+
Must be UTF-8. Indefinite length strings are prohibited in DAG-CBOR.
+69
PDSharp.Docs/mst.md
+69
PDSharp.Docs/mst.md
···
1
+
# Merkle Search Tree (MST) Implementation Notes
2
+
3
+
The Merkle Search Tree (MST) is a probabilistic, balanced search tree used by the AT Protocol to store repository records.
4
+
5
+
## Overview
6
+
7
+
MSTs combine properties of B-trees and Merkle trees to ensure:
8
+
9
+
1. **Determinism**: The tree structure is determined by the keys (and their hashes), not insertion order.
10
+
2. **Verifiability**: Every node is content-addressed (CID), allowing the entire state to be verified via a single root hash.
11
+
3. **Efficiency**: Efficient key-value lookups and delta-based sync (subtrees that haven't changed share the same CIDs).
12
+
13
+
## Core Concepts
14
+
15
+
### Layering (Probabilistic Balance)
16
+
17
+
MSTs do not use rotation for balancing. Instead, they assign every key a "layer" based on its hash.
18
+
19
+
- **Formula**:
20
+
`Layer(key) = countLeadingZeros(SHA256(key)) / 2`.
21
+
- **Fanout**:
22
+
The divisor `2` implies a fanout of roughly 4 (2 bits per layer increment).
23
+
- Keys with higher layers appear higher in the tree, splitting the range of keys below them.
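
A sketch of the layer computation in F# (the `layerOf` name is illustrative; the core library's implementation may differ in detail):

```fsharp
open System.Security.Cryptography
open System.Text

/// Sketch: layer = leading zero bits of SHA-256(key), counted in 2-bit steps.
let layerOf (key : string) : int =
    let hash = SHA256.HashData(Encoding.UTF8.GetBytes key)
    let mutable zeros = 0
    let mutable i = 0
    let mutable stop = false
    while not stop && i < hash.Length do
        if hash.[i] = 0uy then
            zeros <- zeros + 8
            i <- i + 1
        else
            let mutable b = hash.[i]
            while b < 0x80uy do      // count zero bits at the top of the first non-zero byte
                zeros <- zeros + 1
                b <- b <<< 1
            stop <- true
    zeros / 2
```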
24
+
25
+
### Data Structure (`MstNode`)
26
+
27
+
An MST node consists of:
28
+
29
+
- **Left Child (`l`)**: Used to traverse to keys lexicographically smaller than the first entry in this node.
30
+
- **Entries (`e`)**: A sorted list of entries. Each entry contains:
31
+
- **Prefix Length (`p`)**: Length of the shared prefix with the *previous* key in the node (or the split key).
32
+
- **Key Suffix (`k`)**: The remaining bytes of the key.
33
+
- **Value (`v`)**: The CID of the record data.
34
+
- **Tree (`t`)**: (Optional) CID of the subtree containing keys between this entry and the next.
35
+
36
+
**Serialization**: The node is serialized as a DAG-CBOR array: `[l, [e1, e2, ...]]`.
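
From the shapes exercised in the test suite, the node and entry records look roughly like the following sketch (field types, in particular whether `l`/`t` are CID links or inline nodes, are assumptions here; `Cid` is the project's CID type):

```fsharp
/// Sketch of the entry/node shape used by the MST tests.
type MstEntry = {
    PrefixLen : int          // "p": shared prefix length with the previous key
    KeySuffix : string       // "k": remaining bytes of the key
    Value     : Cid          // "v": CID of the record data
    Tree      : Cid option   // "t": optional subtree between this entry and the next
}

type MstNode = {
    Left    : Cid option     // "l": subtree of keys smaller than the first entry
    Entries : MstEntry list  // "e": entries in sorted key order
}
```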
37
+
38
+
## Algorithms
39
+
40
+
### Insertion (`Put`)
41
+
42
+
Insertion relies on the "Layer" property:
43
+
44
+
1. Calculate `Layer(newKey)`.
45
+
2. Traverse the tree from the root.
46
+
3. **Split Condition**: If `Layer(newKey)` is **greater** than the layer of the current node, the new key belongs *above* this node.
47
+
- The current node is **split** into two children (Left and Right) based on the `newKey`.
48
+
- The `newKey` becomes a new node pointing to these two children.
49
+
4. **Recurse**: If `Layer(newKey)` is **less** than the current node's layer, find the correct child subtree (based on key comparison) and recurse.
50
+
5. **Same Layer**: If `Layer(newKey)` equals the current node's layer:
51
+
- Insert the entry directly into the node's sorted entries list.
52
+
- Any existing child pointer at that position must be split and redistributed if necessary (though spec usually implies layers are unique enough or handled by standard BST insert at that level).
53
+
54
+
### Deletion
55
+
56
+
1. Locate the key.
57
+
2. Remove the entry.
58
+
3. **Merge**:
59
+
Since the key acted as a separator for two subtrees (its "Left" previous child and its "Tree" child), removing it requires merging these two adjacent subtrees into a single valid MST node to preserve the tree's density and structure.
60
+
61
+
### Determinism & Prefix Compression
62
+
63
+
- **Canonical Order**: Keys must always be sorted.
64
+
- **Prefix Compression**:
65
+
Crucial for space saving.
66
+
The prefix length `p` is calculated relative to the *immediately preceding key* in the node (see the sketch at the end of these notes).
67
+
- **Issues**:
68
+
Insertion order *should not* matter (commutativity).
69
+
However, implementations must be careful with `Split` and `Merge` operations to ensure exactly the same node boundaries are created regardless of history.
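
To make the `p` computation concrete, a small helper sketch (comparing UTF-8 bytes, matching how keys are ordered; the name is illustrative):

```fsharp
open System.Text

/// Sketch: shared prefix length (in bytes) between the previous key and the current key.
let sharedPrefixLen (prevKey : string) (key : string) : int =
    let a = Encoding.UTF8.GetBytes prevKey
    let b = Encoding.UTF8.GetBytes key
    let mutable i = 0
    while i < a.Length && i < b.Length && a.[i] = b.[i] do
        i <- i + 1
    i

// sharedPrefixLen "apple" "apricot" = 2, so "apricot" is stored as (p = 2, k = "ricot")
```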
+74
PDSharp.Tests/AtUri.Tests.fs
+74
PDSharp.Tests/AtUri.Tests.fs
···
1
+
module AtUriTests
2
+
3
+
open Xunit
4
+
open PDSharp.Core.AtUri
5
+
6
+
[<Fact>]
7
+
let ``Parse valid AT-URI`` () =
8
+
let uri = "at://did:plc:abcd1234/app.bsky.feed.post/3kbq5vk4beg2f"
9
+
let result = parse uri
10
+
11
+
match result with
12
+
| Ok parsed ->
13
+
Assert.Equal("did:plc:abcd1234", parsed.Did)
14
+
Assert.Equal("app.bsky.feed.post", parsed.Collection)
15
+
Assert.Equal("3kbq5vk4beg2f", parsed.Rkey)
16
+
| Error msg -> Assert.Fail(msg)
17
+
18
+
[<Fact>]
19
+
let ``Parse did:web AT-URI`` () =
20
+
let uri = "at://did:web:example.com/app.bsky.actor.profile/self"
21
+
let result = parse uri
22
+
23
+
match result with
24
+
| Ok parsed ->
25
+
Assert.Equal("did:web:example.com", parsed.Did)
26
+
Assert.Equal("app.bsky.actor.profile", parsed.Collection)
27
+
Assert.Equal("self", parsed.Rkey)
28
+
| Error msg -> Assert.Fail(msg)
29
+
30
+
[<Fact>]
31
+
let ``Parse fails without at:// prefix`` () =
32
+
let uri = "http://did:plc:abcd/app.bsky.feed.post/123"
33
+
let result = parse uri
34
+
35
+
Assert.True(Result.isError result)
36
+
37
+
[<Fact>]
38
+
let ``Parse fails with invalid DID`` () =
39
+
let uri = "at://not-a-did/app.bsky.feed.post/123"
40
+
let result = parse uri
41
+
42
+
Assert.True(Result.isError result)
43
+
44
+
[<Fact>]
45
+
let ``Parse fails with invalid collection`` () =
46
+
let uri = "at://did:plc:abcd/NotAnNsid/123"
47
+
let result = parse uri
48
+
49
+
Assert.True(Result.isError result)
50
+
51
+
[<Fact>]
52
+
let ``Parse fails with missing parts`` () =
53
+
let uri = "at://did:plc:abcd/app.bsky.feed.post"
54
+
let result = parse uri
55
+
56
+
Assert.True(Result.isError result)
57
+
58
+
[<Fact>]
59
+
let ``ToString roundtrip`` () =
60
+
let original = {
61
+
Did = "did:plc:abcd"
62
+
Collection = "app.bsky.feed.post"
63
+
Rkey = "123"
64
+
}
65
+
66
+
let str = toString original
67
+
let parsed = parse str
68
+
69
+
match parsed with
70
+
| Ok p ->
71
+
Assert.Equal(original.Did, p.Did)
72
+
Assert.Equal(original.Collection, p.Collection)
73
+
Assert.Equal(original.Rkey, p.Rkey)
74
+
| Error msg -> Assert.Fail(msg)
+147
PDSharp.Tests/Auth.Tests.fs
+147
PDSharp.Tests/Auth.Tests.fs
···
1
+
module Auth.Tests
2
+
3
+
open Xunit
4
+
open PDSharp.Core.Auth
5
+
open System
6
+
open System.Collections.Concurrent
7
+
8
+
/// Mock in-memory store for testing
9
+
type VolatileAccountStore() =
10
+
let accounts = ConcurrentDictionary<string, Account>()
11
+
let handles = ConcurrentDictionary<string, string>()
12
+
13
+
interface IAccountStore with
14
+
member _.CreateAccount(account : Account) = async {
15
+
if handles.ContainsKey(account.Handle) then
16
+
return Error "Handle already taken"
17
+
elif accounts.ContainsKey(account.Did) then
18
+
return Error "Account already exists"
19
+
else
20
+
accounts.TryAdd(account.Did, account) |> ignore
21
+
handles.TryAdd(account.Handle, account.Did) |> ignore
22
+
return Ok()
23
+
}
24
+
25
+
member _.GetAccountByHandle(handle : string) = async {
26
+
match handles.TryGetValue(handle) with
27
+
| true, did ->
28
+
match accounts.TryGetValue(did) with
29
+
| true, acc -> return Some acc
30
+
| _ -> return None
31
+
| _ -> return None
32
+
}
33
+
34
+
member _.GetAccountByDid(did : string) = async {
35
+
match accounts.TryGetValue(did) with
36
+
| true, acc -> return Some acc
37
+
| _ -> return None
38
+
}
39
+
40
+
[<Fact>]
41
+
let ``Password hashing produces salt$hash format`` () =
42
+
let hash = hashPassword "mypassword"
43
+
Assert.Contains("$", hash)
44
+
let parts = hash.Split('$')
45
+
Assert.Equal(2, parts.Length)
46
+
47
+
[<Fact>]
48
+
let ``Password verification succeeds for correct password`` () =
49
+
let hash = hashPassword "mypassword"
50
+
Assert.True(verifyPassword "mypassword" hash)
51
+
52
+
[<Fact>]
53
+
let ``Password verification fails for wrong password`` () =
54
+
let hash = hashPassword "mypassword"
55
+
Assert.False(verifyPassword "wrongpassword" hash)
56
+
57
+
[<Fact>]
58
+
let ``Password verification fails for invalid hash format`` () =
59
+
Assert.False(verifyPassword "password" "invalidhash")
60
+
Assert.False(verifyPassword "password" "")
61
+
62
+
[<Fact>]
63
+
let ``JWT access token creation and validation`` () =
64
+
let secret = "test-secret-key-minimum-32-chars!"
65
+
let did = "did:web:test.example"
66
+
67
+
let token = createAccessToken secret did
68
+
69
+
let parts = token.Split('.')
70
+
Assert.Equal(3, parts.Length)
71
+
72
+
match validateToken secret token with
73
+
| Valid(extractedDid, tokenType, _) ->
74
+
Assert.Equal(did, extractedDid)
75
+
Assert.Equal(Access, tokenType)
76
+
| Invalid reason -> Assert.Fail $"Token should be valid, got: {reason}"
77
+
78
+
[<Fact>]
79
+
let ``JWT refresh token has correct type`` () =
80
+
let secret = "test-secret-key-minimum-32-chars!"
81
+
let did = "did:web:test.example"
82
+
83
+
let token = createRefreshToken secret did
84
+
85
+
match validateToken secret token with
86
+
| Valid(_, tokenType, _) -> Assert.Equal(Refresh, tokenType)
87
+
| Invalid reason -> Assert.Fail $"Token should be valid, got: {reason}"
88
+
89
+
[<Fact>]
90
+
let ``JWT validation fails with wrong secret`` () =
91
+
let secret = "test-secret-key-minimum-32-chars!"
92
+
let wrongSecret = "wrong-secret-key-minimum-32-chars!"
93
+
let did = "did:web:test.example"
94
+
95
+
let token = createAccessToken secret did
96
+
97
+
match validateToken wrongSecret token with
98
+
| Invalid _ -> Assert.True(true)
99
+
| Valid _ -> Assert.Fail "Token should be invalid with wrong secret"
100
+
101
+
[<Fact>]
102
+
let ``Account creation and lookup by handle`` () =
103
+
let store = VolatileAccountStore()
104
+
105
+
match
106
+
createAccount store "test.user" "password123" (Some "test@example.com")
107
+
|> Async.RunSynchronously
108
+
with
109
+
| Error msg -> Assert.Fail msg
110
+
| Ok account ->
111
+
Assert.Equal("test.user", account.Handle)
112
+
Assert.Equal("did:web:test.user", account.Did)
113
+
Assert.Equal(Some "test@example.com", account.Email)
114
+
115
+
let found =
116
+
(store :> IAccountStore).GetAccountByHandle "test.user"
117
+
|> Async.RunSynchronously
118
+
119
+
match found with
120
+
| None -> Assert.Fail "Account should be found"
121
+
| Some foundAcc -> Assert.Equal(account.Did, foundAcc.Did)
122
+
123
+
[<Fact>]
124
+
let ``Account creation fails for duplicate handle`` () =
125
+
let store = VolatileAccountStore()
126
+
127
+
createAccount store "duplicate.user" "password" None
128
+
|> Async.RunSynchronously
129
+
|> ignore
130
+
131
+
match createAccount store "duplicate.user" "password2" None |> Async.RunSynchronously with
132
+
| Error msg -> Assert.Contains("already", msg.ToLower())
133
+
| Ok _ -> Assert.Fail "Should fail for duplicate handle"
134
+
135
+
[<Fact>]
136
+
let ``Account lookup by DID`` () =
137
+
let store = VolatileAccountStore()
138
+
139
+
match createAccount store "did.user" "password123" None |> Async.RunSynchronously with
140
+
| Error msg -> Assert.Fail msg
141
+
| Ok account ->
142
+
let found =
143
+
(store :> IAccountStore).GetAccountByDid account.Did |> Async.RunSynchronously
144
+
145
+
match found with
146
+
| None -> Assert.Fail "Account should be found by DID"
147
+
| Some foundAcc -> Assert.Equal(account.Handle, foundAcc.Handle)
+52
PDSharp.Tests/BlockStore.Tests.fs
+52
PDSharp.Tests/BlockStore.Tests.fs
···
1
+
module BlockStoreTests
2
+
3
+
open Xunit
4
+
open PDSharp.Core
5
+
open PDSharp.Core.BlockStore
6
+
7
+
[<Fact>]
8
+
let ``MemoryBlockStore Put and Get roundtrip`` () =
9
+
let store = MemoryBlockStore() :> IBlockStore
10
+
let data = System.Text.Encoding.UTF8.GetBytes("hello world")
11
+
12
+
let cid = store.Put(data) |> Async.RunSynchronously
13
+
let retrieved = store.Get(cid) |> Async.RunSynchronously
14
+
15
+
Assert.True(Option.isSome retrieved)
16
+
Assert.Equal<byte[]>(data, Option.get retrieved)
17
+
18
+
[<Fact>]
19
+
let ``MemoryBlockStore Has returns true for existing`` () =
20
+
let store = MemoryBlockStore() :> IBlockStore
21
+
let data = System.Text.Encoding.UTF8.GetBytes("test data")
22
+
23
+
let cid = store.Put(data) |> Async.RunSynchronously
24
+
let exists = store.Has(cid) |> Async.RunSynchronously
25
+
26
+
Assert.True(exists)
27
+
28
+
[<Fact>]
29
+
let ``MemoryBlockStore Has returns false for missing`` () =
30
+
let store = MemoryBlockStore() :> IBlockStore
31
+
let fakeCid = Cid.FromHash(Crypto.sha256Str "nonexistent")
32
+
33
+
let exists = store.Has(fakeCid) |> Async.RunSynchronously
34
+
35
+
Assert.False(exists)
36
+
37
+
[<Fact>]
38
+
let ``MemoryBlockStore Get returns None for missing`` () =
39
+
let store = MemoryBlockStore() :> IBlockStore
40
+
let fakeCid = Cid.FromHash(Crypto.sha256Str "nonexistent")
41
+
42
+
let result = store.Get(fakeCid) |> Async.RunSynchronously
43
+
44
+
Assert.True(Option.isNone result)
45
+
46
+
[<Fact>]
47
+
let ``MemoryBlockStore CID is content-addressed`` () =
48
+
let store = MemoryBlockStore() :> IBlockStore
49
+
let data = System.Text.Encoding.UTF8.GetBytes("same content")
50
+
let cid1 = store.Put data |> Async.RunSynchronously
51
+
let cid2 = store.Put data |> Async.RunSynchronously
52
+
Assert.Equal<byte[]>(cid1.Bytes, cid2.Bytes)
+71
PDSharp.Tests/Car.Tests.fs
+71
PDSharp.Tests/Car.Tests.fs
···
1
+
module CarTests
2
+
3
+
open Xunit
4
+
open PDSharp.Core
5
+
open PDSharp.Core.Car
6
+
open PDSharp.Core.Crypto
7
+
8
+
[<Fact>]
9
+
let ``Varint encodes zero correctly`` () =
10
+
let result = encodeVarint 0
11
+
Assert.Equal<byte[]>([| 0uy |], result)
12
+
13
+
[<Fact>]
14
+
let ``Varint encodes single byte values correctly`` () =
15
+
let result1 = encodeVarint 1
16
+
Assert.Equal<byte[]>([| 1uy |], result1)
17
+
18
+
let result127 = encodeVarint 127
19
+
Assert.Equal<byte[]>([| 127uy |], result127)
20
+
21
+
[<Fact>]
22
+
let ``Varint encodes multi-byte values correctly`` () =
23
+
let result128 = encodeVarint 128
24
+
Assert.Equal<byte[]>([| 0x80uy; 0x01uy |], result128)
25
+
26
+
let result300 = encodeVarint 300
27
+
Assert.Equal<byte[]>([| 0xACuy; 0x02uy |], result300)
28
+
29
+
let result16384 = encodeVarint 16384
30
+
Assert.Equal<byte[]>([| 0x80uy; 0x80uy; 0x01uy |], result16384)
31
+
32
+
[<Fact>]
33
+
let ``CAR header starts with version and roots`` () =
34
+
let hash = sha256Str "test-root"
35
+
let root = Cid.FromHash hash
36
+
let header = createHeader [ root ]
37
+
38
+
Assert.True(header.Length > 0, "Header should not be empty")
39
+
Assert.True(header.[0] >= 0xa0uy && header.[0] <= 0xbfuy, "Header should be a CBOR map")
40
+
41
+
[<Fact>]
42
+
let ``CAR block section is varint + CID + data`` () =
43
+
let hash = sha256Str "test-block"
44
+
let cid = Cid.FromHash hash
45
+
let data = [| 1uy; 2uy; 3uy; 4uy |]
46
+
47
+
let block = encodeBlock cid data
48
+
49
+
Assert.Equal(40uy, block.[0])
50
+
Assert.Equal(41, block.Length)
51
+
52
+
[<Fact>]
53
+
let ``Full CAR creation produces valid structure`` () =
54
+
let hash = sha256Str "root-data"
55
+
let rootCid = Cid.FromHash hash
56
+
let blocks = [ (rootCid, [| 1uy; 2uy; 3uy |]) ]
57
+
let car = createCar [ rootCid ] blocks
58
+
59
+
Assert.True(car.Length > 0, "CAR should not be empty")
60
+
Assert.True(car.[0] < 128uy, "Header length should fit in one varint byte for small headers")
61
+
62
+
[<Fact>]
63
+
let ``CAR with multiple blocks`` () =
64
+
let hash1 = sha256Str "block1"
65
+
let hash2 = sha256Str "block2"
66
+
let cid1 = Cid.FromHash hash1
67
+
let cid2 = Cid.FromHash hash2
68
+
69
+
let blocks = [ cid1, [| 1uy; 2uy; 3uy |]; cid2, [| 4uy; 5uy; 6uy; 7uy |] ]
70
+
let car = createCar [ cid1 ] blocks
71
+
Assert.True(car.Length > 80, "CAR with two blocks should be substantial")
+145
PDSharp.Tests/Conformance.Tests.fs
+145
PDSharp.Tests/Conformance.Tests.fs
···
1
+
module PDSharp.Tests.Conformance
2
+
3
+
open Xunit
4
+
open System
5
+
open System.Text.Json
6
+
open PDSharp.Core
7
+
8
+
module LexiconTests =
9
+
10
+
let parse (json : string) =
11
+
JsonSerializer.Deserialize<JsonElement>(json)
12
+
13
+
[<Fact>]
14
+
let ``Valid Post passes validation`` () =
15
+
let json =
16
+
"""{
17
+
"$type": "app.bsky.feed.post",
18
+
"text": "Hello, world!",
19
+
"createdAt": "2023-10-27T10:00:00Z"
20
+
}"""
21
+
22
+
let element = parse json
23
+
let result = Lexicon.validate "app.bsky.feed.post" element
24
+
Assert.Equal(Lexicon.Ok, result)
25
+
26
+
[<Fact>]
27
+
let ``Post missing text fails validation`` () =
28
+
let json =
29
+
"""{
30
+
"$type": "app.bsky.feed.post",
31
+
"createdAt": "2023-10-27T10:00:00Z"
32
+
}"""
33
+
34
+
let element = parse json
35
+
let result = Lexicon.validate "app.bsky.feed.post" element
36
+
37
+
match result with
38
+
| Lexicon.Error msg -> Assert.Contains("text", msg)
39
+
| _ -> Assert.Fail("Should have failed validation")
40
+
41
+
[<Fact>]
42
+
let ``Post text too long fails validation`` () =
43
+
let longText = String('a', 3001)
44
+
45
+
let template =
46
+
"""{
47
+
"$type": "app.bsky.feed.post",
48
+
"text": "TEXT_PLACEHOLDER",
49
+
"createdAt": "2023-10-27T10:00:00Z"
50
+
}"""
51
+
52
+
let json = template.Replace("TEXT_PLACEHOLDER", longText)
53
+
let element = parse json
54
+
let result = Lexicon.validate "app.bsky.feed.post" element
55
+
56
+
match result with
57
+
| Lexicon.Error msg -> Assert.Contains("exceeds maximum length", msg)
58
+
| _ -> Assert.Fail("Should have failed validation")
59
+
60
+
[<Fact>]
61
+
let ``Valid Like passes validation`` () =
62
+
let json =
63
+
"""{
64
+
"$type": "app.bsky.feed.like",
65
+
"subject": {
66
+
"uri": "at://did:plc:123/app.bsky.feed.post/3k5",
67
+
"cid": "bafyreih..."
68
+
},
69
+
"createdAt": "2023-10-27T10:00:00Z"
70
+
}"""
71
+
72
+
let element = parse json
73
+
let result = Lexicon.validate "app.bsky.feed.like" element
74
+
Assert.Equal(Lexicon.Ok, result)
75
+
76
+
[<Fact>]
77
+
let ``Like missing subject fails validation`` () =
78
+
let json =
79
+
"""{
80
+
"$type": "app.bsky.feed.like",
81
+
"createdAt": "2023-10-27T10:00:00Z"
82
+
}"""
83
+
84
+
let element = parse json
85
+
let result = Lexicon.validate "app.bsky.feed.like" element
86
+
87
+
match result with
88
+
| Lexicon.Error msg -> Assert.Contains("subject", msg)
89
+
| _ -> Assert.Fail("Should have failed validation")
90
+
91
+
[<Fact>]
92
+
let ``Like subject missing uri fails validation`` () =
93
+
let json =
94
+
"""{
95
+
"$type": "app.bsky.feed.like",
96
+
"subject": {
97
+
"cid": "bafyreih..."
98
+
},
99
+
"createdAt": "2023-10-27T10:00:00Z"
100
+
}"""
101
+
102
+
let element = parse json
103
+
let result = Lexicon.validate "app.bsky.feed.like" element
104
+
105
+
match result with
106
+
| Lexicon.Error msg -> Assert.Contains("uri", msg)
107
+
| _ -> Assert.Fail("Should have failed validation")
108
+
109
+
[<Fact>]
110
+
let ``Valid Profile passes validation`` () =
111
+
let json =
112
+
"""{
113
+
"$type": "app.bsky.actor.profile",
114
+
"displayName": "Alice",
115
+
"description": "Bob's friend"
116
+
}"""
117
+
118
+
let element = parse json
119
+
let result = Lexicon.validate "app.bsky.actor.profile" element
120
+
Assert.Equal(Lexicon.Ok, result)
121
+
122
+
[<Fact>]
123
+
let ``Profile description over max length fails validation`` () =
124
+
let longDesc = String('a', 2561)
125
+
126
+
let template =
127
+
"""{
128
+
"$type": "app.bsky.actor.profile",
129
+
"description": "DESC_PLACEHOLDER"
130
+
}"""
131
+
132
+
let json = template.Replace("DESC_PLACEHOLDER", longDesc)
133
+
let element = parse json
134
+
let result = Lexicon.validate "app.bsky.actor.profile" element
135
+
136
+
match result with
137
+
| Lexicon.Error msg -> Assert.Contains("exceeds maximum length", msg)
138
+
| _ -> Assert.Fail("Should have failed validation")
139
+
140
+
[<Fact>]
141
+
let ``Unknown type validation is lax`` () =
142
+
let json = """{ "random": "stuff" }"""
143
+
let element = parse json
144
+
let result = Lexicon.validate "com.unknown.record" element
145
+
Assert.Equal(Lexicon.Ok, result)
+55
PDSharp.Tests/Firehose.Tests.fs
+55
PDSharp.Tests/Firehose.Tests.fs
···
1
+
module Firehose.Tests
2
+
3
+
open Xunit
4
+
open PDSharp.Core
5
+
open PDSharp.Core.Firehose
6
+
open PDSharp.Core.Crypto
7
+
8
+
[<Fact>]
9
+
let ``nextSeq monotonically increases`` () =
10
+
resetSeq ()
11
+
let seq1 = nextSeq ()
12
+
let seq2 = nextSeq ()
13
+
let seq3 = nextSeq ()
14
+
15
+
Assert.Equal(1L, seq1)
16
+
Assert.Equal(2L, seq2)
17
+
Assert.Equal(3L, seq3)
18
+
19
+
[<Fact>]
20
+
let ``currentSeq returns without incrementing`` () =
21
+
resetSeq ()
22
+
let _ = nextSeq () // 1
23
+
let _ = nextSeq () // 2
24
+
let current = currentSeq ()
25
+
let next = nextSeq ()
26
+
27
+
Assert.Equal(2L, current)
28
+
Assert.Equal(3L, next)
29
+
30
+
[<Fact>]
31
+
let ``createCommitEvent has correct fields`` () =
32
+
resetSeq ()
33
+
let hash = sha256Str "test"
34
+
let cid = Cid.FromHash hash
35
+
let carBytes = [| 0x01uy; 0x02uy |]
36
+
37
+
let event = createCommitEvent "did:web:test" "rev123" cid carBytes
38
+
39
+
Assert.Equal(1L, event.Seq)
40
+
Assert.Equal("did:web:test", event.Did)
41
+
Assert.Equal("rev123", event.Rev)
42
+
Assert.Equal<byte[]>(cid.Bytes, event.Commit.Bytes)
43
+
Assert.Equal<byte[]>(carBytes, event.Blocks)
44
+
45
+
[<Fact>]
46
+
let ``encodeEvent produces valid CBOR`` () =
47
+
resetSeq ()
48
+
let hash = sha256Str "test"
49
+
let cid = Cid.FromHash hash
50
+
let carBytes = [| 0x01uy; 0x02uy |]
51
+
let event = createCommitEvent "did:web:test" "rev123" cid carBytes
52
+
let encoded = encodeEvent event
53
+
54
+
Assert.True(encoded.Length > 0)
55
+
Assert.True(encoded.[0] >= 0xa0uy, "Should encode as CBOR map")
+179
PDSharp.Tests/Handlers.Tests.fs
+179
PDSharp.Tests/Handlers.Tests.fs
···
1
+
module Handlers.Tests
2
+
3
+
open System
4
+
open System.IO
5
+
open System.Text
6
+
open System.Text.Json
7
+
open System.Threading.Tasks
8
+
open System.Collections.Generic
9
+
open Xunit
10
+
open Microsoft.AspNetCore.Http
11
+
open Giraffe
12
+
open PDSharp.Core.Config
13
+
open PDSharp.Core.BlockStore
14
+
open PDSharp.Core
15
+
open PDSharp.Core.SqliteStore
16
+
open PDSharp.Core.Auth
17
+
18
+
type MockAccountStore() =
19
+
let mutable accounts = Map.empty<string, Account>
20
+
21
+
interface IAccountStore with
22
+
member _.CreateAccount(account) = async {
23
+
if accounts.ContainsKey account.Did then
24
+
return Error "Exists"
25
+
else
26
+
accounts <- accounts.Add(account.Did, account)
27
+
return Ok()
28
+
}
29
+
30
+
member _.GetAccountByHandle(handle) = async {
31
+
return accounts |> Map.tryPick (fun _ v -> if v.Handle = handle then Some v else None)
32
+
}
33
+
34
+
member _.GetAccountByDid did = async { return accounts.TryFind did }
35
+
36
+
type MockBlockStore() =
37
+
let mutable blocks = Map.empty<string, byte[]>
38
+
39
+
interface IBlockStore with
40
+
member _.Put(data) = async {
41
+
let hash = Crypto.sha256 data
42
+
let cid = Cid.FromHash hash
43
+
blocks <- blocks.Add(cid.ToString(), data)
44
+
return cid
45
+
}
46
+
47
+
member _.Get cid = async { return blocks.TryFind(cid.ToString()) }
48
+
member _.Has cid = async { return blocks.ContainsKey(cid.ToString()) }
49
+
50
+
member _.GetAllCidsAndData() = async {
51
+
return
52
+
blocks
53
+
|> Map.toList
54
+
|> List.choose (fun (k, v) -> Cid.TryParse k |> Option.map (fun c -> (c, v)))
55
+
}
56
+
57
+
type MockRepoStore() =
58
+
let mutable repos = Map.empty<string, RepoRow>
59
+
60
+
interface IRepoStore with
61
+
member _.GetRepo(did) = async { return repos.TryFind did }
62
+
member _.SaveRepo(repo) = async { repos <- repos.Add(repo.did, repo) }
63
+
64
+
type MockJsonSerializer() =
65
+
interface Giraffe.Json.ISerializer with
66
+
member _.SerializeToString x = JsonSerializer.Serialize x
67
+
member _.SerializeToBytes x = JsonSerializer.SerializeToUtf8Bytes x
68
+
member _.Deserialize<'T>(json : string) = JsonSerializer.Deserialize<'T> json
69
+
70
+
member _.Deserialize<'T>(bytes : byte[]) =
71
+
JsonSerializer.Deserialize<'T>(ReadOnlySpan bytes)
72
+
73
+
member _.DeserializeAsync<'T>(stream : Stream) = task { return! JsonSerializer.DeserializeAsync<'T>(stream) }
74
+
75
+
member _.SerializeToStreamAsync<'T> (x : 'T) (stream : Stream) = task {
76
+
do! JsonSerializer.SerializeAsync<'T>(stream, x)
77
+
}
78
+
79
+
let mockContext (services : (Type * obj) list) (body : string) (query : Map<string, string>) =
80
+
let ctx = new DefaultHttpContext()
81
+
let serializer = MockJsonSerializer()
82
+
let allServices = (typeof<Giraffe.Json.ISerializer>, box serializer) :: services
83
+
84
+
let sp =
85
+
{ new IServiceProvider with
86
+
member _.GetService(serviceType) =
87
+
allServices
88
+
|> List.tryPick (fun (t, s) -> if t = serviceType then Some s else None)
89
+
|> Option.toObj
90
+
}
91
+
92
+
ctx.RequestServices <- sp
93
+
94
+
if not (String.IsNullOrEmpty body) then
95
+
let stream = new MemoryStream(Encoding.UTF8.GetBytes(body))
96
+
ctx.Request.Body <- stream
97
+
ctx.Request.ContentLength <- stream.Length
98
+
99
+
if not query.IsEmpty then
100
+
let dict = Dictionary<string, Microsoft.Extensions.Primitives.StringValues>()
101
+
102
+
for kvp in query do
103
+
dict.Add(kvp.Key, Microsoft.Extensions.Primitives.StringValues(kvp.Value))
104
+
105
+
ctx.Request.Query <- QueryCollection dict
106
+
107
+
ctx
108
+
109
+
[<Fact>]
110
+
let ``Auth.createAccountHandler creates account successfully`` () = task {
111
+
let accountStore = MockAccountStore()
112
+
113
+
let config = {
114
+
PublicUrl = "https://pds.example.com"
115
+
DidHost = "did:web:pds.example.com"
116
+
JwtSecret = "secret"
117
+
SqliteConnectionString = ""
118
+
DisableWalAutoCheckpoint = false
119
+
BlobStore = Disk "blobs"
120
+
}
121
+
122
+
let services = [ typeof<AppConfig>, box config; typeof<IAccountStore>, box accountStore ]
123
+
124
+
let req : PDSharp.Handlers.Auth.CreateAccountRequest = {
125
+
handle = "alice.test"
126
+
email = Some "alice@test.com"
127
+
password = "password123"
128
+
inviteCode = None
129
+
}
130
+
131
+
let body = JsonSerializer.Serialize req
132
+
let ctx = mockContext services body Map.empty
133
+
let next : HttpFunc = fun _ -> Task.FromResult(None)
134
+
let! result = PDSharp.Handlers.Auth.createAccountHandler next ctx
135
+
Assert.Equal(200, ctx.Response.StatusCode)
136
+
137
+
let store = accountStore :> IAccountStore
138
+
let! accountOpt = store.GetAccountByHandle "alice.test"
139
+
Assert.True accountOpt.IsSome
140
+
}
141
+
142
+
[<Fact>]
143
+
let ``Server.indexHandler returns HTML`` () = task {
144
+
let ctx = new DefaultHttpContext()
145
+
let next : HttpFunc = fun _ -> Task.FromResult(None)
146
+
let! result = PDSharp.Handlers.Server.indexHandler next ctx
147
+
Assert.Equal(200, ctx.Response.StatusCode)
148
+
Assert.Equal("text/html", ctx.Response.ContentType)
149
+
}
150
+
151
+
[<Fact>]
152
+
let ``Repo.createRecordHandler invalid collection returns error`` () = task {
153
+
let blockStore = MockBlockStore()
154
+
let repoStore = MockRepoStore()
155
+
let keyStore = PDSharp.Handlers.SigningKeyStore()
156
+
let firehose = PDSharp.Handlers.FirehoseState()
157
+
158
+
let services = [
159
+
typeof<IBlockStore>, box blockStore
160
+
typeof<IRepoStore>, box repoStore
161
+
typeof<PDSharp.Handlers.SigningKeyStore>, box keyStore
162
+
typeof<PDSharp.Handlers.FirehoseState>, box firehose
163
+
]
164
+
165
+
let record = JsonSerializer.Deserialize<JsonElement> "{\"text\":\"hello\"}"
166
+
167
+
let req : PDSharp.Handlers.Repo.CreateRecordRequest = {
168
+
repo = "did:web:alice.test"
169
+
collection = "app.bsky.feed.post"
170
+
record = record
171
+
rkey = None
172
+
}
173
+
174
+
let body = JsonSerializer.Serialize(req)
175
+
let ctx = mockContext services body Map.empty
176
+
let next : HttpFunc = fun _ -> Task.FromResult(None)
177
+
let! result = PDSharp.Handlers.Repo.createRecordHandler next ctx
178
+
Assert.Equal(400, ctx.Response.StatusCode)
179
+
}
+116
PDSharp.Tests/Health.Tests.fs
+116
PDSharp.Tests/Health.Tests.fs
···
1
+
module PDSharp.Tests.Health
2
+
3
+
open System
4
+
open Xunit
5
+
open PDSharp.Core.Health
6
+
7
+
[<Fact>]
8
+
let ``getDiskUsage returns disk info for valid path`` () =
9
+
let result = getDiskUsage "."
10
+
11
+
match result with
12
+
| Some usage ->
13
+
Assert.True(usage.TotalBytes > 0L)
14
+
Assert.True(usage.FreeBytes >= 0L)
15
+
Assert.True(usage.UsedBytes >= 0L)
16
+
Assert.True(usage.UsedPercent >= 0.0 && usage.UsedPercent <= 100.0)
17
+
| None -> Assert.True(true)
18
+
19
+
[<Fact>]
20
+
let ``getDiskUsage UsedPercent is calculated correctly`` () =
21
+
let result = getDiskUsage "."
22
+
23
+
match result with
24
+
| Some usage ->
25
+
let expectedUsed = usage.TotalBytes - usage.FreeBytes
26
+
Assert.Equal(expectedUsed, usage.UsedBytes)
27
+
let expectedPercent = float usage.UsedBytes / float usage.TotalBytes * 100.0
28
+
Assert.True(abs (usage.UsedPercent - expectedPercent) < 0.1)
29
+
| None -> Assert.True(true)
30
+
31
+
[<Fact>]
32
+
let ``getDiskUsage IsCritical is true when usage > 90 percent`` () =
33
+
let result = getDiskUsage "."
34
+
35
+
match result with
36
+
| Some usage -> Assert.Equal(usage.UsedPercent >= 90.0, usage.IsCritical)
37
+
| None -> Assert.True(true)
38
+
39
+
[<Fact>]
40
+
let ``checkDatabaseHealth returns healthy for existing file`` () =
41
+
let tempPath = System.IO.Path.GetTempFileName()
42
+
43
+
try
44
+
let connStr = $"Data Source={tempPath}"
45
+
let result = checkDatabaseHealth connStr
46
+
Assert.True result.IsHealthy
47
+
Assert.True result.Message.IsNone
48
+
finally
49
+
System.IO.File.Delete tempPath
50
+
51
+
[<Fact>]
52
+
let ``checkDatabaseHealth returns unhealthy for missing file`` () =
53
+
let connStr = "Data Source=/nonexistent/path/to/database.db"
54
+
let result = checkDatabaseHealth connStr
55
+
Assert.False result.IsHealthy
56
+
Assert.True result.Message.IsSome
57
+
58
+
[<Fact>]
59
+
let ``checkDatabaseHealth handles invalid connection string`` () =
60
+
let connStr = "invalid"
61
+
let result = checkDatabaseHealth connStr
62
+
Assert.False result.IsHealthy
63
+
Assert.True result.Message.IsSome
64
+
65
+
[<Fact>]
66
+
let ``getBackupStatus returns stale when no backup`` () =
67
+
let result = getBackupStatus None
68
+
Assert.True result.IsStale
69
+
Assert.True result.LastBackupTime.IsNone
70
+
Assert.True result.BackupAgeHours.IsNone
71
+
72
+
[<Fact>]
73
+
let ``getBackupStatus returns not stale for recent backup`` () =
74
+
let recentTime = DateTimeOffset.UtcNow.AddHours(-1.0)
75
+
let result = getBackupStatus (Some recentTime)
76
+
Assert.False result.IsStale
77
+
Assert.True result.LastBackupTime.IsSome
78
+
Assert.True result.BackupAgeHours.IsSome
79
+
Assert.True(result.BackupAgeHours.Value < 24.0)
80
+
81
+
[<Fact>]
82
+
let ``getBackupStatus returns stale for old backup`` () =
83
+
let oldTime = DateTimeOffset.UtcNow.AddHours(-25.0)
84
+
let result = getBackupStatus (Some oldTime)
85
+
Assert.True result.IsStale
86
+
Assert.True(result.BackupAgeHours.Value > 24.0)
87
+
88
+
[<Fact>]
89
+
let ``HealthState tracks uptime correctly`` () =
90
+
let state = HealthState()
91
+
state.SetStartTime(DateTimeOffset.UtcNow.AddSeconds(-10.0))
92
+
let uptime = state.GetUptime()
93
+
Assert.True(uptime >= 9L && uptime <= 12L)
94
+
95
+
[<Fact>]
96
+
let ``HealthState records backup time`` () =
97
+
let state = HealthState()
98
+
Assert.True state.LastBackupTime.IsNone
99
+
state.RecordBackup()
100
+
Assert.True state.LastBackupTime.IsSome
101
+
102
+
[<Fact>]
103
+
let ``buildHealthStatus constructs complete status`` () =
104
+
let state = HealthState()
105
+
let tempPath = System.IO.Path.GetTempFileName()
106
+
107
+
try
108
+
let connStr = $"Data Source={tempPath}"
109
+
let status = buildHealthStatus "1.0.0" state connStr "."
110
+
111
+
Assert.Equal("1.0.0", status.Version)
112
+
Assert.True(status.UptimeSeconds >= 0L)
113
+
Assert.True status.DatabaseStatus.IsHealthy
114
+
Assert.True status.BackupStatus.IsSome
115
+
finally
116
+
System.IO.File.Delete tempPath
+264
PDSharp.Tests/Mst.Tests.fs
+264
PDSharp.Tests/Mst.Tests.fs
···
1
+
module MstTests
2
+
3
+
open Xunit
4
+
open PDSharp.Core
5
+
open PDSharp.Core.Mst
6
+
7
+
[<Fact>]
8
+
let ``Serialization Roundtrip`` () =
9
+
let cid1 = Cid(Crypto.sha256Str "val1")
10
+
11
+
let e1 = {
12
+
PrefixLen = 0
13
+
KeySuffix = "apple"
14
+
Value = cid1
15
+
Tree = None
16
+
}
17
+
18
+
let e2 = {
19
+
PrefixLen = 2
20
+
KeySuffix = "ricot"
21
+
Value = cid1
22
+
Tree = None
23
+
}
24
+
25
+
let node = { Left = None; Entries = [ e1; e2 ] }
26
+
27
+
let bytes = Mst.serialize node
28
+
let node2 = Mst.deserialize bytes
29
+
30
+
Assert.Equal(node.Entries.Length, node2.Entries.Length)
31
+
Assert.Equal("apple", node2.Entries.[0].KeySuffix)
32
+
Assert.Equal("ricot", node2.Entries.[1].KeySuffix)
33
+
Assert.Equal(2, node2.Entries.[1].PrefixLen)
34
+
35
+
[<Fact>]
36
+
let ``Get Operation Linear Scan`` () =
37
+
let cid1 = Cid(Crypto.sha256Str "val1")
38
+
let cid2 = Cid(Crypto.sha256Str "val2")
39
+
40
+
let e1 = {
41
+
PrefixLen = 0
42
+
KeySuffix = "apple"
43
+
Value = cid1
44
+
Tree = None
45
+
}
46
+
47
+
let e2 = {
48
+
PrefixLen = 0
49
+
KeySuffix = "banana"
50
+
Value = cid2
51
+
Tree = None
52
+
}
53
+
54
+
let node = { Left = None; Entries = [ e1; e2 ] }
55
+
56
+
let loader (c : Cid) = async { return None }
57
+
58
+
let res1 = Mst.get loader node "apple" "" |> Async.RunSynchronously
59
+
Assert.Equal(Some cid1, res1)
60
+
61
+
let res2 = Mst.get loader node "banana" "" |> Async.RunSynchronously
62
+
Assert.Equal(Some cid2, res2)
63
+
64
+
let res3 = Mst.get loader node "cherry" "" |> Async.RunSynchronously
65
+
Assert.True(Option.isNone res3)
66
+
67
+
[<Fact>]
68
+
let ``Get Operation With Prefix Compression`` () =
69
+
let cid1 = Cid(Crypto.sha256Str "val1")
70
+
let cid2 = Cid(Crypto.sha256Str "val2")
71
+
72
+
let e1 = {
73
+
PrefixLen = 0
74
+
KeySuffix = "apple"
75
+
Value = cid1
76
+
Tree = None
77
+
}
78
+
79
+
let e2 = {
80
+
PrefixLen = 2
81
+
KeySuffix = "ricot"
82
+
Value = cid2
83
+
Tree = None
84
+
}
85
+
86
+
let node = { Left = None; Entries = [ e1; e2 ] }
87
+
let loader (c : Cid) = async { return None }
88
+
89
+
let res1 = Mst.get loader node "apricot" "" |> Async.RunSynchronously
90
+
Assert.Equal(Some cid2, res1)
91
+
92
+
[<Fact>]
93
+
let ``Put Operation Simple Insert`` () =
94
+
let store = System.Collections.Concurrent.ConcurrentDictionary<string, MstNode>()
95
+
96
+
let loader (c : Cid) = async {
97
+
let key = System.Convert.ToBase64String(c.Bytes)
98
+
let success, node = store.TryGetValue(key)
99
+
return if success then Some node else None
100
+
}
101
+
102
+
let persister (n : MstNode) = async {
103
+
let bytes = Mst.serialize n
104
+
let cid = Cid(Crypto.sha256 bytes)
105
+
let key = System.Convert.ToBase64String(cid.Bytes)
106
+
store.[key] <- n
107
+
return cid
108
+
}
109
+
110
+
let node = { Left = None; Entries = [] }
111
+
let cid1 = Cid(Crypto.sha256Str "v1")
112
+
let node2 = Mst.put loader persister node "apple" cid1 "" |> Async.RunSynchronously
113
+
114
+
Assert.Equal(1, node2.Entries.Length)
115
+
Assert.Equal("apple", node2.Entries.[0].KeySuffix)
116
+
Assert.Equal(0, node2.Entries.[0].PrefixLen)
117
+
Assert.Equal(cid1, node2.Entries.[0].Value)
118
+
119
+
let res = Mst.get loader node2 "apple" "" |> Async.RunSynchronously
120
+
Assert.Equal(Some cid1, res)
121
+
122
+
[<Fact>]
123
+
let ``Put Operation Multiple Sorted`` () =
124
+
let store = System.Collections.Concurrent.ConcurrentDictionary<string, MstNode>()
125
+
126
+
let loader (c : Cid) = async {
127
+
let key = System.Convert.ToBase64String(c.Bytes)
128
+
let success, node = store.TryGetValue(key)
129
+
return if success then Some node else None
130
+
}
131
+
132
+
let persister (n : MstNode) = async {
133
+
let bytes = Mst.serialize n
134
+
let cid = Cid(Crypto.sha256 bytes)
135
+
let key = System.Convert.ToBase64String(cid.Bytes)
136
+
store.[key] <- n
137
+
return cid
138
+
}
139
+
140
+
let mutable node = { Left = None; Entries = [] }
141
+
142
+
let k1, v1 = "apple", Cid(Crypto.sha256Str "1")
143
+
let k2, v2 = "banana", Cid(Crypto.sha256Str "2")
144
+
let k3, v3 = "cherry", Cid(Crypto.sha256Str "3")
145
+
146
+
node <- Mst.put loader persister node k1 v1 "" |> Async.RunSynchronously
147
+
node <- Mst.put loader persister node k2 v2 "" |> Async.RunSynchronously
148
+
node <- Mst.put loader persister node k3 v3 "" |> Async.RunSynchronously
149
+
150
+
let g1 = Mst.get loader node "apple" "" |> Async.RunSynchronously
151
+
let g2 = Mst.get loader node "banana" "" |> Async.RunSynchronously
152
+
let g3 = Mst.get loader node "cherry" "" |> Async.RunSynchronously
153
+
154
+
Assert.Equal(Some v1, g1)
155
+
Assert.Equal(Some v2, g2)
156
+
Assert.Equal(Some v3, g3)
157
+
158
+
[<Fact>]
159
+
let ``Put Operation Multiple Reverse`` () =
160
+
let store = System.Collections.Concurrent.ConcurrentDictionary<string, MstNode>()
161
+
162
+
let loader (c : Cid) = async {
163
+
let key = System.Convert.ToBase64String(c.Bytes)
164
+
let success, node = store.TryGetValue(key)
165
+
return if success then Some node else None
166
+
}
167
+
168
+
let persister (n : MstNode) = async {
169
+
let bytes = Mst.serialize n
170
+
let cid = Cid(Crypto.sha256 bytes)
171
+
let key = System.Convert.ToBase64String(cid.Bytes)
172
+
store.[key] <- n
173
+
return cid
174
+
}
175
+
176
+
let mutable node = { Left = None; Entries = [] }
177
+
178
+
let data = [ "zebra"; "yak"; "xylophone" ]
179
+
180
+
for k in data do
181
+
let v = Cid(Crypto.sha256Str k)
182
+
node <- Mst.put loader persister node k v "" |> Async.RunSynchronously
183
+
184
+
for k in data do
185
+
let expected = Cid(Crypto.sha256Str k)
186
+
let actual = Mst.get loader node k "" |> Async.RunSynchronously
187
+
Assert.Equal(Some expected, actual)
188
+
189
+
[<Fact>]
190
+
let ``Delete Operation Simple`` () =
191
+
let store = System.Collections.Concurrent.ConcurrentDictionary<string, MstNode>()
192
+
193
+
let loader (c : Cid) = async {
194
+
let key = System.Convert.ToBase64String(c.Bytes)
195
+
let success, node = store.TryGetValue(key)
196
+
return if success then Some node else None
197
+
}
198
+
199
+
let persister (n : MstNode) = async {
200
+
let bytes = Mst.serialize n
201
+
let cid = Cid(Crypto.sha256 bytes)
202
+
let key = System.Convert.ToBase64String(cid.Bytes)
203
+
store.[key] <- n
204
+
return cid
205
+
}
206
+
207
+
let mutable node = { Left = None; Entries = [] }
208
+
let cid1 = Cid(Crypto.sha256Str "val1")
209
+
210
+
node <- Mst.put loader persister node "apple" cid1 "" |> Async.RunSynchronously
211
+
212
+
let res1 = Mst.get loader node "apple" "" |> Async.RunSynchronously
213
+
Assert.Equal(Some cid1, res1)
214
+
215
+
// Delete
216
+
let nodeOpt = Mst.delete loader persister node "apple" "" |> Async.RunSynchronously
217
+
218
+
match nodeOpt with
219
+
| None -> ()
220
+
| Some n ->
221
+
let res2 = Mst.get loader n "apple" "" |> Async.RunSynchronously
222
+
Assert.True(Option.isNone res2)
223
+
224
+
[<Fact>]
225
+
let ``Determinism From Entries`` () =
226
+
let store = System.Collections.Concurrent.ConcurrentDictionary<string, MstNode>()
227
+
228
+
let loader (c : Cid) = async {
229
+
let key = System.Convert.ToBase64String(c.Bytes)
230
+
let success, node = store.TryGetValue(key)
231
+
return if success then Some node else None
232
+
}
233
+
234
+
let persister (n : MstNode) = async {
235
+
let bytes = Mst.serialize n
236
+
let cid = Cid(Crypto.sha256 bytes)
237
+
let key = System.Convert.ToBase64String(cid.Bytes)
238
+
store.[key] <- n
239
+
return cid
240
+
}
241
+
242
+
let data = [
243
+
"apple", Cid(Crypto.sha256Str "1")
244
+
"banana", Cid(Crypto.sha256Str "2")
245
+
"cherry", Cid(Crypto.sha256Str "3")
246
+
"date", Cid(Crypto.sha256Str "4")
247
+
"elderberry", Cid(Crypto.sha256Str "5")
248
+
]
249
+
250
+
251
+
let node1 = Mst.fromEntries loader persister data |> Async.RunSynchronously
252
+
let cid1 = persister node1 |> Async.RunSynchronously
253
+
254
+
let node2 =
255
+
Mst.fromEntries loader persister (List.rev data) |> Async.RunSynchronously
256
+
257
+
let cid2 = persister node2 |> Async.RunSynchronously
258
+
let data3 = [ data.[2]; data.[0]; data.[4]; data.[1]; data.[3] ]
259
+
let node3 = Mst.fromEntries loader persister data3 |> Async.RunSynchronously
260
+
let cid3 = persister node3 |> Async.RunSynchronously
261
+
262
+
Assert.Equal(cid1, cid2)
263
+
Assert.Equal(cid1, cid3)
264
+
()
+13
-5
PDSharp.Tests/PDSharp.Tests.fsproj
+13
-5
PDSharp.Tests/PDSharp.Tests.fsproj
···
1
1
<Project Sdk="Microsoft.NET.Sdk">
2
-
3
2
<PropertyGroup>
4
3
<TargetFramework>net9.0</TargetFramework>
5
4
<IsPackable>false</IsPackable>
···
8
7
9
8
<ItemGroup>
10
9
<Compile Include="Tests.fs" />
10
+
<Compile Include="Mst.Tests.fs" />
11
+
<Compile Include="BlockStore.Tests.fs" />
12
+
<Compile Include="AtUri.Tests.fs" />
13
+
<Compile Include="Repository.Tests.fs" />
14
+
<Compile Include="Car.Tests.fs" />
15
+
<Compile Include="Firehose.Tests.fs" />
16
+
<Compile Include="Auth.Tests.fs" />
17
+
<Compile Include="Conformance.Tests.fs" />
18
+
<Compile Include="Handlers.Tests.fs" />
19
+
<Compile Include="Health.Tests.fs" />
11
20
<Compile Include="Program.fs" />
12
21
</ItemGroup>
13
22
···
18
27
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
19
28
</ItemGroup>
20
29
21
-
<ItemGroup>
22
-
<ProjectReference Include="..\PDSharp\PDSharp.fsproj" />
23
-
<ProjectReference Include="..\PDSharp.Core\PDSharp.Core.fsproj" />
30
+
<ItemGroup>
31
+
<ProjectReference Include="..\PDSharp\PDSharp.fsproj" />
32
+
<ProjectReference Include="..\PDSharp.Core\PDSharp.Core.fsproj" />
24
33
</ItemGroup>
25
-
26
34
</Project>
+128
PDSharp.Tests/Repository.Tests.fs
+128
PDSharp.Tests/Repository.Tests.fs
···
1
+
module RepositoryTests
2
+
3
+
open Xunit
4
+
open PDSharp.Core
5
+
open PDSharp.Core.Crypto
6
+
open PDSharp.Core.Repository
7
+
8
+
[<Fact>]
9
+
let ``TID generation produces 13 character string`` () =
10
+
let tid = Tid.generate ()
11
+
Assert.Equal(13, tid.Length)
12
+
13
+
[<Fact>]
14
+
let ``TID generation is sortable by time`` () =
15
+
let tid1 = Tid.generate ()
16
+
System.Threading.Thread.Sleep(2)
17
+
let tid2 = Tid.generate ()
18
+
Assert.True(tid2 > tid1, $"Expected {tid2} > {tid1}")
19
+
20
+
[<Fact>]
21
+
let ``Commit signing produces valid signature`` () =
22
+
let keyPair = generateKey P256
23
+
let mstRoot = Cid.FromHash(sha256Str "mst-root")
24
+
25
+
let unsigned = {
26
+
Did = "did:plc:test1234"
27
+
Version = 3
28
+
Data = mstRoot
29
+
Rev = Tid.generate ()
30
+
Prev = None
31
+
}
32
+
33
+
let signed = signCommit keyPair unsigned
34
+
35
+
Assert.Equal(64, signed.Sig.Length)
36
+
Assert.Equal(unsigned.Did, signed.Did)
37
+
Assert.Equal(unsigned.Data, signed.Data)
38
+
39
+
[<Fact>]
40
+
let ``Commit verification succeeds for valid commit`` () =
41
+
let keyPair = generateKey P256
42
+
let mstRoot = Cid.FromHash(sha256Str "data")
43
+
44
+
let unsigned = {
45
+
Did = "did:plc:abc"
46
+
Version = 3
47
+
Data = mstRoot
48
+
Rev = Tid.generate ()
49
+
Prev = None
50
+
}
51
+
52
+
let signed = signCommit keyPair unsigned
53
+
54
+
Assert.True(verifyCommit keyPair signed)
55
+
56
+
[<Fact>]
57
+
let ``Commit verification fails for tampered data`` () =
58
+
let keyPair = generateKey P256
59
+
let mstRoot = Cid.FromHash(sha256Str "original")
60
+
61
+
let unsigned = {
62
+
Did = "did:plc:abc"
63
+
Version = 3
64
+
Data = mstRoot
65
+
Rev = Tid.generate ()
66
+
Prev = None
67
+
}
68
+
69
+
let signed = signCommit keyPair unsigned
70
+
let tampered = { signed with Did = "did:plc:different" }
71
+
72
+
Assert.False(verifyCommit keyPair tampered)
73
+
74
+
[<Fact>]
75
+
let ``Commit with prev CID`` () =
76
+
let keyPair = generateKey P256
77
+
let mstRoot = Cid.FromHash(sha256Str "new-data")
78
+
let prevCid = Cid.FromHash(sha256Str "prev-commit")
79
+
80
+
let unsigned = {
81
+
Did = "did:plc:abc"
82
+
Version = 3
83
+
Data = mstRoot
84
+
Rev = Tid.generate ()
85
+
Prev = Some prevCid
86
+
}
87
+
88
+
let signed = signCommit keyPair unsigned
89
+
Assert.True(verifyCommit keyPair signed)
90
+
Assert.True(Option.isSome signed.Prev)
91
+
92
+
[<Fact>]
93
+
let ``Commit CID is deterministic`` () =
94
+
let keyPair = generateKey P256
95
+
let mstRoot = Cid.FromHash(sha256Str "data")
96
+
97
+
let unsigned = {
98
+
Did = "did:plc:abc"
99
+
Version = 3
100
+
Data = mstRoot
101
+
Rev = "abcdefghijklm"
102
+
Prev = None
103
+
}
104
+
105
+
let signed = signCommit keyPair unsigned
106
+
let cid1 = commitCid signed
107
+
let cid2 = commitCid signed
108
+
109
+
Assert.Equal<byte[]>(cid1.Bytes, cid2.Bytes)
110
+
111
+
[<Fact>]
112
+
let ``Commit serialization produces valid DAG-CBOR`` () =
113
+
let keyPair = generateKey P256
114
+
let mstRoot = Cid.FromHash(sha256Str "test")
115
+
116
+
let unsigned = {
117
+
Did = "did:plc:test"
118
+
Version = 3
119
+
Data = mstRoot
120
+
Rev = Tid.generate ()
121
+
Prev = None
122
+
}
123
+
124
+
let signed = signCommit keyPair unsigned
125
+
let bytes = serializeCommit signed
126
+
127
+
Assert.True(bytes.Length > 0)
128
+
Assert.True(bytes.[0] >= 0xa0uy && bytes.[0] <= 0xbfuy)
+114
-4
PDSharp.Tests/Tests.fs
+114
-4
PDSharp.Tests/Tests.fs
···
1
1
module Tests
2
2
3
-
open System
4
3
open Xunit
5
4
open PDSharp.Core.Models
6
5
open PDSharp.Core.Config
7
-
8
-
[<Fact>]
9
-
let ``My test`` () = Assert.True(true)
6
+
open PDSharp.Core.Crypto
7
+
open PDSharp.Core
8
+
open PDSharp.Core.DidResolver
9
+
open Org.BouncyCastle.Utilities.Encoders
10
+
open System.Text
11
+
open System.Text.Json
12
+
open Org.BouncyCastle.Math
10
13
11
14
[<Fact>]
12
15
let ``Can instantiate AppConfig`` () =
13
16
let config = {
14
17
PublicUrl = "https://example.com"
15
18
DidHost = "did:web:example.com"
19
+
JwtSecret = "test-secret-key-for-testing-only"
20
+
SqliteConnectionString = "Data Source=:memory:"
21
+
DisableWalAutoCheckpoint = false
22
+
BlobStore = Disk "blobs"
16
23
}
17
24
18
25
Assert.Equal("did:web:example.com", config.DidHost)
19
26
20
27
[<Fact>]
28
+
let ``CID TryParse roundtrip`` () =
29
+
let hash = Crypto.sha256Str "test-data"
30
+
let cid = Cid.FromHash hash
31
+
let cidStr = cid.ToString()
32
+
33
+
match Cid.TryParse cidStr with
34
+
| Some parsed -> Assert.Equal<byte[]>(cid.Bytes, parsed.Bytes)
35
+
| None -> Assert.Fail "TryParse should succeed for valid CID"
36
+
37
+
[<Fact>]
38
+
let ``CID TryParse returns None for invalid`` () =
39
+
Assert.True(Cid.TryParse("invalid").IsNone)
40
+
Assert.True(Cid.TryParse("").IsNone)
41
+
Assert.True(Cid.TryParse("btooshort").IsNone)
42
+
43
+
[<Fact>]
21
44
let ``Can instantiate DescribeServerResponse`` () =
22
45
let response = {
23
46
availableUserDomains = [ "example.com" ]
···
27
50
28
51
Assert.Equal("did:web:example.com", response.did)
29
52
Assert.Equal(1, response.availableUserDomains.Length)
53
+
54
+
[<Fact>]
55
+
let ``SHA-256 Hashing correct`` () =
56
+
let input = "hello world"
57
+
let hash = sha256Str input
58
+
let expected = "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
59
+
let actual = Hex.ToHexString(hash)
60
+
Assert.Equal(expected, actual)
61
+
62
+
[<Fact>]
63
+
let ``ECDSA P-256 Sign and Verify`` () =
64
+
let keyPair = generateKey P256
65
+
let data = Encoding.UTF8.GetBytes("test message")
66
+
let hash = sha256 data
67
+
let signature = sign keyPair hash
68
+
Assert.True(signature.Length = 64, "Signature should be 64 bytes (R|S)")
69
+
70
+
let valid = verify keyPair hash signature
71
+
Assert.True(valid, "Signature verification failed")
72
+
73
+
[<Fact>]
74
+
let ``ECDSA K-256 Sign and Verify`` () =
75
+
let keyPair = generateKey K256
76
+
let data = Encoding.UTF8.GetBytes("test k256")
77
+
let hash = sha256 data
78
+
let signature = sign keyPair hash
79
+
Assert.True(signature.Length = 64, "Signature should be 64 bytes")
80
+
81
+
let valid = verify keyPair hash signature
82
+
Assert.True(valid, "Signature verification failed")
83
+
84
+
[<Fact>]
85
+
let ``Low-S Enforcement Logic`` () =
86
+
let n =
87
+
BigInteger("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 16) // secp256k1 N
88
+
89
+
let halfN = n.ShiftRight(1)
90
+
let highS = halfN.Add(BigInteger.One)
91
+
92
+
let lowS = enforceLowS highS n
93
+
Assert.True(lowS.CompareTo halfN <= 0, "S value should be <= N/2")
94
+
Assert.Equal(n.Subtract highS, lowS)
95
+
96
+
[<Fact>]
97
+
let ``DidDocument JSON deserialization`` () =
98
+
let json =
99
+
"""{
100
+
"id": "did:web:example.com",
101
+
"verificationMethod": [{
102
+
"id": "did:web:example.com#atproto",
103
+
"type": "Multikey",
104
+
"controller": "did:web:example.com",
105
+
"publicKeyMultibase": "zQ3sh..."
106
+
}]
107
+
}"""
108
+
109
+
let doc =
110
+
JsonSerializer.Deserialize<DidDocument>(json, JsonSerializerOptions(PropertyNameCaseInsensitive = true))
111
+
112
+
Assert.Equal("did:web:example.com", doc.Id)
113
+
Assert.Single doc.VerificationMethod |> ignore
114
+
Assert.Equal("Multikey", doc.VerificationMethod.Head.Type)
115
+
116
+
[<Fact>]
117
+
let ``CID Generation from Hash`` () =
118
+
let hash =
119
+
Hex.Decode "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
120
+
121
+
let cid = Cid.FromHash hash
122
+
Assert.Equal<byte>(0x01uy, cid.Bytes.[0])
123
+
Assert.Equal<byte>(0x71uy, cid.Bytes.[1])
124
+
Assert.Equal<byte>(0x12uy, cid.Bytes.[2])
125
+
Assert.Equal<byte>(0x20uy, cid.Bytes.[3])
126
+
127
+
[<Fact>]
128
+
let ``DAG-CBOR Canonical Sorting`` () =
129
+
let m = Map.ofList [ "b", box 1; "a", box 2 ]
130
+
let encoded = DagCbor.encode m
131
+
let hex = Hex.ToHexString encoded
132
+
Assert.Equal("a2616102616201", hex)
133
+
134
+
[<Fact>]
135
+
let ``DAG-CBOR Sorting Length vs Bytes`` () =
136
+
let m = Map.ofList [ "aa", box 1; "b", box 2 ]
137
+
let encoded = DagCbor.encode m
138
+
let hex = Hex.ToHexString encoded
139
+
Assert.Equal("a261620262616101", hex)
+115
-14
README.md
+115
-14
README.md
···
1
+
<!-- markdownlint-disable MD033 -->
1
2
# PDSharp
2
3
3
-
> A Personal Data Server (PDS) for the AT Protocol, written in F# with Giraffe.
4
+
A Personal Data Server (PDS) for the AT Protocol, written in F# with Giraffe.
4
5
5
6
## Goal
6
7
···
8
9
9
10
## Requirements
10
11
11
-
- .NET 9.0 SDK
12
-
- [Just](https://github.com/casey/just) (optional, for potential future task running)
12
+
.NET 9.0 SDK
13
13
14
14
## Getting Started
15
15
···
34
34
35
35
The server will start at `http://localhost:5000`.
36
36
37
-
### Verify
38
-
39
-
Check the `describeServer` endpoint:
40
-
41
-
```bash
42
-
curl http://localhost:5000/xrpc/com.atproto.server.describeServer
43
-
```
44
-
45
37
## Configuration
46
38
47
39
The application uses `appsettings.json` and supports Environment Variable overrides.
···
60
52
}
61
53
```
62
54
55
+
## API Testing
56
+
57
+
<details>
58
+
<summary>Server Info</summary>
59
+
60
+
```bash
61
+
curl http://localhost:5000/xrpc/com.atproto.server.describeServer
62
+
```
63
+
64
+
</details>
65
+
66
+
### Record Operations
67
+
68
+
<details>
69
+
<summary>Create a record</summary>
70
+
71
+
```bash
72
+
curl -X POST http://localhost:5000/xrpc/com.atproto.repo.createRecord \
73
+
-H "Content-Type: application/json" \
74
+
-d '{"repo":"did:web:test","collection":"app.bsky.feed.post","record":{"text":"Hello, ATProto!"}}'
75
+
```
76
+
77
+
</details>
78
+
79
+
<details>
80
+
<summary>Get a record</summary>
81
+
82
+
```bash
83
+
curl "http://localhost:5000/xrpc/com.atproto.repo.getRecord?repo=did:web:test&collection=app.bsky.feed.post&rkey=<RKEY>"
84
+
```
85
+
86
+
</details>
87
+
88
+
<details>
89
+
<summary>Put a record</summary>
90
+
91
+
```bash
92
+
curl -X POST http://localhost:5000/xrpc/com.atproto.repo.putRecord \
93
+
-H "Content-Type: application/json" \
94
+
-d '{"repo":"did:web:test","collection":"app.bsky.feed.post","rkey":"my-post","record":{"text":"Updated!"}}'
95
+
```
96
+
97
+
</details>
98
+
99
+
### Sync & CAR Export
100
+
101
+
<details>
102
+
<summary>Get entire repository as CAR</summary>
103
+
104
+
```bash
105
+
curl "http://localhost:5000/xrpc/com.atproto.sync.getRepo?did=did:web:test" -o repo.car
106
+
```
107
+
108
+
</details>
109
+
110
+
<details>
111
+
<summary>Get specific blocks</summary>
112
+
113
+
```bash
114
+
curl "http://localhost:5000/xrpc/com.atproto.sync.getBlocks?did=did:web:test&cids=<CID1>,<CID2>" -o blocks.car
115
+
```
116
+
117
+
</details>
118
+
119
+
<details>
120
+
<summary>Get a blob by CID</summary>
121
+
122
+
```bash
123
+
curl "http://localhost:5000/xrpc/com.atproto.sync.getBlob?did=did:web:test&cid=<BLOB_CID>"
124
+
```
125
+
126
+
</details>
127
+
128
+
### Firehose (WebSocket)
129
+
130
+
Subscribe to real-time commit events using [websocat](https://github.com/vi/websocat):
131
+
132
+
<details>
133
+
<summary>Open a WebSocket connection</summary>
134
+
135
+
```bash
136
+
websocat ws://localhost:5000/xrpc/com.atproto.sync.subscribeRepos
137
+
```
138
+
139
+
</details>
140
+
141
+
<br />
142
+
Then create/update records in another terminal to see CBOR-encoded commit events stream in real-time.
143
+
144
+
<br />
145
+
146
+
<details>
147
+
<summary>Open a WebSocket connection with cursor for resumption</summary>
148
+
149
+
```bash
150
+
websocat "ws://localhost:5000/xrpc/com.atproto.sync.subscribeRepos?cursor=5"
151
+
```
152
+
153
+
</details>
154
+
63
155
## Architecture
64
156
65
-
### App (Giraffe)
157
+
<details>
158
+
<summary>App (Giraffe)</summary>
66
159
67
160
- `XrpcRouter`: `/xrpc/<NSID>` routing
68
161
- `Auth`: Session management (JWTs)
69
162
- `RepoApi`: Write/Read records (`putRecord`, `getRecord`)
70
163
- `ServerApi`: Server meta (`describeServer`)
71
164
72
-
### Core (Pure F#)
165
+
</details>
166
+
167
+
<details>
168
+
<summary>Core (Pure F#)</summary>
73
169
74
170
- `DidResolver`: Identity resolution
75
171
- `RepoEngine`: MST, DAG-CBOR, CIDs, Blocks
76
172
- `Models`: Data types for XRPC/Database
77
173
78
-
### Infra
174
+
</details>
175
+
176
+
<details>
177
+
<summary>Infra</summary>
79
178
80
179
- SQLite/Postgres for persistence
81
180
- S3/Disk for blob storage
181
+
182
+
</details>
+59
-44
roadmap.txt
+59
-44
roadmap.txt
···
11
11
--------------------------------------------------------------------------------
12
12
Milestone B: Identity + Crypto Primitives
13
13
--------------------------------------------------------------------------------
14
-
- DID document fetch/parse for signing key and PDS endpoint
15
-
- SHA-256 hashing, ECDSA sign/verify (p256 + k256), low-S enforcement
14
+
- [x] DID document fetch/parse for signing key and PDS endpoint
15
+
- [x] SHA-256 hashing, ECDSA sign/verify (p256 + k256), low-S enforcement
16
16
DoD: Sign and verify atproto commit hash with low-S
17
17
--------------------------------------------------------------------------------
18
18
Milestone C: DAG-CBOR + CID
19
19
--------------------------------------------------------------------------------
20
-
- Canonical DAG-CBOR encode/decode with IPLD link tagging
21
-
- CID creation/parsing (multicodec dag-cbor, sha2-256)
20
+
- [x] Canonical DAG-CBOR encode/decode with IPLD link tagging
21
+
- [x] CID creation/parsing (multicodec dag-cbor, sha2-256)
22
22
DoD: Record JSON → stable DAG-CBOR bytes → deterministic CID
23
23
--------------------------------------------------------------------------------
24
24
Milestone D: MST Implementation
25
25
--------------------------------------------------------------------------------
26
-
- Merkle Search Tree per repository spec
27
-
- Key depth = leading zero bits in SHA-256(key) counted in 2-bit chunks
28
-
- Node encoding: (l, e[p,k,v,t]) with key prefix compression
26
+
- [x] Merkle Search Tree per repository spec
27
+
- [x] Key depth = leading zero bits in SHA-256(key) counted in 2-bit chunks
28
+
- [x] Node encoding: (l, e[p,k,v,t]) with key prefix compression
29
29
DoD: Insert/update/delete yields reproducible root CID
30
30
--------------------------------------------------------------------------------
31
31
Milestone E: Commit + BlockStore + putRecord
32
32
--------------------------------------------------------------------------------
33
-
- BlockStore: cid → bytes, indexed by DID/rev/head
34
-
- Commit signing: UnsignedCommit → DAG-CBOR → sha256 → ECDSA sign
35
-
- Implement com.atproto.repo.putRecord/createRecord
33
+
- [x] BlockStore: cid → bytes, indexed by DID/rev/head
34
+
- [x] Commit signing: UnsignedCommit → DAG-CBOR → sha256 → ECDSA sign
35
+
- [x] Implement com.atproto.repo.putRecord/createRecord
36
36
DoD: Write and read records by path/AT-URI
  --------------------------------------------------------------------------------
  Milestone F: CAR Export + Sync Endpoints
  --------------------------------------------------------------------------------
- - CARv1 writer (roots = commit CID, blocks stream)
- - Implement: sync.getRepo, sync.getBlocks, sync.getBlob
+ - [x] CARv1 writer (roots = commit CID, blocks stream)
+ - [x] Implement: sync.getRepo, sync.getBlocks, sync.getBlob
  DoD: External services can fetch repo snapshot + blocks
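CARv1 is mostly framing: an unsigned-varint length before the DAG-CBOR header and before every CID+block section. A sketch of that framing, with illustrative names; `headerBytes` is assumed to be the DAG-CBOR encoding of `{ version = 1; roots = [commitCid] }` produced by the existing encoder.

```fsharp
// Sketch only: CARv1 framing around pre-encoded header and block bytes.
open System.IO

/// Unsigned LEB128 varint, as used by CAR and the multiformats specs.
let writeVarint (stream : Stream) (value : uint64) =
    let mutable v = value
    let mutable more = true
    while more do
        let b = byte (v &&& 0x7FUL)
        v <- v >>> 7
        if v = 0UL then
            stream.WriteByte b
            more <- false
        else
            stream.WriteByte(b ||| 0x80uy)

/// One section = varint(total length) + raw CID bytes + block bytes.
let writeSection (stream : Stream) (cidBytes : byte[]) (blockBytes : byte[]) =
    writeVarint stream (uint64 (cidBytes.Length + blockBytes.Length))
    stream.Write(cidBytes, 0, cidBytes.Length)
    stream.Write(blockBytes, 0, blockBytes.Length)

/// A CAR stream: varint-framed header, then one varint-framed section per block.
let writeCar (stream : Stream) (headerBytes : byte[]) (blocks : (byte[] * byte[]) seq) =
    writeVarint stream (uint64 headerBytes.Length)
    stream.Write(headerBytes, 0, headerBytes.Length)
    for cidBytes, blockBytes in blocks do
        writeSection stream cidBytes blockBytes
```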
  --------------------------------------------------------------------------------
  Milestone G: subscribeRepos Firehose
  --------------------------------------------------------------------------------
- - Monotonic sequence number + commit event generation
- - WebSocket streaming for subscribeRepos
+ - [x] Monotonic sequence number + commit event generation
+ - [x] WebSocket streaming for subscribeRepos
  DoD: Relay/client receives commit events after writes
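The monotonic sequence number is simple but worth pinning down, because relays treat it as a cursor. A process-local sketch (illustrative type; a real sequencer also has to persist the last value so numbers keep increasing across restarts, which is ignored here).

```fsharp
// Sketch only: a thread-safe, strictly increasing sequencer for firehose frames.
type Sequencer(start : int64) =
    let gate = obj()
    let mutable current = start
    /// Next strictly-increasing sequence number; safe to call from multiple writers.
    member _.Next() =
        lock gate (fun () ->
            current <- current + 1L
            current)

// Usage: let sequencer = Sequencer 0L
//        let seqNo = sequencer.Next()   // stamp the #commit frame before broadcasting
```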
  --------------------------------------------------------------------------------
  Milestone H: Account + Sessions
  --------------------------------------------------------------------------------
- - Implement: server.createAccount, server.createSession, refreshSession
- - Password/app-password hashing + JWT issuance
+ - [x] Implement: server.createAccount, server.createSession, refreshSession
+ - [x] Password/app-password hashing + JWT issuance
  DoD: Authenticate and write records with accessJwt
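For the hashing bullet, one possible approach is PBKDF2 from the BCL, sketched below. The `salt.key` storage format, iteration count, and function names are assumptions for illustration, not necessarily what PDSharp.Core.Auth does (a bcrypt/scrypt library would be an equally valid choice).

```fsharp
// Sketch only: PBKDF2 password hashing and constant-time verification with the BCL.
open System
open System.Security.Cryptography

let iterations = 210_000   // illustrative work factor
let saltSize = 16
let keySize = 32

let hashPassword (password : string) : string =
    let salt = RandomNumberGenerator.GetBytes saltSize
    let key = Rfc2898DeriveBytes.Pbkdf2(password, salt, iterations, HashAlgorithmName.SHA256, keySize)
    // Store salt and derived key together so verification can recompute the key.
    Convert.ToBase64String salt + "." + Convert.ToBase64String key

let verifyPassword (password : string) (stored : string) : bool =
    match stored.Split '.' with
    | [| saltB64; keyB64 |] ->
        let salt = Convert.FromBase64String saltB64
        let expected = Convert.FromBase64String keyB64
        let actual = Rfc2898DeriveBytes.Pbkdf2(password, salt, iterations, HashAlgorithmName.SHA256, keySize)
        CryptographicOperations.FixedTimeEquals(ReadOnlySpan<byte>(actual), ReadOnlySpan<byte>(expected))
    | _ -> false
```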
  --------------------------------------------------------------------------------
  Milestone I: Lexicon Validation + Conformance
  --------------------------------------------------------------------------------
- - Lexicon validation for writes (app.bsky.* records)
- - Conformance testing: diff CIDs/CARs/signatures vs reference PDS
+ - [x] Lexicon validation for writes (app.bsky.* records)
+ - [x] Conformance testing: diff CIDs/CARs/signatures vs reference PDS
  DoD: Same inputs → same outputs for repo/sync surfaces
+ --------------------------------------------------------------------------------
+ Milestone J: Storage Backend Configuration
+ --------------------------------------------------------------------------------
+ - [x] Configure SQLite WAL mode (PDS_SQLITE_DISABLE_WAL_AUTO_CHECKPOINT=true)
+ - [x] Implement S3-compatible blobstore adapter (optional via config)
+ - [x] Configure disk-based vs S3-based blob storage selection
+ DoD: PDS runs with S3 blobs (if configured) and SQLite passes Litestream checks
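WAL mode is what makes the Litestream check in the DoD realistic. A sketch using Microsoft.Data.Sqlite with an illustrative connection string; the PDS_SQLITE_DISABLE_WAL_AUTO_CHECKPOINT=true setting from the item above then presumably leaves checkpointing to the replication tooling instead of SQLite's automatic checkpointer.

```fsharp
// Sketch only: switch the SQLite database to WAL so it replicates cleanly.
open Microsoft.Data.Sqlite

let enableWal (dbPath : string) : string =
    use conn = new SqliteConnection($"Data Source={dbPath}")
    conn.Open()
    use cmd = conn.CreateCommand()
    cmd.CommandText <- "PRAGMA journal_mode=WAL;"
    // SQLite answers with the journal mode now in effect ("wal" on success).
    cmd.ExecuteScalar() |> string
```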
+ --------------------------------------------------------------------------------
+ Milestone K: Backup Automation + Guardrails
+ --------------------------------------------------------------------------------
+ - [ ] Implement BackupOps module (scheduler/cron logic)
+ - [ ] Automated backup jobs:
+   - [ ] Databases (Litestream or raw copy) + /pds/actors backup
+   - [ ] Local disk blobs (if applicable)
+ - [ ] Guardrails & Monitoring:
+   - [x] Uptime check endpoint: /xrpc/_health with JSON status
+   - [x] Alerts: latest backup older than threshold; disk usage above 90%
+   - [ ] Log retention policies
+ DoD:
+ - Backups run automatically and report status
+ - Health checks indicate system state
+ - Restore drill: restoring backups onto a fresh host passes verification
+ - Backup set is explicitly documented
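The /xrpc/_health item above is already done; for context, a handler in the same Giraffe style as the existing ones would look roughly like this. The response shape is an assumption, and the real endpoint may report more than a status string.

```fsharp
// Sketch only: a minimal health-check handler; wiring and payload are illustrative.
open Giraffe
open Microsoft.AspNetCore.Http

let healthHandler : HttpHandler =
    fun next ctx -> task {
        // A fuller check would verify DB reachability and disk headroom here
        // and flip the status code accordingly.
        ctx.SetStatusCode 200
        return! json {| status = "ok" |} next ctx
    }

// Assumed wiring: GET /xrpc/_health -> healthHandler
```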
  ================================================================================
  PHASE 2: DEPLOYMENT (Self-Host)
  ================================================================================
- Milestone J: Topology + Domain Planning
+ Milestone L: Topology + Domain Planning
  --------------------------------------------------------------------------------
  - Choose PDS hostname (pds.example.com) vs handle domain (example.com)
  - Obtain domain, DNS access, VPS with static IP, reverse proxy
  DoD: Clear plan for PDS location, handle, and DID resolution
  --------------------------------------------------------------------------------
- Milestone K: DNS + TLS + Reverse Proxy
+ Milestone M: DNS + TLS + Reverse Proxy
  --------------------------------------------------------------------------------
  - DNS A/AAAA records for PDS hostname
- - TLS certs (ACME) via Caddy/Nginx/Traefik
+ - TLS certs (ACME) via Caddy
  DoD: https://<pds-hostname> responds with valid cert
  --------------------------------------------------------------------------------
- Milestone L: Deploy PDSharp
+ Milestone N: Deploy PDSharp
  --------------------------------------------------------------------------------
  - Deploy built PDS with persistence (SQLite/Postgres + blob storage)
  - Verify /xrpc/com.atproto.server.describeServer
  DoD: describeServer returns capabilities payload
  --------------------------------------------------------------------------------
- Milestone M: Account Creation
+ Milestone O: Account Creation
  --------------------------------------------------------------------------------
  - Create account using admin tooling
  - Verify authentication: createSession
  DoD: Obtain session and perform authenticated write
  --------------------------------------------------------------------------------
- Milestone N: Smoke Test Repo + Blobs
+ Milestone P: Smoke Test Repo + Blobs
  --------------------------------------------------------------------------------
  - Write record via putRecord
  - Upload blob, verify retrieval via sync.getBlob
  DoD: Posts appear in clients, media loads reliably
  --------------------------------------------------------------------------------
- Milestone O: Account Migration (Optional)
+ Milestone Q: Account Migration
  --------------------------------------------------------------------------------
  - Export/import from bsky.social
  - Update DID service endpoint
  - Verify handle/DID resolution
  DoD: Handle unchanged, DID points to your PDS
  --------------------------------------------------------------------------------
- Milestone P: Reliability
- --------------------------------------------------------------------------------
- - Backups: repo storage + database + blobs
- - Restore drill on fresh instance
- - Monitoring: uptime checks for describeServer + getBlob
- DoD: Restore from backup passes smoke tests
- --------------------------------------------------------------------------------
- Milestone Q: Updates + Security
+ Milestone R: Updates + Security
  --------------------------------------------------------------------------------
  - Update cadence with rollback plan
  - Rate limits and access controls at proxy
- - Log retention and disk growth alerts
  DoD: Update smoothly, maintain stable federation
  ================================================================================
  QUICK CHECKLIST
  ================================================================================
- [ ] describeServer endpoint working
- [ ] Crypto primitives (sha256, ECDSA p256/k256, low-S)
- [ ] DAG-CBOR + CID generation correct
- [ ] MST producing deterministic root CIDs
- [ ] putRecord + blockstore operational
- [ ] CAR export + sync endpoints
- [ ] subscribeRepos firehose
- [ ] Authentication (createAccount, createSession)
- [ ] Lexicon validation
+ [x] describeServer endpoint working
+ [x] Crypto primitives (sha256, ECDSA p256/k256, low-S)
+ [x] DAG-CBOR + CID generation correct
+ [x] MST producing deterministic root CIDs
+ [x] putRecord + blockstore operational
+ [x] CAR export + sync endpoints
+ [x] subscribeRepos firehose
+ [x] Authentication (createAccount, createSession)
+ [x] Lexicon validation
  [ ] Domain + TLS configured
  [ ] PDS deployed and reachable
  [ ] Account created, session works
  [ ] Writes + blobs verified
- [ ] Backups + monitoring in place
+ [/] Backups + monitoring in place (health endpoint done, backup automation pending)
  ================================================================================
  REFERENCES
  ================================================================================