1package main
2
3import (
4 "encoding/json"
5 "fmt"
6 "io"
7 "os"
8 "strings"
9
10 "github.com/bluesky-social/indigo/api/agnostic"
11 "github.com/bluesky-social/indigo/atproto/data"
12 "github.com/bluesky-social/indigo/atproto/identity"
13 "github.com/bluesky-social/indigo/atproto/lexicon"
14 "github.com/bluesky-social/indigo/atproto/syntax"
15 "github.com/bluesky-social/indigo/xrpc"
16
17 "github.com/urfave/cli/v2"
18)
19
20var cmdLex = &cli.Command{
21 Name: "lex",
22 Usage: "sub-commands for Lexicons",
23 Flags: []cli.Flag{},
24 Subcommands: []*cli.Command{
25 &cli.Command{
26 Name: "resolve",
27 Usage: "lookup a schema for an NSID",
28 ArgsUsage: `<nsid>`,
29 Flags: []cli.Flag{
30 &cli.BoolFlag{
31 Name: "did",
32 Usage: "just resolve to DID, not the schema itself",
33 },
34 },
35 Action: runLexResolve,
36 },
37 &cli.Command{
38 Name: "parse",
39 Usage: "parse and validate Lexicon schema files",
40 ArgsUsage: `<path>+`,
41 Flags: []cli.Flag{},
42 Action: runLexParse,
43 },
44 &cli.Command{
45 Name: "publish",
46 Usage: "add schema JSON files to atproto repo",
47 ArgsUsage: `<path>+`,
48 Flags: []cli.Flag{},
49 Action: runLexPublish,
50 },
51 &cli.Command{
52 Name: "ls",
53 Aliases: []string{"list"},
54 Usage: "list all known Lexicon NSIDs at the same level of hierarchy",
55 ArgsUsage: `<nsid>`,
56 Flags: []cli.Flag{},
57 Action: runLexList,
58 },
59 &cli.Command{
60 Name: "validate",
61 Usage: "validate a record, either AT-URI or local file",
62 ArgsUsage: `<uri-or-path>`,
63 Flags: []cli.Flag{
64 &cli.BoolFlag{
65 Name: "allow-legacy-blob",
66 Usage: "be permissive of legacy blobs",
67 },
68 &cli.StringFlag{
69 Name: "catalog",
70 Aliases: []string{"c"},
71 Usage: "path to directory of Lexicon files",
72 },
73 },
74 Action: runLexValidate,
75 },
76 },
77}
78
79func loadSchemaFile(p string) (map[string]any, error) {
80 f, err := os.Open(p)
81 if err != nil {
82 return nil, err
83 }
84 defer func() { _ = f.Close() }()
85 b, err := io.ReadAll(f)
86 if err != nil {
87 return nil, err
88 }
89
90 // verify format
91 var sf lexicon.SchemaFile
92 if err := json.Unmarshal(b, &sf); err != nil {
93 return nil, err
94 }
95 // TODO: additional validation?
96
97 // parse as raw data
98 d, err := data.UnmarshalJSON(b)
99 if err != nil {
100 return nil, err
101 }
102 return d, nil
103}
104
105func runLexParse(cctx *cli.Context) error {
106 if cctx.Args().Len() <= 0 {
107 return fmt.Errorf("require at least one path to parse")
108 }
109 for _, path := range cctx.Args().Slice() {
110 _, err := loadSchemaFile(path)
111 if err != nil {
112 return fmt.Errorf("failed to parse %s: %w", path, err)
113 }
114 fmt.Printf("%s: success\n", path)
115 }
116 return nil
117}
118
119func runLexPublish(cctx *cli.Context) error {
120 if cctx.Args().Len() <= 0 {
121 return fmt.Errorf("require at least one path to publish")
122 }
123
124 ctx := cctx.Context
125 xrpcc, err := loadAuthClient(ctx)
126 if err == ErrNoAuthSession {
127 return fmt.Errorf("auth required, but not logged in")
128 } else if err != nil {
129 return err
130 }
131
132 validateFlag := false
133
134 for _, path := range cctx.Args().Slice() {
135 recordVal, err := loadSchemaFile(path)
136 if err != nil {
137 return fmt.Errorf("failed to parse %s: %w", path, err)
138 }
139
140 recordVal["$type"] = "com.atproto.lexicon.schema"
141 val, ok := recordVal["id"]
142 if !ok {
143 return fmt.Errorf("missing NSID in Lexicon schema")
144 }
145 rawNSID, ok := val.(string)
146 if !ok {
147 return fmt.Errorf("missing NSID in Lexicon schema")
148 }
149 nsid, err := syntax.ParseNSID(rawNSID)
150 if err != nil {
151 return err
152 }
153 nsidStr := nsid.String()
154
155 resp, err := agnostic.RepoPutRecord(ctx, xrpcc, &agnostic.RepoPutRecord_Input{
156 Collection: "com.atproto.lexicon.schema",
157 Repo: xrpcc.Auth.Did,
158 Record: recordVal,
159 Rkey: nsidStr,
160 Validate: &validateFlag,
161 })
162 if err != nil {
163 return err
164 }
165
166 fmt.Printf("%s\t%s\n", resp.Uri, resp.Cid)
167 }
168 return nil
169}
170
171func runLexResolve(cctx *cli.Context) error {
172 ctx := cctx.Context
173 raw := cctx.Args().First()
174 if raw == "" {
175 return fmt.Errorf("NSID argument is required")
176 }
177
178 // TODO: handle fragments
179 nsid, err := syntax.ParseNSID(raw)
180 if err != nil {
181 return err
182 }
183
184 dir := identity.BaseDirectory{}
185 if cctx.Bool("did") {
186 did, err := dir.ResolveNSID(ctx, nsid)
187 if err != nil {
188 return err
189 }
190 fmt.Println(did)
191 return nil
192 }
193
194 data, err := lexicon.ResolveLexiconData(ctx, &dir, nsid)
195 if err != nil {
196 return err
197 }
198
199 b, err := json.MarshalIndent(data, "", " ")
200 if err != nil {
201 return err
202 }
203 fmt.Println(string(b))
204
205 return nil
206}
207
208func runLexList(cctx *cli.Context) error {
209 ctx := cctx.Context
210 raw := cctx.Args().First()
211 if raw == "" {
212 return fmt.Errorf("NSID argument is required")
213 }
214
215 // TODO: handle fragments?
216 nsid, err := syntax.ParseNSID(raw)
217 if err != nil {
218 return err
219 }
220 authority := nsid.Authority()
221
222 dir := identity.BaseDirectory{}
223 did, err := dir.ResolveNSID(ctx, nsid)
224 if err != nil {
225 return err
226 }
227
228 ident, err := dir.LookupDID(ctx, did)
229 if err != nil {
230 return err
231 }
232
233 // create a new API client to connect to the account's PDS
234 xrpcc := xrpc.Client{
235 Host: ident.PDSEndpoint(),
236 UserAgent: userAgent(),
237 }
238 if xrpcc.Host == "" {
239 return fmt.Errorf("no PDS endpoint for identity")
240 }
241
242 // iterate through all records in the lexicon schema collection, and check if prefix ("authority") matches that of the original NSID
243 // NOTE: much of this code is copied from runRecordList
244 cursor := ""
245 for {
246 // collection string, cursor string, limit int64, repo string, reverse bool
247 resp, err := agnostic.RepoListRecords(ctx, &xrpcc, "com.atproto.lexicon.schema", cursor, 100, ident.DID.String(), false)
248 if err != nil {
249 return err
250 }
251 for _, rec := range resp.Records {
252 aturi, err := syntax.ParseATURI(rec.Uri)
253 if err != nil {
254 return err
255 }
256 schemaNSID, err := syntax.ParseNSID(aturi.RecordKey().String())
257 if err != nil {
258 continue
259 }
260 if schemaNSID.Authority() == authority {
261 fmt.Println(schemaNSID)
262 }
263 }
264 if resp.Cursor != nil && *resp.Cursor != "" {
265 cursor = *resp.Cursor
266 } else {
267 break
268 }
269 }
270
271 return nil
272}
273
// runLexValidate validates a single record against its Lexicon schema. The
// argument is either an AT-URI (record fetched from the network, type taken
// from the URI's collection segment) or a path to a local JSON file (type
// taken from the record's $type field).
func runLexValidate(cctx *cli.Context) error {
	ctx := cctx.Context
	ref := cctx.Args().First()
	if ref == "" {
		return fmt.Errorf("URI or file path argument is required")
	}

	var nsid syntax.NSID
	var recordData map[string]any
	dir := identity.BaseDirectory{}
	// NOTE(review): presumably this catalog resolves schemas over the
	// network on demand, optionally pre-seeded below — confirm against
	// lexicon.NewResolvingCatalog
	cat := lexicon.NewResolvingCatalog()

	var flags lexicon.ValidateFlags = 0
	if cctx.Bool("allow-legacy-blob") {
		flags |= lexicon.AllowLegacyBlob
	}

	// optionally pre-load schemas from a local directory before validating
	if cctx.String("catalog") != "" {
		fmt.Printf("loading catalog directory: %s\n", cctx.String("catalog"))
		if err := cat.Base.LoadDirectory(cctx.String("catalog")); err != nil {
			return err
		}
	}

	// fetch from network if an AT-URI
	if strings.HasPrefix(ref, "at://") {
		aturi, err := syntax.ParseATURI(ref)
		if err != nil {
			return err
		}
		// the record type is the collection segment of the AT-URI
		nsid = aturi.Collection()

		ident, err := dir.Lookup(ctx, aturi.Authority())
		if err != nil {
			return err
		}

		recordData, err = fetchRecord(ctx, *ident, aturi)
		if err != nil {
			return err
		}
	} else {
		// otherwise try to read from disk
		recordBytes, err := os.ReadFile(ref)
		if err != nil {
			return err
		}

		// local files carry their record type in the $type field
		rawNSID, err := data.ExtractTypeJSON(recordBytes)
		if err != nil {
			return err
		}
		nsid, err = syntax.ParseNSID(rawNSID)
		if err != nil {
			return err
		}

		recordData, err = data.UnmarshalJSON(recordBytes)
		if err != nil {
			return err
		}
	}

	if err := lexicon.ValidateRecord(&cat, recordData, nsid.String(), flags); err != nil {
		return err
	}
	fmt.Printf("valid %s record\n", nsid)
	return nil
}