Compare changes

+9949 -10383
+9 -7
api/tangled/actorprofile.go
···
  // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT.

- // Lexicon schema: sh.tangled.actor.profile
-
  package tangled
+
+ // schema: sh.tangled.actor.profile

  import (
-     lexutil "github.com/bluesky-social/indigo/lex/util"
+     "github.com/bluesky-social/indigo/lex/util"
  )

  const (
···
  )

  func init() {
-     lexutil.RegisterType("sh.tangled.actor.profile", &ActorProfile{})
- }
-
+     util.RegisterType("sh.tangled.actor.profile", &ActorProfile{})
+ } //
+ // RECORDTYPE: ActorProfile
  type ActorProfile struct {
-     LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.actor.profile"`
+     LexiconTypeID string `json:"$type,const=sh.tangled.actor.profile" cborgen:"$type,const=sh.tangled.actor.profile"`
+     // avatar: Small image to be displayed next to posts from account. AKA, 'profile picture'
+     Avatar *util.LexBlob `json:"avatar,omitempty" cborgen:"avatar,omitempty"`
      // bluesky: Include link to this account on Bluesky.
      Bluesky bool `json:"bluesky" cborgen:"bluesky"`
      // description: Free-form profile description text.
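The one structural change to this record is the new optional avatar blob. A minimal sketch of filling in the regenerated struct, assuming the module import path tangled.sh/tangled.sh/core/api/tangled and assuming Description is an optional *string (its declaration is collapsed in this hunk); the blob would typically come from a prior com.atproto.repo.uploadBlob call, and a nil Avatar is dropped from both encodings:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/bluesky-social/indigo/lex/util"
	"tangled.sh/tangled.sh/core/api/tangled" // assumed import path for this generated package
)

// buildProfile assembles a profile record; avatar may be nil, in which case
// the "avatar" key is omitted from the JSON and CBOR encodings entirely.
func buildProfile(avatar *util.LexBlob) *tangled.ActorProfile {
	desc := "building on the atmosphere"
	return &tangled.ActorProfile{
		LexiconTypeID: "sh.tangled.actor.profile",
		Description:   &desc,
		Bluesky:       true,
		Avatar:        avatar, // usually the blob returned by com.atproto.repo.uploadBlob
	}
}

func main() {
	out, _ := json.MarshalIndent(buildProfile(nil), "", "  ")
	fmt.Println(string(out))
}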
+44 -912
api/tangled/cbor_gen.go
··· 26 26 } 27 27 28 28 cw := cbg.NewCborWriter(w) 29 - fieldCount := 8 29 + fieldCount := 9 30 + 31 + if t.Avatar == nil { 32 + fieldCount-- 33 + } 30 34 31 35 if t.Description == nil { 32 36 fieldCount-- ··· 144 148 return err 145 149 } 146 150 151 + } 152 + } 153 + 154 + // t.Avatar (util.LexBlob) (struct) 155 + if t.Avatar != nil { 156 + 157 + if len("avatar") > 1000000 { 158 + return xerrors.Errorf("Value in field \"avatar\" was too long") 159 + } 160 + 161 + if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("avatar"))); err != nil { 162 + return err 163 + } 164 + if _, err := cw.WriteString(string("avatar")); err != nil { 165 + return err 166 + } 167 + 168 + if err := t.Avatar.MarshalCBOR(cw); err != nil { 169 + return err 147 170 } 148 171 } 149 172 ··· 428 451 } 429 452 430 453 } 454 + } 455 + // t.Avatar (util.LexBlob) (struct) 456 + case "avatar": 457 + 458 + { 459 + 460 + b, err := cr.ReadByte() 461 + if err != nil { 462 + return err 463 + } 464 + if b != cbg.CborNull[0] { 465 + if err := cr.UnreadByte(); err != nil { 466 + return err 467 + } 468 + t.Avatar = new(util.LexBlob) 469 + if err := t.Avatar.UnmarshalCBOR(cr); err != nil { 470 + return xerrors.Errorf("unmarshaling t.Avatar pointer: %w", err) 471 + } 472 + } 473 + 431 474 } 432 475 // t.Bluesky (bool) (bool) 433 476 case "bluesky": ··· 9890 9933 9891 9934 return nil 9892 9935 } 9893 - func (t *CiEvent) MarshalCBOR(w io.Writer) error { 9894 - if t == nil { 9895 - _, err := w.Write(cbg.CborNull) 9896 - return err 9897 - } 9898 - 9899 - cw := cbg.NewCborWriter(w) 9900 - 9901 - if _, err := cw.Write([]byte{161}); err != nil { 9902 - return err 9903 - } 9904 - 9905 - // t.Meta (tangled.CiEvent_Meta) (struct) 9906 - if len("meta") > 1000000 { 9907 - return xerrors.Errorf("Value in field \"meta\" was too long") 9908 - } 9909 - 9910 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("meta"))); err != nil { 9911 - return err 9912 - } 9913 - if _, err := cw.WriteString(string("meta")); err != nil { 9914 - return err 9915 - } 9916 - 9917 - if err := t.Meta.MarshalCBOR(cw); err != nil { 9918 - return err 9919 - } 9920 - return nil 9921 - } 9922 - 9923 - func (t *CiEvent) UnmarshalCBOR(r io.Reader) (err error) { 9924 - *t = CiEvent{} 9925 - 9926 - cr := cbg.NewCborReader(r) 9927 - 9928 - maj, extra, err := cr.ReadHeader() 9929 - if err != nil { 9930 - return err 9931 - } 9932 - defer func() { 9933 - if err == io.EOF { 9934 - err = io.ErrUnexpectedEOF 9935 - } 9936 - }() 9937 - 9938 - if maj != cbg.MajMap { 9939 - return fmt.Errorf("cbor input should be of type map") 9940 - } 9941 - 9942 - if extra > cbg.MaxLength { 9943 - return fmt.Errorf("CiEvent: map struct too large (%d)", extra) 9944 - } 9945 - 9946 - n := extra 9947 - 9948 - nameBuf := make([]byte, 4) 9949 - for i := uint64(0); i < n; i++ { 9950 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 9951 - if err != nil { 9952 - return err 9953 - } 9954 - 9955 - if !ok { 9956 - // Field doesn't exist on this type, so ignore it 9957 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 9958 - return err 9959 - } 9960 - continue 9961 - } 9962 - 9963 - switch string(nameBuf[:nameLen]) { 9964 - // t.Meta (tangled.CiEvent_Meta) (struct) 9965 - case "meta": 9966 - 9967 - { 9968 - 9969 - b, err := cr.ReadByte() 9970 - if err != nil { 9971 - return err 9972 - } 9973 - if b != cbg.CborNull[0] { 9974 - if err := cr.UnreadByte(); err != nil { 9975 - return err 9976 - } 9977 - t.Meta = new(CiEvent_Meta) 9978 - if err := 
t.Meta.UnmarshalCBOR(cr); err != nil { 9979 - return xerrors.Errorf("unmarshaling t.Meta pointer: %w", err) 9980 - } 9981 - } 9982 - 9983 - } 9984 - 9985 - default: 9986 - // Field doesn't exist on this type, so ignore it 9987 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 9988 - return err 9989 - } 9990 - } 9991 - } 9992 - 9993 - return nil 9994 - } 9995 - func (t *CiEvent_PullRequest) MarshalCBOR(w io.Writer) error { 9996 - if t == nil { 9997 - _, err := w.Write(cbg.CborNull) 9998 - return err 9999 - } 10000 - 10001 - cw := cbg.NewCborWriter(w) 10002 - 10003 - if _, err := cw.Write([]byte{161}); err != nil { 10004 - return err 10005 - } 10006 - 10007 - // t.LexiconTypeID (string) (string) 10008 - if len("$type") > 1000000 { 10009 - return xerrors.Errorf("Value in field \"$type\" was too long") 10010 - } 10011 - 10012 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("$type"))); err != nil { 10013 - return err 10014 - } 10015 - if _, err := cw.WriteString(string("$type")); err != nil { 10016 - return err 10017 - } 10018 - 10019 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("sh.tangled.ci.event#pullRequest"))); err != nil { 10020 - return err 10021 - } 10022 - if _, err := cw.WriteString(string("sh.tangled.ci.event#pullRequest")); err != nil { 10023 - return err 10024 - } 10025 - return nil 10026 - } 10027 - 10028 - func (t *CiEvent_PullRequest) UnmarshalCBOR(r io.Reader) (err error) { 10029 - *t = CiEvent_PullRequest{} 10030 - 10031 - cr := cbg.NewCborReader(r) 10032 - 10033 - maj, extra, err := cr.ReadHeader() 10034 - if err != nil { 10035 - return err 10036 - } 10037 - defer func() { 10038 - if err == io.EOF { 10039 - err = io.ErrUnexpectedEOF 10040 - } 10041 - }() 10042 - 10043 - if maj != cbg.MajMap { 10044 - return fmt.Errorf("cbor input should be of type map") 10045 - } 10046 - 10047 - if extra > cbg.MaxLength { 10048 - return fmt.Errorf("CiEvent_PullRequest: map struct too large (%d)", extra) 10049 - } 10050 - 10051 - n := extra 10052 - 10053 - nameBuf := make([]byte, 5) 10054 - for i := uint64(0); i < n; i++ { 10055 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 10056 - if err != nil { 10057 - return err 10058 - } 10059 - 10060 - if !ok { 10061 - // Field doesn't exist on this type, so ignore it 10062 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 10063 - return err 10064 - } 10065 - continue 10066 - } 10067 - 10068 - switch string(nameBuf[:nameLen]) { 10069 - // t.LexiconTypeID (string) (string) 10070 - case "$type": 10071 - 10072 - { 10073 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10074 - if err != nil { 10075 - return err 10076 - } 10077 - 10078 - t.LexiconTypeID = string(sval) 10079 - } 10080 - 10081 - default: 10082 - // Field doesn't exist on this type, so ignore it 10083 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 10084 - return err 10085 - } 10086 - } 10087 - } 10088 - 10089 - return nil 10090 - } 10091 - func (t *CiEvent_Push) MarshalCBOR(w io.Writer) error { 10092 - if t == nil { 10093 - _, err := w.Write(cbg.CborNull) 10094 - return err 10095 - } 10096 - 10097 - cw := cbg.NewCborWriter(w) 10098 - 10099 - if _, err := cw.Write([]byte{164}); err != nil { 10100 - return err 10101 - } 10102 - 10103 - // t.Ref (string) (string) 10104 - if len("ref") > 1000000 { 10105 - return xerrors.Errorf("Value in field \"ref\" was too long") 10106 - } 10107 - 10108 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("ref"))); err != nil { 10109 - return 
err 10110 - } 10111 - if _, err := cw.WriteString(string("ref")); err != nil { 10112 - return err 10113 - } 10114 - 10115 - if len(t.Ref) > 1000000 { 10116 - return xerrors.Errorf("Value in field t.Ref was too long") 10117 - } 10118 - 10119 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Ref))); err != nil { 10120 - return err 10121 - } 10122 - if _, err := cw.WriteString(string(t.Ref)); err != nil { 10123 - return err 10124 - } 10125 - 10126 - // t.LexiconTypeID (string) (string) 10127 - if len("$type") > 1000000 { 10128 - return xerrors.Errorf("Value in field \"$type\" was too long") 10129 - } 10130 - 10131 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("$type"))); err != nil { 10132 - return err 10133 - } 10134 - if _, err := cw.WriteString(string("$type")); err != nil { 10135 - return err 10136 - } 10137 - 10138 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("sh.tangled.ci.event#push"))); err != nil { 10139 - return err 10140 - } 10141 - if _, err := cw.WriteString(string("sh.tangled.ci.event#push")); err != nil { 10142 - return err 10143 - } 10144 - 10145 - // t.NewSha (string) (string) 10146 - if len("newSha") > 1000000 { 10147 - return xerrors.Errorf("Value in field \"newSha\" was too long") 10148 - } 10149 - 10150 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("newSha"))); err != nil { 10151 - return err 10152 - } 10153 - if _, err := cw.WriteString(string("newSha")); err != nil { 10154 - return err 10155 - } 10156 - 10157 - if len(t.NewSha) > 1000000 { 10158 - return xerrors.Errorf("Value in field t.NewSha was too long") 10159 - } 10160 - 10161 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.NewSha))); err != nil { 10162 - return err 10163 - } 10164 - if _, err := cw.WriteString(string(t.NewSha)); err != nil { 10165 - return err 10166 - } 10167 - 10168 - // t.OldSha (string) (string) 10169 - if len("oldSha") > 1000000 { 10170 - return xerrors.Errorf("Value in field \"oldSha\" was too long") 10171 - } 10172 - 10173 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("oldSha"))); err != nil { 10174 - return err 10175 - } 10176 - if _, err := cw.WriteString(string("oldSha")); err != nil { 10177 - return err 10178 - } 10179 - 10180 - if len(t.OldSha) > 1000000 { 10181 - return xerrors.Errorf("Value in field t.OldSha was too long") 10182 - } 10183 - 10184 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.OldSha))); err != nil { 10185 - return err 10186 - } 10187 - if _, err := cw.WriteString(string(t.OldSha)); err != nil { 10188 - return err 10189 - } 10190 - return nil 10191 - } 10192 - 10193 - func (t *CiEvent_Push) UnmarshalCBOR(r io.Reader) (err error) { 10194 - *t = CiEvent_Push{} 10195 - 10196 - cr := cbg.NewCborReader(r) 10197 - 10198 - maj, extra, err := cr.ReadHeader() 10199 - if err != nil { 10200 - return err 10201 - } 10202 - defer func() { 10203 - if err == io.EOF { 10204 - err = io.ErrUnexpectedEOF 10205 - } 10206 - }() 10207 - 10208 - if maj != cbg.MajMap { 10209 - return fmt.Errorf("cbor input should be of type map") 10210 - } 10211 - 10212 - if extra > cbg.MaxLength { 10213 - return fmt.Errorf("CiEvent_Push: map struct too large (%d)", extra) 10214 - } 10215 - 10216 - n := extra 10217 - 10218 - nameBuf := make([]byte, 6) 10219 - for i := uint64(0); i < n; i++ { 10220 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 10221 - if err != nil { 10222 - return err 10223 - } 10224 - 10225 - if !ok { 10226 - // Field doesn't exist 
on this type, so ignore it 10227 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 10228 - return err 10229 - } 10230 - continue 10231 - } 10232 - 10233 - switch string(nameBuf[:nameLen]) { 10234 - // t.Ref (string) (string) 10235 - case "ref": 10236 - 10237 - { 10238 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10239 - if err != nil { 10240 - return err 10241 - } 10242 - 10243 - t.Ref = string(sval) 10244 - } 10245 - // t.LexiconTypeID (string) (string) 10246 - case "$type": 10247 - 10248 - { 10249 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10250 - if err != nil { 10251 - return err 10252 - } 10253 - 10254 - t.LexiconTypeID = string(sval) 10255 - } 10256 - // t.NewSha (string) (string) 10257 - case "newSha": 10258 - 10259 - { 10260 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10261 - if err != nil { 10262 - return err 10263 - } 10264 - 10265 - t.NewSha = string(sval) 10266 - } 10267 - // t.OldSha (string) (string) 10268 - case "oldSha": 10269 - 10270 - { 10271 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10272 - if err != nil { 10273 - return err 10274 - } 10275 - 10276 - t.OldSha = string(sval) 10277 - } 10278 - 10279 - default: 10280 - // Field doesn't exist on this type, so ignore it 10281 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 10282 - return err 10283 - } 10284 - } 10285 - } 10286 - 10287 - return nil 10288 - } 10289 - func (t *CiEvent_Manual) MarshalCBOR(w io.Writer) error { 10290 - if t == nil { 10291 - _, err := w.Write(cbg.CborNull) 10292 - return err 10293 - } 10294 - 10295 - cw := cbg.NewCborWriter(w) 10296 - 10297 - if _, err := cw.Write([]byte{161}); err != nil { 10298 - return err 10299 - } 10300 - 10301 - // t.LexiconTypeID (string) (string) 10302 - if len("$type") > 1000000 { 10303 - return xerrors.Errorf("Value in field \"$type\" was too long") 10304 - } 10305 - 10306 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("$type"))); err != nil { 10307 - return err 10308 - } 10309 - if _, err := cw.WriteString(string("$type")); err != nil { 10310 - return err 10311 - } 10312 - 10313 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("sh.tangled.ci.event#manual"))); err != nil { 10314 - return err 10315 - } 10316 - if _, err := cw.WriteString(string("sh.tangled.ci.event#manual")); err != nil { 10317 - return err 10318 - } 10319 - return nil 10320 - } 10321 - 10322 - func (t *CiEvent_Manual) UnmarshalCBOR(r io.Reader) (err error) { 10323 - *t = CiEvent_Manual{} 10324 - 10325 - cr := cbg.NewCborReader(r) 10326 - 10327 - maj, extra, err := cr.ReadHeader() 10328 - if err != nil { 10329 - return err 10330 - } 10331 - defer func() { 10332 - if err == io.EOF { 10333 - err = io.ErrUnexpectedEOF 10334 - } 10335 - }() 10336 - 10337 - if maj != cbg.MajMap { 10338 - return fmt.Errorf("cbor input should be of type map") 10339 - } 10340 - 10341 - if extra > cbg.MaxLength { 10342 - return fmt.Errorf("CiEvent_Manual: map struct too large (%d)", extra) 10343 - } 10344 - 10345 - n := extra 10346 - 10347 - nameBuf := make([]byte, 5) 10348 - for i := uint64(0); i < n; i++ { 10349 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 10350 - if err != nil { 10351 - return err 10352 - } 10353 - 10354 - if !ok { 10355 - // Field doesn't exist on this type, so ignore it 10356 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 10357 - return err 10358 - } 10359 - continue 10360 - } 10361 - 10362 - switch string(nameBuf[:nameLen]) { 10363 - // t.LexiconTypeID (string) (string) 10364 
- case "$type": 10365 - 10366 - { 10367 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10368 - if err != nil { 10369 - return err 10370 - } 10371 - 10372 - t.LexiconTypeID = string(sval) 10373 - } 10374 - 10375 - default: 10376 - // Field doesn't exist on this type, so ignore it 10377 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 10378 - return err 10379 - } 10380 - } 10381 - } 10382 - 10383 - return nil 10384 - } 10385 - func (t *CiPipeline) MarshalCBOR(w io.Writer) error { 10386 - if t == nil { 10387 - _, err := w.Write(cbg.CborNull) 10388 - return err 10389 - } 10390 - 10391 - cw := cbg.NewCborWriter(w) 10392 - 10393 - if _, err := cw.Write([]byte{163}); err != nil { 10394 - return err 10395 - } 10396 - 10397 - // t.LexiconTypeID (string) (string) 10398 - if len("$type") > 1000000 { 10399 - return xerrors.Errorf("Value in field \"$type\" was too long") 10400 - } 10401 - 10402 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("$type"))); err != nil { 10403 - return err 10404 - } 10405 - if _, err := cw.WriteString(string("$type")); err != nil { 10406 - return err 10407 - } 10408 - 10409 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("sh.tangled.ci.pipeline"))); err != nil { 10410 - return err 10411 - } 10412 - if _, err := cw.WriteString(string("sh.tangled.ci.pipeline")); err != nil { 10413 - return err 10414 - } 10415 - 10416 - // t.Event (tangled.CiEvent) (struct) 10417 - if len("event") > 1000000 { 10418 - return xerrors.Errorf("Value in field \"event\" was too long") 10419 - } 10420 - 10421 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("event"))); err != nil { 10422 - return err 10423 - } 10424 - if _, err := cw.WriteString(string("event")); err != nil { 10425 - return err 10426 - } 10427 - 10428 - if err := t.Event.MarshalCBOR(cw); err != nil { 10429 - return err 10430 - } 10431 - 10432 - // t.WorkflowRuns ([]string) (slice) 10433 - if len("workflowRuns") > 1000000 { 10434 - return xerrors.Errorf("Value in field \"workflowRuns\" was too long") 10435 - } 10436 - 10437 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("workflowRuns"))); err != nil { 10438 - return err 10439 - } 10440 - if _, err := cw.WriteString(string("workflowRuns")); err != nil { 10441 - return err 10442 - } 10443 - 10444 - if len(t.WorkflowRuns) > 8192 { 10445 - return xerrors.Errorf("Slice value in field t.WorkflowRuns was too long") 10446 - } 10447 - 10448 - if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.WorkflowRuns))); err != nil { 10449 - return err 10450 - } 10451 - for _, v := range t.WorkflowRuns { 10452 - if len(v) > 1000000 { 10453 - return xerrors.Errorf("Value in field v was too long") 10454 - } 10455 - 10456 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil { 10457 - return err 10458 - } 10459 - if _, err := cw.WriteString(string(v)); err != nil { 10460 - return err 10461 - } 10462 - 10463 - } 10464 - return nil 10465 - } 10466 - 10467 - func (t *CiPipeline) UnmarshalCBOR(r io.Reader) (err error) { 10468 - *t = CiPipeline{} 10469 - 10470 - cr := cbg.NewCborReader(r) 10471 - 10472 - maj, extra, err := cr.ReadHeader() 10473 - if err != nil { 10474 - return err 10475 - } 10476 - defer func() { 10477 - if err == io.EOF { 10478 - err = io.ErrUnexpectedEOF 10479 - } 10480 - }() 10481 - 10482 - if maj != cbg.MajMap { 10483 - return fmt.Errorf("cbor input should be of type map") 10484 - } 10485 - 10486 - if extra > cbg.MaxLength { 10487 - return fmt.Errorf("CiPipeline: 
map struct too large (%d)", extra) 10488 - } 10489 - 10490 - n := extra 10491 - 10492 - nameBuf := make([]byte, 12) 10493 - for i := uint64(0); i < n; i++ { 10494 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 10495 - if err != nil { 10496 - return err 10497 - } 10498 - 10499 - if !ok { 10500 - // Field doesn't exist on this type, so ignore it 10501 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 10502 - return err 10503 - } 10504 - continue 10505 - } 10506 - 10507 - switch string(nameBuf[:nameLen]) { 10508 - // t.LexiconTypeID (string) (string) 10509 - case "$type": 10510 - 10511 - { 10512 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10513 - if err != nil { 10514 - return err 10515 - } 10516 - 10517 - t.LexiconTypeID = string(sval) 10518 - } 10519 - // t.Event (tangled.CiEvent) (struct) 10520 - case "event": 10521 - 10522 - { 10523 - 10524 - b, err := cr.ReadByte() 10525 - if err != nil { 10526 - return err 10527 - } 10528 - if b != cbg.CborNull[0] { 10529 - if err := cr.UnreadByte(); err != nil { 10530 - return err 10531 - } 10532 - t.Event = new(CiEvent) 10533 - if err := t.Event.UnmarshalCBOR(cr); err != nil { 10534 - return xerrors.Errorf("unmarshaling t.Event pointer: %w", err) 10535 - } 10536 - } 10537 - 10538 - } 10539 - // t.WorkflowRuns ([]string) (slice) 10540 - case "workflowRuns": 10541 - 10542 - maj, extra, err = cr.ReadHeader() 10543 - if err != nil { 10544 - return err 10545 - } 10546 - 10547 - if extra > 8192 { 10548 - return fmt.Errorf("t.WorkflowRuns: array too large (%d)", extra) 10549 - } 10550 - 10551 - if maj != cbg.MajArray { 10552 - return fmt.Errorf("expected cbor array") 10553 - } 10554 - 10555 - if extra > 0 { 10556 - t.WorkflowRuns = make([]string, extra) 10557 - } 10558 - 10559 - for i := 0; i < int(extra); i++ { 10560 - { 10561 - var maj byte 10562 - var extra uint64 10563 - var err error 10564 - _ = maj 10565 - _ = extra 10566 - _ = err 10567 - 10568 - { 10569 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10570 - if err != nil { 10571 - return err 10572 - } 10573 - 10574 - t.WorkflowRuns[i] = string(sval) 10575 - } 10576 - 10577 - } 10578 - } 10579 - 10580 - default: 10581 - // Field doesn't exist on this type, so ignore it 10582 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 10583 - return err 10584 - } 10585 - } 10586 - } 10587 - 10588 - return nil 10589 - } 10590 - func (t *CiWorkflowRun) MarshalCBOR(w io.Writer) error { 10591 - if t == nil { 10592 - _, err := w.Write(cbg.CborNull) 10593 - return err 10594 - } 10595 - 10596 - cw := cbg.NewCborWriter(w) 10597 - 10598 - if _, err := cw.Write([]byte{164}); err != nil { 10599 - return err 10600 - } 10601 - 10602 - // t.Name (string) (string) 10603 - if len("name") > 1000000 { 10604 - return xerrors.Errorf("Value in field \"name\" was too long") 10605 - } 10606 - 10607 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("name"))); err != nil { 10608 - return err 10609 - } 10610 - if _, err := cw.WriteString(string("name")); err != nil { 10611 - return err 10612 - } 10613 - 10614 - if len(t.Name) > 1000000 { 10615 - return xerrors.Errorf("Value in field t.Name was too long") 10616 - } 10617 - 10618 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Name))); err != nil { 10619 - return err 10620 - } 10621 - if _, err := cw.WriteString(string(t.Name)); err != nil { 10622 - return err 10623 - } 10624 - 10625 - // t.LexiconTypeID (string) (string) 10626 - if len("$type") > 1000000 { 10627 - return xerrors.Errorf("Value 
in field \"$type\" was too long") 10628 - } 10629 - 10630 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("$type"))); err != nil { 10631 - return err 10632 - } 10633 - if _, err := cw.WriteString(string("$type")); err != nil { 10634 - return err 10635 - } 10636 - 10637 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("sh.tangled.ci.workflow.run"))); err != nil { 10638 - return err 10639 - } 10640 - if _, err := cw.WriteString(string("sh.tangled.ci.workflow.run")); err != nil { 10641 - return err 10642 - } 10643 - 10644 - // t.Status (string) (string) 10645 - if len("status") > 1000000 { 10646 - return xerrors.Errorf("Value in field \"status\" was too long") 10647 - } 10648 - 10649 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("status"))); err != nil { 10650 - return err 10651 - } 10652 - if _, err := cw.WriteString(string("status")); err != nil { 10653 - return err 10654 - } 10655 - 10656 - if t.Status == nil { 10657 - if _, err := cw.Write(cbg.CborNull); err != nil { 10658 - return err 10659 - } 10660 - } else { 10661 - if len(*t.Status) > 1000000 { 10662 - return xerrors.Errorf("Value in field t.Status was too long") 10663 - } 10664 - 10665 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(*t.Status))); err != nil { 10666 - return err 10667 - } 10668 - if _, err := cw.WriteString(string(*t.Status)); err != nil { 10669 - return err 10670 - } 10671 - } 10672 - 10673 - // t.Adapter (string) (string) 10674 - if len("adapter") > 1000000 { 10675 - return xerrors.Errorf("Value in field \"adapter\" was too long") 10676 - } 10677 - 10678 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("adapter"))); err != nil { 10679 - return err 10680 - } 10681 - if _, err := cw.WriteString(string("adapter")); err != nil { 10682 - return err 10683 - } 10684 - 10685 - if len(t.Adapter) > 1000000 { 10686 - return xerrors.Errorf("Value in field t.Adapter was too long") 10687 - } 10688 - 10689 - if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Adapter))); err != nil { 10690 - return err 10691 - } 10692 - if _, err := cw.WriteString(string(t.Adapter)); err != nil { 10693 - return err 10694 - } 10695 - return nil 10696 - } 10697 - 10698 - func (t *CiWorkflowRun) UnmarshalCBOR(r io.Reader) (err error) { 10699 - *t = CiWorkflowRun{} 10700 - 10701 - cr := cbg.NewCborReader(r) 10702 - 10703 - maj, extra, err := cr.ReadHeader() 10704 - if err != nil { 10705 - return err 10706 - } 10707 - defer func() { 10708 - if err == io.EOF { 10709 - err = io.ErrUnexpectedEOF 10710 - } 10711 - }() 10712 - 10713 - if maj != cbg.MajMap { 10714 - return fmt.Errorf("cbor input should be of type map") 10715 - } 10716 - 10717 - if extra > cbg.MaxLength { 10718 - return fmt.Errorf("CiWorkflowRun: map struct too large (%d)", extra) 10719 - } 10720 - 10721 - n := extra 10722 - 10723 - nameBuf := make([]byte, 7) 10724 - for i := uint64(0); i < n; i++ { 10725 - nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000) 10726 - if err != nil { 10727 - return err 10728 - } 10729 - 10730 - if !ok { 10731 - // Field doesn't exist on this type, so ignore it 10732 - if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil { 10733 - return err 10734 - } 10735 - continue 10736 - } 10737 - 10738 - switch string(nameBuf[:nameLen]) { 10739 - // t.Name (string) (string) 10740 - case "name": 10741 - 10742 - { 10743 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10744 - if err != nil { 10745 - return err 10746 - } 10747 - 10748 - t.Name = 
string(sval) 10749 - } 10750 - // t.LexiconTypeID (string) (string) 10751 - case "$type": 10752 - 10753 - { 10754 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10755 - if err != nil { 10756 - return err 10757 - } 10758 - 10759 - t.LexiconTypeID = string(sval) 10760 - } 10761 - // t.Status (string) (string) 10762 - case "status": 10763 - 10764 - { 10765 - b, err := cr.ReadByte() 10766 - if err != nil { 10767 - return err 10768 - } 10769 - if b != cbg.CborNull[0] { 10770 - if err := cr.UnreadByte(); err != nil { 10771 - return err 10772 - } 10773 - 10774 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10775 - if err != nil { 10776 - return err 10777 - } 10778 - 10779 - t.Status = (*string)(&sval) 10780 - } 10781 - } 10782 - // t.Adapter (string) (string) 10783 - case "adapter": 10784 - 10785 - { 10786 - sval, err := cbg.ReadStringWithMax(cr, 1000000) 10787 - if err != nil { 10788 - return err 10789 - } 10790 - 10791 - t.Adapter = string(sval) 10792 - } 10793 - 10794 - default: 10795 - // Field doesn't exist on this type, so ignore it 10796 - if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil { 10797 - return err 10798 - } 10799 - } 10800 - } 10801 - 10802 - return nil 10803 - }
-124
api/tangled/cievent.go
··· 1 - // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 - 3 - // Lexicon schema: sh.tangled.ci.event 4 - 5 - package tangled 6 - 7 - import ( 8 - "bytes" 9 - "encoding/json" 10 - "fmt" 11 - "io" 12 - 13 - lexutil "github.com/bluesky-social/indigo/lex/util" 14 - cbg "github.com/whyrusleeping/cbor-gen" 15 - ) 16 - 17 - const ( 18 - CiEventNSID = "sh.tangled.ci.event" 19 - ) 20 - 21 - // CiEvent is a "main" in the sh.tangled.ci.event schema. 22 - type CiEvent struct { 23 - Meta *CiEvent_Meta `json:"meta" cborgen:"meta"` 24 - } 25 - 26 - // CiEvent_Manual is a "manual" in the sh.tangled.ci.event schema. 27 - type CiEvent_Manual struct { 28 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.ci.event#manual"` 29 - } 30 - 31 - type CiEvent_Meta struct { 32 - CiEvent_PullRequest *CiEvent_PullRequest 33 - CiEvent_Push *CiEvent_Push 34 - CiEvent_Manual *CiEvent_Manual 35 - } 36 - 37 - func (t *CiEvent_Meta) MarshalJSON() ([]byte, error) { 38 - if t.CiEvent_PullRequest != nil { 39 - t.CiEvent_PullRequest.LexiconTypeID = "sh.tangled.ci.event#pullRequest" 40 - return json.Marshal(t.CiEvent_PullRequest) 41 - } 42 - if t.CiEvent_Push != nil { 43 - t.CiEvent_Push.LexiconTypeID = "sh.tangled.ci.event#push" 44 - return json.Marshal(t.CiEvent_Push) 45 - } 46 - if t.CiEvent_Manual != nil { 47 - t.CiEvent_Manual.LexiconTypeID = "sh.tangled.ci.event#manual" 48 - return json.Marshal(t.CiEvent_Manual) 49 - } 50 - return nil, fmt.Errorf("can not marshal empty union as JSON") 51 - } 52 - 53 - func (t *CiEvent_Meta) UnmarshalJSON(b []byte) error { 54 - typ, err := lexutil.TypeExtract(b) 55 - if err != nil { 56 - return err 57 - } 58 - 59 - switch typ { 60 - case "sh.tangled.ci.event#pullRequest": 61 - t.CiEvent_PullRequest = new(CiEvent_PullRequest) 62 - return json.Unmarshal(b, t.CiEvent_PullRequest) 63 - case "sh.tangled.ci.event#push": 64 - t.CiEvent_Push = new(CiEvent_Push) 65 - return json.Unmarshal(b, t.CiEvent_Push) 66 - case "sh.tangled.ci.event#manual": 67 - t.CiEvent_Manual = new(CiEvent_Manual) 68 - return json.Unmarshal(b, t.CiEvent_Manual) 69 - default: 70 - return nil 71 - } 72 - } 73 - 74 - func (t *CiEvent_Meta) MarshalCBOR(w io.Writer) error { 75 - 76 - if t == nil { 77 - _, err := w.Write(cbg.CborNull) 78 - return err 79 - } 80 - if t.CiEvent_PullRequest != nil { 81 - return t.CiEvent_PullRequest.MarshalCBOR(w) 82 - } 83 - if t.CiEvent_Push != nil { 84 - return t.CiEvent_Push.MarshalCBOR(w) 85 - } 86 - if t.CiEvent_Manual != nil { 87 - return t.CiEvent_Manual.MarshalCBOR(w) 88 - } 89 - return fmt.Errorf("can not marshal empty union as CBOR") 90 - } 91 - 92 - func (t *CiEvent_Meta) UnmarshalCBOR(r io.Reader) error { 93 - typ, b, err := lexutil.CborTypeExtractReader(r) 94 - if err != nil { 95 - return err 96 - } 97 - 98 - switch typ { 99 - case "sh.tangled.ci.event#pullRequest": 100 - t.CiEvent_PullRequest = new(CiEvent_PullRequest) 101 - return t.CiEvent_PullRequest.UnmarshalCBOR(bytes.NewReader(b)) 102 - case "sh.tangled.ci.event#push": 103 - t.CiEvent_Push = new(CiEvent_Push) 104 - return t.CiEvent_Push.UnmarshalCBOR(bytes.NewReader(b)) 105 - case "sh.tangled.ci.event#manual": 106 - t.CiEvent_Manual = new(CiEvent_Manual) 107 - return t.CiEvent_Manual.UnmarshalCBOR(bytes.NewReader(b)) 108 - default: 109 - return nil 110 - } 111 - } 112 - 113 - // CiEvent_PullRequest is a "pullRequest" in the sh.tangled.ci.event schema. 
114 - type CiEvent_PullRequest struct { 115 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.ci.event#pullRequest"` 116 - } 117 - 118 - // CiEvent_Push is a "push" in the sh.tangled.ci.event schema. 119 - type CiEvent_Push struct { 120 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.ci.event#push"` 121 - NewSha string `json:"newSha" cborgen:"newSha"` 122 - OldSha string `json:"oldSha" cborgen:"oldSha"` 123 - Ref string `json:"ref" cborgen:"ref"` 124 - }
-23
api/tangled/cipipeline.go
···
- // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT.
-
- // Lexicon schema: sh.tangled.ci.pipeline
-
- package tangled
-
- import (
-     lexutil "github.com/bluesky-social/indigo/lex/util"
- )
-
- const (
-     CiPipelineNSID = "sh.tangled.ci.pipeline"
- )
-
- func init() {
-     lexutil.RegisterType("sh.tangled.ci.pipeline", &CiPipeline{})
- }
-
- type CiPipeline struct {
-     LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.ci.pipeline"`
-     Event *CiEvent `json:"event" cborgen:"event"`
-     WorkflowRuns []string `json:"workflowRuns" cborgen:"workflowRuns"`
- }
+7 -7
api/tangled/feedreaction.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.feed.reaction 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.feed.reaction 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.feed.reaction", &FeedReaction{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.feed.reaction", &FeedReaction{}) 17 + } // 18 + // RECORDTYPE: FeedReaction 19 19 type FeedReaction struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.feed.reaction"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.feed.reaction" cborgen:"$type,const=sh.tangled.feed.reaction"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 Reaction string `json:"reaction" cborgen:"reaction"` 23 23 Subject string `json:"subject" cborgen:"subject"`
+7 -7
api/tangled/feedstar.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.feed.star 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.feed.star 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.feed.star", &FeedStar{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.feed.star", &FeedStar{}) 17 + } // 18 + // RECORDTYPE: FeedStar 19 19 type FeedStar struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.feed.star"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.feed.star" cborgen:"$type,const=sh.tangled.feed.star"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 Subject string `json:"subject" cborgen:"subject"` 23 23 }
+7 -7
api/tangled/gitrefUpdate.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.git.refUpdate 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.git.refUpdate 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.git.refUpdate", &GitRefUpdate{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.git.refUpdate", &GitRefUpdate{}) 17 + } // 18 + // RECORDTYPE: GitRefUpdate 19 19 type GitRefUpdate struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.git.refUpdate"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.git.refUpdate" cborgen:"$type,const=sh.tangled.git.refUpdate"` 21 21 // committerDid: did of the user that pushed this ref 22 22 CommitterDid string `json:"committerDid" cborgen:"committerDid"` 23 23 Meta *GitRefUpdate_Meta `json:"meta" cborgen:"meta"`
+7 -7
api/tangled/graphfollow.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.graph.follow 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.graph.follow 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.graph.follow", &GraphFollow{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.graph.follow", &GraphFollow{}) 17 + } // 18 + // RECORDTYPE: GraphFollow 19 19 type GraphFollow struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.graph.follow"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.graph.follow" cborgen:"$type,const=sh.tangled.graph.follow"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 Subject string `json:"subject" cborgen:"subject"` 23 23 }
+7 -7
api/tangled/issuecomment.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.issue.comment 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.issue.comment 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.issue.comment", &RepoIssueComment{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.issue.comment", &RepoIssueComment{}) 17 + } // 18 + // RECORDTYPE: RepoIssueComment 19 19 type RepoIssueComment struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.issue.comment"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.comment" cborgen:"$type,const=sh.tangled.repo.issue.comment"` 21 21 Body string `json:"body" cborgen:"body"` 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 Issue string `json:"issue" cborgen:"issue"`
+7 -7
api/tangled/issuestate.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.issue.state 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.issue.state 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.issue.state", &RepoIssueState{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.issue.state", &RepoIssueState{}) 17 + } // 18 + // RECORDTYPE: RepoIssueState 19 19 type RepoIssueState struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.issue.state"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.state" cborgen:"$type,const=sh.tangled.repo.issue.state"` 21 21 Issue string `json:"issue" cborgen:"issue"` 22 22 // state: state of the issue 23 23 State string `json:"state" cborgen:"state"`
+5 -5
api/tangled/knotlistKeys.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.knot.listKeys 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.knot.listKeys 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 35 35 // 36 36 // cursor: Pagination cursor 37 37 // limit: Maximum number of keys to return 38 - func KnotListKeys(ctx context.Context, c lexutil.LexClient, cursor string, limit int64) (*KnotListKeys_Output, error) { 38 + func KnotListKeys(ctx context.Context, c util.LexClient, cursor string, limit int64) (*KnotListKeys_Output, error) { 39 39 var out KnotListKeys_Output 40 40 41 41 params := map[string]interface{}{} ··· 45 45 if limit != 0 { 46 46 params["limit"] = limit 47 47 } 48 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.knot.listKeys", params, nil, &out); err != nil { 48 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.knot.listKeys", params, nil, &out); err != nil { 49 49 return nil, err 50 50 } 51 51
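For query endpoints like this one the generated signature only changes its parameter spelling from the aliased lexutil.LexClient to util.LexClient, so call sites keep working. A hedged usage sketch; constructing the client and the fields of KnotListKeys_Output (collapsed in this diff) are left out:

package example // hypothetical package, for illustration only

import (
	"context"
	"fmt"

	"github.com/bluesky-social/indigo/lex/util"
	"tangled.sh/tangled.sh/core/api/tangled" // assumed import path
)

// printKeys issues sh.tangled.knot.listKeys against a knot. c is any
// util.LexClient implementation pointed at the knot; wiring one up is
// out of scope for this sketch.
func printKeys(ctx context.Context, c util.LexClient) error {
	out, err := tangled.KnotListKeys(ctx, c, "" /* cursor */, 50 /* limit */)
	if err != nil {
		return err
	}
	fmt.Printf("%+v\n", out) // the output fields are collapsed in this diff, so just dump them
	return nil
}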
+7 -7
api/tangled/knotmember.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.knot.member 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.knot.member 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.knot.member", &KnotMember{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.knot.member", &KnotMember{}) 17 + } // 18 + // RECORDTYPE: KnotMember 19 19 type KnotMember struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.knot.member"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.knot.member" cborgen:"$type,const=sh.tangled.knot.member"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 // domain: domain that this member now belongs to 23 23 Domain string `json:"domain" cborgen:"domain"`
+5 -5
api/tangled/knotversion.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.knot.version 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.knot.version 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 20 20 } 21 21 22 22 // KnotVersion calls the XRPC method "sh.tangled.knot.version". 23 - func KnotVersion(ctx context.Context, c lexutil.LexClient) (*KnotVersion_Output, error) { 23 + func KnotVersion(ctx context.Context, c util.LexClient) (*KnotVersion_Output, error) { 24 24 var out KnotVersion_Output 25 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.knot.version", nil, nil, &out); err != nil { 25 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.knot.version", nil, nil, &out); err != nil { 26 26 return nil, err 27 27 } 28 28
+7 -7
api/tangled/labeldefinition.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.label.definition 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.label.definition 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.label.definition", &LabelDefinition{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.label.definition", &LabelDefinition{}) 17 + } // 18 + // RECORDTYPE: LabelDefinition 19 19 type LabelDefinition struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.label.definition"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.label.definition" cborgen:"$type,const=sh.tangled.label.definition"` 21 21 // color: The hex value for the background color for the label. Appviews may choose to respect this. 22 22 Color *string `json:"color,omitempty" cborgen:"color,omitempty"` 23 23 CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+7 -7
api/tangled/labelop.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.label.op 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.label.op 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.label.op", &LabelOp{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.label.op", &LabelOp{}) 17 + } // 18 + // RECORDTYPE: LabelOp 19 19 type LabelOp struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.label.op"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.label.op" cborgen:"$type,const=sh.tangled.label.op"` 21 21 Add []*LabelOp_Operand `json:"add" cborgen:"add"` 22 22 Delete []*LabelOp_Operand `json:"delete" cborgen:"delete"` 23 23 PerformedAt string `json:"performedAt" cborgen:"performedAt"`
+5 -5
api/tangled/pipelinecancelPipeline.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.pipeline.cancelPipeline 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.pipeline.cancelPipeline 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 25 25 } 26 26 27 27 // PipelineCancelPipeline calls the XRPC method "sh.tangled.pipeline.cancelPipeline". 28 - func PipelineCancelPipeline(ctx context.Context, c lexutil.LexClient, input *PipelineCancelPipeline_Input) error { 29 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.pipeline.cancelPipeline", nil, input, nil); err != nil { 28 + func PipelineCancelPipeline(ctx context.Context, c util.LexClient, input *PipelineCancelPipeline_Input) error { 29 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.pipeline.cancelPipeline", nil, input, nil); err != nil { 30 30 return err 31 31 } 32 32
+7 -7
api/tangled/pipelinestatus.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.pipeline.status 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.pipeline.status 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.pipeline.status", &PipelineStatus{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.pipeline.status", &PipelineStatus{}) 17 + } // 18 + // RECORDTYPE: PipelineStatus 19 19 type PipelineStatus struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.pipeline.status"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.pipeline.status" cborgen:"$type,const=sh.tangled.pipeline.status"` 21 21 // createdAt: time of creation of this status update 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 // error: error message if failed
+7 -7
api/tangled/pullcomment.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull.comment 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.pull.comment 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.pull.comment", &RepoPullComment{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.pull.comment", &RepoPullComment{}) 17 + } // 18 + // RECORDTYPE: RepoPullComment 19 19 type RepoPullComment struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.pull.comment"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.comment" cborgen:"$type,const=sh.tangled.repo.pull.comment"` 21 21 Body string `json:"body" cborgen:"body"` 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+7 -7
api/tangled/pullstatus.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull.status 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.pull.status 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.pull.status", &RepoPullStatus{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.pull.status", &RepoPullStatus{}) 17 + } // 18 + // RECORDTYPE: RepoPullStatus 19 19 type RepoPullStatus struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.pull.status"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.status" cborgen:"$type,const=sh.tangled.repo.pull.status"` 21 21 Pull string `json:"pull" cborgen:"pull"` 22 22 // status: status of the pull request 23 23 Status string `json:"status" cborgen:"status"`
+5 -5
api/tangled/repoaddSecret.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.addSecret 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.addSecret 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 22 22 } 23 23 24 24 // RepoAddSecret calls the XRPC method "sh.tangled.repo.addSecret". 25 - func RepoAddSecret(ctx context.Context, c lexutil.LexClient, input *RepoAddSecret_Input) error { 26 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.addSecret", nil, input, nil); err != nil { 25 + func RepoAddSecret(ctx context.Context, c util.LexClient, input *RepoAddSecret_Input) error { 26 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.addSecret", nil, input, nil); err != nil { 27 27 return err 28 28 } 29 29
+5 -5
api/tangled/repoarchive.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.archive 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.archive 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 21 21 // prefix: Prefix for files in the archive 22 22 // ref: Git reference (branch, tag, or commit SHA) 23 23 // repo: Repository identifier in format 'did:plc:.../repoName' 24 - func RepoArchive(ctx context.Context, c lexutil.LexClient, format string, prefix string, ref string, repo string) ([]byte, error) { 24 + func RepoArchive(ctx context.Context, c util.LexClient, format string, prefix string, ref string, repo string) ([]byte, error) { 25 25 buf := new(bytes.Buffer) 26 26 27 27 params := map[string]interface{}{} ··· 33 33 } 34 34 params["ref"] = ref 35 35 params["repo"] = repo 36 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.archive", params, nil, buf); err != nil { 36 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.archive", params, nil, buf); err != nil { 37 37 return nil, err 38 38 } 39 39
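RepoArchive keeps returning the archive as raw bytes; only the client parameter type spelling changed. A sketch of saving an archive for a ref, with the format string and client left as caller-supplied inputs rather than guessed values:

package example // hypothetical package, for illustration only

import (
	"context"
	"os"

	"github.com/bluesky-social/indigo/lex/util"
	"tangled.sh/tangled.sh/core/api/tangled" // assumed import path
)

// downloadArchive fetches an archive of ref from repo in the given format
// and writes it to path. c is any util.LexClient pointed at the knot.
func downloadArchive(ctx context.Context, c util.LexClient, repo, ref, format, path string) error {
	data, err := tangled.RepoArchive(ctx, c, format, "" /* prefix */, ref, repo)
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0o644)
}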
+9 -9
api/tangled/repoartifact.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.artifact 3 + package tangled 4 4 5 - package tangled 5 + // schema: sh.tangled.repo.artifact 6 6 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.artifact", &RepoArtifact{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.artifact", &RepoArtifact{}) 17 + } // 18 + // RECORDTYPE: RepoArtifact 19 19 type RepoArtifact struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.artifact"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.artifact" cborgen:"$type,const=sh.tangled.repo.artifact"` 21 21 // artifact: the artifact 22 - Artifact *lexutil.LexBlob `json:"artifact" cborgen:"artifact"` 22 + Artifact *util.LexBlob `json:"artifact" cborgen:"artifact"` 23 23 // createdAt: time of creation of this artifact 24 24 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 25 25 // name: name of the artifact ··· 27 27 // repo: repo that this artifact is being uploaded to 28 28 Repo string `json:"repo" cborgen:"repo"` 29 29 // tag: hash of the tag object that this artifact is attached to (only annotated tags are supported) 30 - Tag lexutil.LexBytes `json:"tag,omitempty" cborgen:"tag,omitempty"` 30 + Tag util.LexBytes `json:"tag,omitempty" cborgen:"tag,omitempty"` 31 31 }
+5 -7
api/tangled/repoblob.go
···
  // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT.

- // Lexicon schema: sh.tangled.repo.blob
-
  package tangled
+
+ // schema: sh.tangled.repo.blob

  import (
      "context"

-     lexutil "github.com/bluesky-social/indigo/lex/util"
+     "github.com/bluesky-social/indigo/lex/util"
  )

  const (
···
      Hash string `json:"hash" cborgen:"hash"`
      // message: Commit message
      Message string `json:"message" cborgen:"message"`
-     // shortHash: Short commit hash
-     ShortHash *string `json:"shortHash,omitempty" cborgen:"shortHash,omitempty"`
      // when: Commit timestamp
      When string `json:"when" cborgen:"when"`
  }
···
  // raw: Return raw file content instead of JSON response
  // ref: Git reference (branch, tag, or commit SHA)
  // repo: Repository identifier in format 'did:plc:.../repoName'
- func RepoBlob(ctx context.Context, c lexutil.LexClient, path string, raw bool, ref string, repo string) (*RepoBlob_Output, error) {
+ func RepoBlob(ctx context.Context, c util.LexClient, path string, raw bool, ref string, repo string) (*RepoBlob_Output, error) {
      var out RepoBlob_Output

      params := map[string]interface{}{}
···
      }
      params["ref"] = ref
      params["repo"] = repo
-     if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.blob", params, nil, &out); err != nil {
+     if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.blob", params, nil, &out); err != nil {
          return nil, err
      }

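With the shortHash field removed from the commit struct, callers that displayed it would now derive the abbreviated form from the full hash themselves; a hypothetical helper:

package example // hypothetical package, for illustration only

// shortHash abbreviates a full commit hash locally, standing in for the
// removed shortHash response field (hypothetical helper, not part of the API).
func shortHash(full string) string {
	if len(full) > 7 {
		return full[:7]
	}
	return full
}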
+5 -5
api/tangled/repobranch.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.branch 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.branch 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 45 45 // 46 46 // name: Branch name to get information for 47 47 // repo: Repository identifier in format 'did:plc:.../repoName' 48 - func RepoBranch(ctx context.Context, c lexutil.LexClient, name string, repo string) (*RepoBranch_Output, error) { 48 + func RepoBranch(ctx context.Context, c util.LexClient, name string, repo string) (*RepoBranch_Output, error) { 49 49 var out RepoBranch_Output 50 50 51 51 params := map[string]interface{}{} 52 52 params["name"] = name 53 53 params["repo"] = repo 54 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.branch", params, nil, &out); err != nil { 54 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.branch", params, nil, &out); err != nil { 55 55 return nil, err 56 56 } 57 57
+5 -5
api/tangled/repobranches.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.branches 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.branches 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 20 20 // cursor: Pagination cursor 21 21 // limit: Maximum number of branches to return 22 22 // repo: Repository identifier in format 'did:plc:.../repoName' 23 - func RepoBranches(ctx context.Context, c lexutil.LexClient, cursor string, limit int64, repo string) ([]byte, error) { 23 + func RepoBranches(ctx context.Context, c util.LexClient, cursor string, limit int64, repo string) ([]byte, error) { 24 24 buf := new(bytes.Buffer) 25 25 26 26 params := map[string]interface{}{} ··· 31 31 params["limit"] = limit 32 32 } 33 33 params["repo"] = repo 34 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.branches", params, nil, buf); err != nil { 34 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.branches", params, nil, buf); err != nil { 35 35 return nil, err 36 36 } 37 37
+7 -7
api/tangled/repocollaborator.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.collaborator 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.collaborator 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.collaborator", &RepoCollaborator{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.collaborator", &RepoCollaborator{}) 17 + } // 18 + // RECORDTYPE: RepoCollaborator 19 19 type RepoCollaborator struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.collaborator"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.collaborator" cborgen:"$type,const=sh.tangled.repo.collaborator"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 // repo: repo to add this user to 23 23 Repo string `json:"repo" cborgen:"repo"`
+5 -5
api/tangled/repocompare.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.compare 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.compare 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 20 20 // repo: Repository identifier in format 'did:plc:.../repoName' 21 21 // rev1: First revision (commit, branch, or tag) 22 22 // rev2: Second revision (commit, branch, or tag) 23 - func RepoCompare(ctx context.Context, c lexutil.LexClient, repo string, rev1 string, rev2 string) ([]byte, error) { 23 + func RepoCompare(ctx context.Context, c util.LexClient, repo string, rev1 string, rev2 string) ([]byte, error) { 24 24 buf := new(bytes.Buffer) 25 25 26 26 params := map[string]interface{}{} 27 27 params["repo"] = repo 28 28 params["rev1"] = rev1 29 29 params["rev2"] = rev2 30 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.compare", params, nil, buf); err != nil { 30 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.compare", params, nil, buf); err != nil { 31 31 return nil, err 32 32 } 33 33
+5 -5
api/tangled/repocreate.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.create 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.create 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 25 25 } 26 26 27 27 // RepoCreate calls the XRPC method "sh.tangled.repo.create". 28 - func RepoCreate(ctx context.Context, c lexutil.LexClient, input *RepoCreate_Input) error { 29 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.create", nil, input, nil); err != nil { 28 + func RepoCreate(ctx context.Context, c util.LexClient, input *RepoCreate_Input) error { 29 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.create", nil, input, nil); err != nil { 30 30 return err 31 31 } 32 32
+5 -5
api/tangled/repodelete.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.delete 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.delete 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 25 25 } 26 26 27 27 // RepoDelete calls the XRPC method "sh.tangled.repo.delete". 28 - func RepoDelete(ctx context.Context, c lexutil.LexClient, input *RepoDelete_Input) error { 29 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.delete", nil, input, nil); err != nil { 28 + func RepoDelete(ctx context.Context, c util.LexClient, input *RepoDelete_Input) error { 29 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.delete", nil, input, nil); err != nil { 30 30 return err 31 31 } 32 32
+5 -5
api/tangled/repodeleteBranch.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.deleteBranch 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.deleteBranch 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 21 21 } 22 22 23 23 // RepoDeleteBranch calls the XRPC method "sh.tangled.repo.deleteBranch". 24 - func RepoDeleteBranch(ctx context.Context, c lexutil.LexClient, input *RepoDeleteBranch_Input) error { 25 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.deleteBranch", nil, input, nil); err != nil { 24 + func RepoDeleteBranch(ctx context.Context, c util.LexClient, input *RepoDeleteBranch_Input) error { 25 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.deleteBranch", nil, input, nil); err != nil { 26 26 return err 27 27 } 28 28
+5 -5
api/tangled/repodiff.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.diff 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.diff 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 19 19 // 20 20 // ref: Git reference (branch, tag, or commit SHA) 21 21 // repo: Repository identifier in format 'did:plc:.../repoName' 22 - func RepoDiff(ctx context.Context, c lexutil.LexClient, ref string, repo string) ([]byte, error) { 22 + func RepoDiff(ctx context.Context, c util.LexClient, ref string, repo string) ([]byte, error) { 23 23 buf := new(bytes.Buffer) 24 24 25 25 params := map[string]interface{}{} 26 26 params["ref"] = ref 27 27 params["repo"] = repo 28 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.diff", params, nil, buf); err != nil { 28 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.diff", params, nil, buf); err != nil { 29 29 return nil, err 30 30 } 31 31
+5 -5
api/tangled/repoforkStatus.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.forkStatus 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.forkStatus 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 35 35 } 36 36 37 37 // RepoForkStatus calls the XRPC method "sh.tangled.repo.forkStatus". 38 - func RepoForkStatus(ctx context.Context, c lexutil.LexClient, input *RepoForkStatus_Input) (*RepoForkStatus_Output, error) { 38 + func RepoForkStatus(ctx context.Context, c util.LexClient, input *RepoForkStatus_Input) (*RepoForkStatus_Output, error) { 39 39 var out RepoForkStatus_Output 40 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.forkStatus", nil, input, &out); err != nil { 40 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.forkStatus", nil, input, &out); err != nil { 41 41 return nil, err 42 42 } 43 43
+5 -5
api/tangled/repoforkSync.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.forkSync 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.forkSync 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 27 27 } 28 28 29 29 // RepoForkSync calls the XRPC method "sh.tangled.repo.forkSync". 30 - func RepoForkSync(ctx context.Context, c lexutil.LexClient, input *RepoForkSync_Input) error { 31 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.forkSync", nil, input, nil); err != nil { 30 + func RepoForkSync(ctx context.Context, c util.LexClient, input *RepoForkSync_Input) error { 31 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.forkSync", nil, input, nil); err != nil { 32 32 return err 33 33 } 34 34
+5 -5
api/tangled/repogetDefaultBranch.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.getDefaultBranch 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.getDefaultBranch 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 42 42 // RepoGetDefaultBranch calls the XRPC method "sh.tangled.repo.getDefaultBranch". 43 43 // 44 44 // repo: Repository identifier in format 'did:plc:.../repoName' 45 - func RepoGetDefaultBranch(ctx context.Context, c lexutil.LexClient, repo string) (*RepoGetDefaultBranch_Output, error) { 45 + func RepoGetDefaultBranch(ctx context.Context, c util.LexClient, repo string) (*RepoGetDefaultBranch_Output, error) { 46 46 var out RepoGetDefaultBranch_Output 47 47 48 48 params := map[string]interface{}{} 49 49 params["repo"] = repo 50 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.getDefaultBranch", params, nil, &out); err != nil { 50 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.getDefaultBranch", params, nil, &out); err != nil { 51 51 return nil, err 52 52 } 53 53
+5 -5
api/tangled/repohiddenRef.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.hiddenRef 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.hiddenRef 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 35 35 } 36 36 37 37 // RepoHiddenRef calls the XRPC method "sh.tangled.repo.hiddenRef". 38 - func RepoHiddenRef(ctx context.Context, c lexutil.LexClient, input *RepoHiddenRef_Input) (*RepoHiddenRef_Output, error) { 38 + func RepoHiddenRef(ctx context.Context, c util.LexClient, input *RepoHiddenRef_Input) (*RepoHiddenRef_Output, error) { 39 39 var out RepoHiddenRef_Output 40 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.hiddenRef", nil, input, &out); err != nil { 40 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.hiddenRef", nil, input, &out); err != nil { 41 41 return nil, err 42 42 } 43 43
+7 -7
api/tangled/repoissue.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.issue 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo.issue 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.issue", &RepoIssue{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.issue", &RepoIssue{}) 17 + } // 18 + // RECORDTYPE: RepoIssue 19 19 type RepoIssue struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.issue"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue" cborgen:"$type,const=sh.tangled.repo.issue"` 21 21 Body *string `json:"body,omitempty" cborgen:"body,omitempty"` 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+5 -5
api/tangled/repolanguages.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.languages 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.languages 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 45 45 // 46 46 // ref: Git reference (branch, tag, or commit SHA) 47 47 // repo: Repository identifier in format 'did:plc:.../repoName' 48 - func RepoLanguages(ctx context.Context, c lexutil.LexClient, ref string, repo string) (*RepoLanguages_Output, error) { 48 + func RepoLanguages(ctx context.Context, c util.LexClient, ref string, repo string) (*RepoLanguages_Output, error) { 49 49 var out RepoLanguages_Output 50 50 51 51 params := map[string]interface{}{} ··· 53 53 params["ref"] = ref 54 54 } 55 55 params["repo"] = repo 56 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.languages", params, nil, &out); err != nil { 56 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.languages", params, nil, &out); err != nil { 57 57 return nil, err 58 58 } 59 59
+5 -5
api/tangled/repolistSecrets.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.listSecrets 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.listSecrets 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 28 28 } 29 29 30 30 // RepoListSecrets calls the XRPC method "sh.tangled.repo.listSecrets". 31 - func RepoListSecrets(ctx context.Context, c lexutil.LexClient, repo string) (*RepoListSecrets_Output, error) { 31 + func RepoListSecrets(ctx context.Context, c util.LexClient, repo string) (*RepoListSecrets_Output, error) { 32 32 var out RepoListSecrets_Output 33 33 34 34 params := map[string]interface{}{} 35 35 params["repo"] = repo 36 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.listSecrets", params, nil, &out); err != nil { 36 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.listSecrets", params, nil, &out); err != nil { 37 37 return nil, err 38 38 } 39 39
+5 -5
api/tangled/repolog.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.log 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.log 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 22 22 // path: Path to filter commits by 23 23 // ref: Git reference (branch, tag, or commit SHA) 24 24 // repo: Repository identifier in format 'did:plc:.../repoName' 25 - func RepoLog(ctx context.Context, c lexutil.LexClient, cursor string, limit int64, path string, ref string, repo string) ([]byte, error) { 25 + func RepoLog(ctx context.Context, c util.LexClient, cursor string, limit int64, path string, ref string, repo string) ([]byte, error) { 26 26 buf := new(bytes.Buffer) 27 27 28 28 params := map[string]interface{}{} ··· 37 37 } 38 38 params["ref"] = ref 39 39 params["repo"] = repo 40 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.log", params, nil, buf); err != nil { 40 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.log", params, nil, buf); err != nil { 41 41 return nil, err 42 42 } 43 43
+5 -5
api/tangled/repomerge.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.merge 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.merge 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 35 35 } 36 36 37 37 // RepoMerge calls the XRPC method "sh.tangled.repo.merge". 38 - func RepoMerge(ctx context.Context, c lexutil.LexClient, input *RepoMerge_Input) error { 39 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.merge", nil, input, nil); err != nil { 38 + func RepoMerge(ctx context.Context, c util.LexClient, input *RepoMerge_Input) error { 39 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.merge", nil, input, nil); err != nil { 40 40 return err 41 41 } 42 42
+5 -5
api/tangled/repomergeCheck.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.mergeCheck 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.mergeCheck 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 47 47 } 48 48 49 49 // RepoMergeCheck calls the XRPC method "sh.tangled.repo.mergeCheck". 50 - func RepoMergeCheck(ctx context.Context, c lexutil.LexClient, input *RepoMergeCheck_Input) (*RepoMergeCheck_Output, error) { 50 + func RepoMergeCheck(ctx context.Context, c util.LexClient, input *RepoMergeCheck_Input) (*RepoMergeCheck_Output, error) { 51 51 var out RepoMergeCheck_Output 52 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.mergeCheck", nil, input, &out); err != nil { 52 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.mergeCheck", nil, input, &out); err != nil { 53 53 return nil, err 54 54 } 55 55
+8 -8
api/tangled/repopull.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.pull 6 6 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo.pull", &RepoPull{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo.pull", &RepoPull{}) 17 + } // 18 + // RECORDTYPE: RepoPull 19 19 type RepoPull struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo.pull"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull" cborgen:"$type,const=sh.tangled.repo.pull"` 21 21 Body *string `json:"body,omitempty" cborgen:"body,omitempty"` 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"` 24 24 // patch: (deprecated) use patchBlob instead 25 25 Patch *string `json:"patch,omitempty" cborgen:"patch,omitempty"` 26 26 // patchBlob: patch content 27 - PatchBlob *lexutil.LexBlob `json:"patchBlob" cborgen:"patchBlob"` 27 + PatchBlob *util.LexBlob `json:"patchBlob" cborgen:"patchBlob"` 28 28 References []string `json:"references,omitempty" cborgen:"references,omitempty"` 29 29 Source *RepoPull_Source `json:"source,omitempty" cborgen:"source,omitempty"` 30 30 Target *RepoPull_Target `json:"target" cborgen:"target"`
+5 -5
api/tangled/reporemoveSecret.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.removeSecret 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.removeSecret 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 21 21 } 22 22 23 23 // RepoRemoveSecret calls the XRPC method "sh.tangled.repo.removeSecret". 24 - func RepoRemoveSecret(ctx context.Context, c lexutil.LexClient, input *RepoRemoveSecret_Input) error { 25 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.removeSecret", nil, input, nil); err != nil { 24 + func RepoRemoveSecret(ctx context.Context, c util.LexClient, input *RepoRemoveSecret_Input) error { 25 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.removeSecret", nil, input, nil); err != nil { 26 26 return err 27 27 } 28 28
+5 -5
api/tangled/reposetDefaultBranch.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.setDefaultBranch 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.setDefaultBranch 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 21 21 } 22 22 23 23 // RepoSetDefaultBranch calls the XRPC method "sh.tangled.repo.setDefaultBranch". 24 - func RepoSetDefaultBranch(ctx context.Context, c lexutil.LexClient, input *RepoSetDefaultBranch_Input) error { 25 - if err := c.LexDo(ctx, lexutil.Procedure, "application/json", "sh.tangled.repo.setDefaultBranch", nil, input, nil); err != nil { 24 + func RepoSetDefaultBranch(ctx context.Context, c util.LexClient, input *RepoSetDefaultBranch_Input) error { 25 + if err := c.LexDo(ctx, util.Procedure, "application/json", "sh.tangled.repo.setDefaultBranch", nil, input, nil); err != nil { 26 26 return err 27 27 } 28 28
+33
api/tangled/repotag.go
··· 1 + // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 + 3 + package tangled 4 + 5 + // schema: sh.tangled.repo.tag 6 + 7 + import ( 8 + "bytes" 9 + "context" 10 + 11 + "github.com/bluesky-social/indigo/lex/util" 12 + ) 13 + 14 + const ( 15 + RepoTagNSID = "sh.tangled.repo.tag" 16 + ) 17 + 18 + // RepoTag calls the XRPC method "sh.tangled.repo.tag". 19 + // 20 + // repo: Repository identifier in format 'did:plc:.../repoName' 21 + // tag: Name of tag, such as v1.3.0 22 + func RepoTag(ctx context.Context, c util.LexClient, repo string, tag string) ([]byte, error) { 23 + buf := new(bytes.Buffer) 24 + 25 + params := map[string]interface{}{} 26 + params["repo"] = repo 27 + params["tag"] = tag 28 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.tag", params, nil, buf); err != nil { 29 + return nil, err 30 + } 31 + 32 + return buf.Bytes(), nil 33 + }
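For reference, a minimal sketch of calling the new sh.tangled.repo.tag query from application code; the import path tangled.org/core/api/tangled and the showTag helper are assumptions, and any value satisfying util.LexClient can be passed in.

package example

import (
	"context"
	"fmt"

	lexutil "github.com/bluesky-social/indigo/lex/util"

	"tangled.org/core/api/tangled"
)

// showTag fetches raw tag details for a repository. The endpoint returns the
// response body as-is ([]byte), so it is printed rather than decoded here.
func showTag(ctx context.Context, c lexutil.LexClient, repo, tag string) error {
	raw, err := tangled.RepoTag(ctx, c, repo, tag) // e.g. repo "did:plc:.../repoName", tag "v1.3.0"
	if err != nil {
		return fmt.Errorf("sh.tangled.repo.tag failed: %w", err)
	}
	fmt.Printf("%s\n", raw)
	return nil
}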
+5 -5
api/tangled/repotags.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.tags 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.tags 6 6 7 7 import ( 8 8 "bytes" 9 9 "context" 10 10 11 - lexutil "github.com/bluesky-social/indigo/lex/util" 11 + "github.com/bluesky-social/indigo/lex/util" 12 12 ) 13 13 14 14 const ( ··· 20 20 // cursor: Pagination cursor 21 21 // limit: Maximum number of tags to return 22 22 // repo: Repository identifier in format 'did:plc:.../repoName' 23 - func RepoTags(ctx context.Context, c lexutil.LexClient, cursor string, limit int64, repo string) ([]byte, error) { 23 + func RepoTags(ctx context.Context, c util.LexClient, cursor string, limit int64, repo string) ([]byte, error) { 24 24 buf := new(bytes.Buffer) 25 25 26 26 params := map[string]interface{}{} ··· 31 31 params["limit"] = limit 32 32 } 33 33 params["repo"] = repo 34 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.tags", params, nil, buf); err != nil { 34 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.tags", params, nil, buf); err != nil { 35 35 return nil, err 36 36 } 37 37
+19 -7
api/tangled/repotree.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.tree 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.tree 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 16 16 17 17 // RepoTree_LastCommit is a "lastCommit" in the sh.tangled.repo.tree schema. 18 18 type RepoTree_LastCommit struct { 19 + Author *RepoTree_Signature `json:"author,omitempty" cborgen:"author,omitempty"` 19 20 // hash: Commit hash 20 21 Hash string `json:"hash" cborgen:"hash"` 21 22 // message: Commit message ··· 27 28 // RepoTree_Output is the output of a sh.tangled.repo.tree call. 28 29 type RepoTree_Output struct { 29 30 // dotdot: Parent directory path 30 - Dotdot *string `json:"dotdot,omitempty" cborgen:"dotdot,omitempty"` 31 - Files []*RepoTree_TreeEntry `json:"files" cborgen:"files"` 31 + Dotdot *string `json:"dotdot,omitempty" cborgen:"dotdot,omitempty"` 32 + Files []*RepoTree_TreeEntry `json:"files" cborgen:"files"` 33 + LastCommit *RepoTree_LastCommit `json:"lastCommit,omitempty" cborgen:"lastCommit,omitempty"` 32 34 // parent: The parent path in the tree 33 35 Parent *string `json:"parent,omitempty" cborgen:"parent,omitempty"` 34 36 // readme: Readme for this file tree ··· 45 47 Filename string `json:"filename" cborgen:"filename"` 46 48 } 47 49 50 + // RepoTree_Signature is a "signature" in the sh.tangled.repo.tree schema. 51 + type RepoTree_Signature struct { 52 + // email: Author email 53 + Email string `json:"email" cborgen:"email"` 54 + // name: Author name 55 + Name string `json:"name" cborgen:"name"` 56 + // when: Author timestamp 57 + When string `json:"when" cborgen:"when"` 58 + } 59 + 48 60 // RepoTree_TreeEntry is a "treeEntry" in the sh.tangled.repo.tree schema. 49 61 type RepoTree_TreeEntry struct { 50 62 Last_commit *RepoTree_LastCommit `json:"last_commit,omitempty" cborgen:"last_commit,omitempty"` ··· 61 73 // path: Path within the repository tree 62 74 // ref: Git reference (branch, tag, or commit SHA) 63 75 // repo: Repository identifier in format 'did:plc:.../repoName' 64 - func RepoTree(ctx context.Context, c lexutil.LexClient, path string, ref string, repo string) (*RepoTree_Output, error) { 76 + func RepoTree(ctx context.Context, c util.LexClient, path string, ref string, repo string) (*RepoTree_Output, error) { 65 77 var out RepoTree_Output 66 78 67 79 params := map[string]interface{}{} ··· 70 82 } 71 83 params["ref"] = ref 72 84 params["repo"] = repo 73 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.repo.tree", params, nil, &out); err != nil { 85 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.repo.tree", params, nil, &out); err != nil { 74 86 return nil, err 75 87 } 76 88
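A sketch of reading the new last-commit metadata that sh.tangled.repo.tree now returns (the tree-level LastCommit and its Author signature are the additions here); the import path and client are assumptions, as above.

package example

import (
	"context"
	"fmt"

	lexutil "github.com/bluesky-social/indigo/lex/util"

	"tangled.org/core/api/tangled"
)

// describeTree lists a tree and, when present, the commit that last touched it.
func describeTree(ctx context.Context, c lexutil.LexClient, repo, ref, path string) error {
	out, err := tangled.RepoTree(ctx, c, path, ref, repo)
	if err != nil {
		return err
	}
	fmt.Printf("%d entries\n", len(out.Files))
	// LastCommit and Author are optional; both were added in this change.
	if lc := out.LastCommit; lc != nil && lc.Author != nil {
		fmt.Printf("last commit %s by %s <%s> at %s\n",
			lc.Hash, lc.Author.Name, lc.Author.Email, lc.Author.When)
	}
	return nil
}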
+7 -7
api/tangled/spindlemember.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.spindle.member 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.spindle.member 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.spindle.member", &SpindleMember{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.spindle.member", &SpindleMember{}) 17 + } // 18 + // RECORDTYPE: SpindleMember 19 19 type SpindleMember struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.spindle.member"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.spindle.member" cborgen:"$type,const=sh.tangled.spindle.member"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 // instance: spindle instance that the subject is now a member of 23 23 Instance string `json:"instance" cborgen:"instance"`
+2 -2
api/tangled/stateclosed.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.issue.state.closed 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.issue.state.closed 6 6 7 7 const ( 8 8 RepoIssueStateClosedNSID = "sh.tangled.repo.issue.state.closed"
+2 -2
api/tangled/stateopen.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.issue.state.open 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.issue.state.open 6 6 7 7 const ( 8 8 RepoIssueStateOpenNSID = "sh.tangled.repo.issue.state.open"
+2 -2
api/tangled/statusclosed.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull.status.closed 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.pull.status.closed 6 6 7 7 const ( 8 8 RepoPullStatusClosedNSID = "sh.tangled.repo.pull.status.closed"
+2 -2
api/tangled/statusmerged.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull.status.merged 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.pull.status.merged 6 6 7 7 const ( 8 8 RepoPullStatusMergedNSID = "sh.tangled.repo.pull.status.merged"
+2 -2
api/tangled/statusopen.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo.pull.status.open 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.repo.pull.status.open 6 6 7 7 const ( 8 8 RepoPullStatusOpenNSID = "sh.tangled.repo.pull.status.open"
+7 -7
api/tangled/tangledknot.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.knot 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.knot 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.knot", &Knot{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.knot", &Knot{}) 17 + } // 18 + // RECORDTYPE: Knot 19 19 type Knot struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.knot"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.knot" cborgen:"$type,const=sh.tangled.knot"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 }
+5 -5
api/tangled/tangledowner.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.owner 4 - 5 3 package tangled 4 + 5 + // schema: sh.tangled.owner 6 6 7 7 import ( 8 8 "context" 9 9 10 - lexutil "github.com/bluesky-social/indigo/lex/util" 10 + "github.com/bluesky-social/indigo/lex/util" 11 11 ) 12 12 13 13 const ( ··· 20 20 } 21 21 22 22 // Owner calls the XRPC method "sh.tangled.owner". 23 - func Owner(ctx context.Context, c lexutil.LexClient) (*Owner_Output, error) { 23 + func Owner(ctx context.Context, c util.LexClient) (*Owner_Output, error) { 24 24 var out Owner_Output 25 - if err := c.LexDo(ctx, lexutil.Query, "", "sh.tangled.owner", nil, nil, &out); err != nil { 25 + if err := c.LexDo(ctx, util.Query, "", "sh.tangled.owner", nil, nil, &out); err != nil { 26 26 return nil, err 27 27 } 28 28
+7 -7
api/tangled/tangledpipeline.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.pipeline 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.pipeline 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.pipeline", &Pipeline{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.pipeline", &Pipeline{}) 17 + } // 18 + // RECORDTYPE: Pipeline 19 19 type Pipeline struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.pipeline"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.pipeline" cborgen:"$type,const=sh.tangled.pipeline"` 21 21 TriggerMetadata *Pipeline_TriggerMetadata `json:"triggerMetadata" cborgen:"triggerMetadata"` 22 22 Workflows []*Pipeline_Workflow `json:"workflows" cborgen:"workflows"` 23 23 }
+7 -7
api/tangled/tangledpublicKey.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.publicKey 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.publicKey 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.publicKey", &PublicKey{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.publicKey", &PublicKey{}) 17 + } // 18 + // RECORDTYPE: PublicKey 19 19 type PublicKey struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.publicKey"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.publicKey" cborgen:"$type,const=sh.tangled.publicKey"` 21 21 // createdAt: key upload timestamp 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 // key: public key contents
+7 -7
api/tangled/tangledrepo.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.repo 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.repo 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.repo", &Repo{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.repo", &Repo{}) 17 + } // 18 + // RECORDTYPE: Repo 19 19 type Repo struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.repo"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.repo" cborgen:"$type,const=sh.tangled.repo"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 Description *string `json:"description,omitempty" cborgen:"description,omitempty"` 23 23 // knot: knot where the repo was created
+7 -7
api/tangled/tangledspindle.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.spindle 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.spindle 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.spindle", &Spindle{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.spindle", &Spindle{}) 17 + } // 18 + // RECORDTYPE: Spindle 19 19 type Spindle struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.spindle"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.spindle" cborgen:"$type,const=sh.tangled.spindle"` 21 21 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 22 22 }
+7 -7
api/tangled/tangledstring.go
··· 1 1 // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 2 3 - // Lexicon schema: sh.tangled.string 4 - 5 3 package tangled 6 4 5 + // schema: sh.tangled.string 6 + 7 7 import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 8 + "github.com/bluesky-social/indigo/lex/util" 9 9 ) 10 10 11 11 const ( ··· 13 13 ) 14 14 15 15 func init() { 16 - lexutil.RegisterType("sh.tangled.string", &String{}) 17 - } 18 - 16 + util.RegisterType("sh.tangled.string", &String{}) 17 + } // 18 + // RECORDTYPE: String 19 19 type String struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.string"` 20 + LexiconTypeID string `json:"$type,const=sh.tangled.string" cborgen:"$type,const=sh.tangled.string"` 21 21 Contents string `json:"contents" cborgen:"contents"` 22 22 CreatedAt string `json:"createdAt" cborgen:"createdAt"` 23 23 Description string `json:"description" cborgen:"description"`
-24
api/tangled/workflowrun.go
··· 1 - // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 - 3 - // Lexicon schema: sh.tangled.ci.workflow.run 4 - 5 - package tangled 6 - 7 - import ( 8 - lexutil "github.com/bluesky-social/indigo/lex/util" 9 - ) 10 - 11 - const ( 12 - CiWorkflowRunNSID = "sh.tangled.ci.workflow.run" 13 - ) 14 - 15 - func init() { 16 - lexutil.RegisterType("sh.tangled.ci.workflow.run", &CiWorkflowRun{}) 17 - } 18 - 19 - type CiWorkflowRun struct { 20 - LexiconTypeID string `json:"$type" cborgen:"$type,const=sh.tangled.ci.workflow.run"` 21 - Adapter string `json:"adapter" cborgen:"adapter"` 22 - Name string `json:"name" cborgen:"name"` 23 - Status *string `json:"status" cborgen:"status"` 24 - }
-9
api/tangled/workflowstatus.go
··· 1 - // Code generated by cmd/lexgen (see Makefile's lexgen); DO NOT EDIT. 2 - 3 - // Lexicon schema: sh.tangled.ci.workflow.status 4 - 5 - package tangled 6 - 7 - const ( 8 - CiWorkflowStatusNSID = "sh.tangled.ci.workflow.status" 9 - )
+2 -2
appview/config/config.go
··· 13 13 CookieSecret string `env:"COOKIE_SECRET, default=00000000000000000000000000000000"` 14 14 DbPath string `env:"DB_PATH, default=appview.db"` 15 15 ListenAddr string `env:"LISTEN_ADDR, default=0.0.0.0:3000"` 16 - AppviewHost string `env:"APPVIEW_HOST, default=https://tangled.org"` 16 + AppviewHost string `env:"APPVIEW_HOST, default=tangled.org"` 17 17 AppviewName string `env:"APPVIEW_Name, default=Tangled"` 18 18 Dev bool `env:"DEV, default=false"` 19 19 DisallowedNicknamesFile string `env:"DISALLOWED_NICKNAMES_FILE"` ··· 29 29 return !c.Dev 30 30 } 31 31 32 - func (c *CoreConfig) Url() string { 32 + func (c *CoreConfig) BaseUrl() string { 33 33 if c.UseTLS() { 34 34 return "https://" + c.AppviewHost 35 35 }
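Since APPVIEW_HOST now defaults to a bare hostname, absolute links go through the renamed BaseUrl helper rather than a scheme-qualified host. A minimal sketch, assuming the import path tangled.org/core/appview/config; the /@handle route is illustrative.

package example

import "tangled.org/core/appview/config"

// profileURL composes an absolute URL from the configured host; BaseUrl picks
// the scheme based on UseTLS (https outside dev mode).
func profileURL(c *config.CoreConfig, handle string) string {
	// e.g. "https://tangled.org/@alice" with the default host in production
	return c.BaseUrl() + "/@" + handle
}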
+23 -124
appview/db/db.go
··· 3 3 import ( 4 4 "context" 5 5 "database/sql" 6 - "fmt" 7 6 "log/slog" 8 7 "strings" 9 8 ··· 261 260 did text not null, 262 261 263 262 -- data 263 + avatar text, 264 264 description text not null, 265 265 include_bluesky integer not null default 0, 266 266 location text, ··· 1079 1079 // transfer data, constructing pull_at from pulls table 1080 1080 _, err = tx.Exec(` 1081 1081 insert into pull_submissions_new (id, pull_at, round_number, patch, created) 1082 - select 1082 + select 1083 1083 ps.id, 1084 1084 'at://' || p.owner_did || '/sh.tangled.repo.pull/' || p.rkey, 1085 1085 ps.round_number, ··· 1174 1174 return err 1175 1175 }) 1176 1176 1177 - // we cannot modify user-owned record on repository delete 1178 - orm.RunMigration(conn, logger, "remove-foreign-key-profile_pinned_repositories-and-repos", func(tx *sql.Tx) error { 1177 + orm.RunMigration(conn, logger, "add-avatar-to-profile", func(tx *sql.Tx) error { 1179 1178 _, err := tx.Exec(` 1180 - create table profile_pinned_repositories_new ( 1181 - did text not null, 1182 - 1183 - -- data 1184 - at_uri text not null, 1185 - 1186 - -- constraints 1187 - unique(did, at_uri), 1188 - foreign key (did) references profile(did) on delete cascade 1189 - ); 1190 - 1191 - insert into profile_pinned_repositories_new (did, at_uri) 1192 - select did, at_uri from profile_pinned_repositories; 1193 - 1194 - drop table profile_pinned_repositories; 1195 - alter table profile_pinned_repositories_new rename to profile_pinned_repositories; 1179 + alter table profile add column avatar text; 1196 1180 `) 1197 1181 return err 1198 1182 }) 1199 1183 1200 - // several changes here 1201 - // 1. remove autoincrement id for these tables 1202 - // 2. remove unique constraints other than (did, rkey) to handle non-unique atproto records 1203 - // 3. 
add generated at_uri field 1204 - // 1205 - // see comments below and commit message for details 1206 - orm.RunMigration(conn, logger, "flexible-stars-reactions-follows-public_keys", func(tx *sql.Tx) error { 1207 - // - add at_uri 1208 - // - remove unique constraint (did, subject_at) 1209 - if _, err := tx.Exec(` 1210 - create table stars_new ( 1211 - did text not null, 1212 - rkey text not null, 1213 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.feed.star' || '/' || rkey) stored, 1184 + orm.RunMigration(conn, logger, "remove-profile-stats-column-constraint", func(tx *sql.Tx) error { 1185 + _, err := tx.Exec(` 1186 + -- create new table without the check constraint 1187 + create table profile_stats_new ( 1188 + id integer primary key autoincrement, 1189 + did text not null, 1190 + kind text not null, -- no constraint this time 1191 + foreign key (did) references profile(did) on delete cascade 1192 + ); 1214 1193 1215 - subject_at text not null, 1216 - created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 1194 + -- copy data from old table 1195 + insert into profile_stats_new (id, did, kind) 1196 + select id, did, kind 1197 + from profile_stats; 1217 1198 1218 - unique(did, rkey) 1219 - ); 1199 + -- drop old table 1200 + drop table profile_stats; 1220 1201 1221 - insert into stars_new (did, rkey, subject_at, created) 1222 - select did, rkey, subject_at, created from stars; 1223 - 1224 - drop table stars; 1225 - alter table stars_new rename to stars; 1226 - `); err != nil { 1227 - return fmt.Errorf("migrating stars: %w", err) 1228 - } 1229 - 1230 - // - add at_uri 1231 - // - reacted_by_did -> did 1232 - // - thread_at -> subject_at 1233 - // - remove unique constraint 1234 - if _, err := tx.Exec(` 1235 - create table reactions_new ( 1236 - did text not null, 1237 - rkey text not null, 1238 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.feed.reaction' || '/' || rkey) stored, 1239 - 1240 - subject_at text not null, 1241 - kind text not null, 1242 - created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 1243 - 1244 - unique(did, rkey) 1245 - ); 1246 - 1247 - insert into reactions_new (did, rkey, subject_at, kind, created) 1248 - select reacted_by_did, rkey, thread_at, kind, created from reactions; 1249 - 1250 - drop table reactions; 1251 - alter table reactions_new rename to reactions; 1252 - `); err != nil { 1253 - return fmt.Errorf("migrating reactions: %w", err) 1254 - } 1255 - 1256 - // - add at_uri column 1257 - // - user_did -> did 1258 - // - followed_at -> created 1259 - // - remove unique constraint 1260 - // - remove check constraint 1261 - if _, err := tx.Exec(` 1262 - create table follows_new ( 1263 - did text not null, 1264 - rkey text not null, 1265 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.graph.follow' || '/' || rkey) stored, 1266 - 1267 - subject_did text not null, 1268 - created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 1269 - 1270 - unique(did, rkey) 1271 - ); 1272 - 1273 - insert into follows_new (did, rkey, subject_did, created) 1274 - select user_did, rkey, subject_did, followed_at from follows; 1275 - 1276 - drop table follows; 1277 - alter table follows_new rename to follows; 1278 - `); err != nil { 1279 - return fmt.Errorf("migrating follows: %w", err) 1280 - } 1281 - 1282 - // - add at_uri column 1283 - // - remove foreign key relationship from repos 1284 - if _, err := tx.Exec(` 1285 - create table public_keys_new ( 1286 - did text 
not null, 1287 - rkey text not null, 1288 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.publicKey' || '/' || rkey) stored, 1289 - 1290 - name text not null, 1291 - key text not null, 1292 - created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 1293 - 1294 - unique(did, rkey) 1295 - ); 1296 - 1297 - insert into public_keys_new (did, rkey, name, key, created) 1298 - select did, rkey, name, key, created from public_keys; 1299 - 1300 - drop table public_keys; 1301 - alter table public_keys_new rename to public_keys; 1302 - `); err != nil { 1303 - return fmt.Errorf("migrating public_keys: %w", err) 1304 - } 1305 - 1306 - return nil 1202 + -- rename new table 1203 + alter table profile_stats_new rename to profile_stats; 1204 + `) 1205 + return err 1307 1206 }) 1308 1207 1309 1208 return &DB{
+35 -41
appview/db/follow.go
··· 6 6 "strings" 7 7 "time" 8 8 9 - "github.com/bluesky-social/indigo/atproto/syntax" 10 9 "tangled.org/core/appview/models" 11 10 "tangled.org/core/orm" 12 11 ) 13 12 14 - func UpsertFollow(e Execer, follow models.Follow) error { 15 - _, err := e.Exec( 16 - `insert into follows (did, rkey, subject_did, created) 17 - values (?, ?, ?, ?) 18 - on conflict(did, rkey) do update set 19 - subject_did = excluded.subject_did, 20 - created = excluded.created`, 21 - follow.UserDid, 22 - follow.Rkey, 23 - follow.SubjectDid, 24 - follow.FollowedAt.Format(time.RFC3339), 25 - ) 13 + func AddFollow(e Execer, follow *models.Follow) error { 14 + query := `insert or ignore into follows (user_did, subject_did, rkey) values (?, ?, ?)` 15 + _, err := e.Exec(query, follow.UserDid, follow.SubjectDid, follow.Rkey) 26 16 return err 27 17 } 28 18 29 - // Remove a follow 30 - func DeleteFollow(e Execer, did, subjectDid syntax.DID) ([]syntax.ATURI, error) { 31 - var deleted []syntax.ATURI 32 - rows, err := e.Query( 33 - `delete from follows 34 - where did = ? and subject_did = ? 35 - returning at_uri`, 36 - did, 37 - subjectDid, 38 - ) 19 + // Get a follow record 20 + func GetFollow(e Execer, userDid, subjectDid string) (*models.Follow, error) { 21 + query := `select user_did, subject_did, followed_at, rkey from follows where user_did = ? and subject_did = ?` 22 + row := e.QueryRow(query, userDid, subjectDid) 23 + 24 + var follow models.Follow 25 + var followedAt string 26 + err := row.Scan(&follow.UserDid, &follow.SubjectDid, &followedAt, &follow.Rkey) 39 27 if err != nil { 40 - return nil, fmt.Errorf("deleting stars: %w", err) 28 + return nil, err 41 29 } 42 - defer rows.Close() 43 30 44 - for rows.Next() { 45 - var aturi syntax.ATURI 46 - if err := rows.Scan(&aturi); err != nil { 47 - return nil, fmt.Errorf("scanning at_uri: %w", err) 48 - } 49 - deleted = append(deleted, aturi) 31 + followedAtTime, err := time.Parse(time.RFC3339, followedAt) 32 + if err != nil { 33 + log.Println("unable to determine followed at time") 34 + follow.FollowedAt = time.Now() 35 + } else { 36 + follow.FollowedAt = followedAtTime 50 37 } 51 - return deleted, nil 38 + 39 + return &follow, nil 40 + } 41 + 42 + // Remove a follow 43 + func DeleteFollow(e Execer, userDid, subjectDid string) error { 44 + _, err := e.Exec(`delete from follows where user_did = ? and subject_did = ?`, userDid, subjectDid) 45 + return err 52 46 } 53 47 54 48 // Remove a follow 55 49 func DeleteFollowByRkey(e Execer, userDid, rkey string) error { 56 - _, err := e.Exec(`delete from follows where did = ? and rkey = ?`, userDid, rkey) 50 + _, err := e.Exec(`delete from follows where user_did = ? and rkey = ?`, userDid, rkey) 57 51 return err 58 52 } 59 53 ··· 62 56 err := e.QueryRow( 63 57 `SELECT 64 58 COUNT(CASE WHEN subject_did = ? THEN 1 END) AS followers, 65 - COUNT(CASE WHEN did = ? THEN 1 END) AS following 59 + COUNT(CASE WHEN user_did = ? 
THEN 1 END) AS following 66 60 FROM follows;`, did, did).Scan(&followers, &following) 67 61 if err != nil { 68 62 return models.FollowStats{}, err ··· 102 96 group by subject_did 103 97 ) f 104 98 full outer join ( 105 - select did as did, count(*) as following 99 + select user_did as did, count(*) as following 106 100 from follows 107 - where did in (%s) 108 - group by did 101 + where user_did in (%s) 102 + group by user_did 109 103 ) g on f.did = g.did`, 110 104 placeholderStr, placeholderStr) 111 105 ··· 162 156 } 163 157 164 158 query := fmt.Sprintf( 165 - `select did, subject_did, created, rkey 159 + `select user_did, subject_did, followed_at, rkey 166 160 from follows 167 161 %s 168 - order by created desc 162 + order by followed_at desc 169 163 %s 170 164 `, whereClause, limitClause) 171 165 ··· 204 198 } 205 199 206 200 func GetFollowing(e Execer, did string) ([]models.Follow, error) { 207 - return GetFollows(e, 0, orm.FilterEq("did", did)) 201 + return GetFollows(e, 0, orm.FilterEq("user_did", did)) 208 202 } 209 203 210 204 func getFollowStatuses(e Execer, userDid string, subjectDids []string) (map[string]models.FollowStatus, error) { ··· 245 239 query := fmt.Sprintf(` 246 240 SELECT subject_did 247 241 FROM follows 248 - WHERE did = ? AND subject_did IN (%s) 242 + WHERE user_did = ? AND subject_did IN (%s) 249 243 `, strings.Join(placeholders, ",")) 250 244 251 245 rows, err := e.Query(query, args...)
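A minimal usage sketch for the reworked follow helpers (insert-or-ignore writes keyed on user_did/subject_did, deletes by record key); the DIDs and rkey are placeholders, and db.Execer is whatever executor the appview already passes around.

package example

import (
	"tangled.org/core/appview/db"
	"tangled.org/core/appview/models"
)

func followOnce(e db.Execer) error {
	f := &models.Follow{
		UserDid:    "did:plc:alice", // placeholder
		SubjectDid: "did:plc:bob",   // placeholder
		Rkey:       "3kabc123",      // placeholder record key
	}
	// insert or ignore: replaying the same record is a no-op
	if err := db.AddFollow(e, f); err != nil {
		return err
	}
	// read it back; FollowedAt falls back to time.Now() if the stored
	// followed_at timestamp cannot be parsed
	if _, err := db.GetFollow(e, f.UserDid, f.SubjectDid); err != nil {
		return err
	}
	// deletion by rkey mirrors how record deletes are handled in the ingester
	return db.DeleteFollowByRkey(e, f.UserDid, f.Rkey)
}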
+21 -4
appview/db/profile.go
··· 131 131 } 132 132 133 133 func UpsertProfile(tx *sql.Tx, profile *models.Profile) error { 134 + defer tx.Rollback() 135 + 134 136 // update links 135 137 _, err := tx.Exec(`delete from profile_links where did = ?`, profile.Did) 136 138 if err != nil { ··· 156 158 _, err = tx.Exec( 157 159 `insert or replace into profile ( 158 160 did, 161 + avatar, 159 162 description, 160 163 include_bluesky, 161 164 location, 162 165 pronouns 163 166 ) 164 - values (?, ?, ?, ?, ?)`, 167 + values (?, ?, ?, ?, ?, ?)`, 165 168 profile.Did, 169 + profile.Avatar, 166 170 profile.Description, 167 171 includeBskyValue, 168 172 profile.Location, ··· 224 228 return err 225 229 } 226 230 } 227 - return nil 231 + 232 + return tx.Commit() 228 233 } 229 234 230 235 func GetProfiles(e Execer, filters ...orm.Filter) (map[string]*models.Profile, error) { ··· 344 349 func GetProfile(e Execer, did string) (*models.Profile, error) { 345 350 var profile models.Profile 346 351 var pronouns sql.Null[string] 352 + var avatar sql.Null[string] 347 353 348 354 profile.Did = did 349 355 350 356 includeBluesky := 0 351 357 352 358 err := e.QueryRow( 353 - `select description, include_bluesky, location, pronouns from profile where did = ?`, 359 + `select avatar, description, include_bluesky, location, pronouns from profile where did = ?`, 354 360 did, 355 - ).Scan(&profile.Description, &includeBluesky, &profile.Location, &pronouns) 361 + ).Scan(&avatar, &profile.Description, &includeBluesky, &profile.Location, &pronouns) 356 362 if err == sql.ErrNoRows { 357 363 profile := models.Profile{} 358 364 profile.Did = did ··· 369 375 370 376 if pronouns.Valid { 371 377 profile.Pronouns = pronouns.V 378 + } 379 + 380 + if avatar.Valid { 381 + profile.Avatar = avatar.V 372 382 } 373 383 374 384 rows, err := e.Query(`select link from profile_links where did = ?`, did) ··· 440 450 case models.VanityStatRepositoryCount: 441 451 query = `select count(id) from repos where did = ?` 442 452 args = append(args, did) 453 + case models.VanityStatStarCount: 454 + query = `select count(id) from stars where subject_at like 'at://' || ? || '%'` 455 + args = append(args, did) 456 + case models.VanityStatNone: 457 + return 0, nil 458 + default: 459 + return 0, fmt.Errorf("invalid vanity stat kind: %s", stat) 443 460 } 444 461 445 462 var result uint64
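UpsertProfile now owns the transaction's lifecycle: it defers tx.Rollback() and returns tx.Commit(), so callers only begin the transaction. A minimal sketch with placeholder values; the saveProfile wrapper is illustrative.

package example

import (
	"database/sql"

	"tangled.org/core/appview/db"
	"tangled.org/core/appview/models"
)

func saveProfile(sqlDB *sql.DB) error {
	p := &models.Profile{
		Did:         "did:plc:alice",  // placeholder
		Avatar:      "bafyavatarblob", // new field: blob ref string for the profile picture (placeholder)
		Description: "hacks on tangled",
	}
	tx, err := sqlDB.Begin()
	if err != nil {
		return err
	}
	// no explicit Commit or Rollback here: UpsertProfile handles both
	return db.UpsertProfile(tx, p)
}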
+7 -17
appview/db/pubkeys.go
··· 5 5 "time" 6 6 ) 7 7 8 - func UpsertPublicKey(e Execer, pubKey models.PublicKey) error { 8 + func AddPublicKey(e Execer, did, name, key, rkey string) error { 9 9 _, err := e.Exec( 10 - `insert into public_keys (did, rkey, name, key, created) 11 - values (?, ?, ?, ?, ?) 12 - on conflict(did, rkey) do update set 13 - name = excluded.name, 14 - key = excluded.key, 15 - created = excluded.created`, 16 - pubKey.Did, 17 - pubKey.Rkey, 18 - pubKey.Name, 19 - pubKey.Key, 20 - pubKey.Created.Format(time.RFC3339), 21 - ) 10 + `insert or ignore into public_keys (did, name, key, rkey) 11 + values (?, ?, ?, ?)`, 12 + did, name, key, rkey) 22 13 return err 23 14 } 24 15 25 - // for public_keys with empty rkey 26 - func DeletePublicKeyLegacy(e Execer, did, name string) error { 16 + func DeletePublicKey(e Execer, did, name, key string) error { 27 17 _, err := e.Exec(` 28 18 delete from public_keys 29 - where did = ? and name = ? and rkey = ''`, 30 - did, name) 19 + where did = ? and name = ? and key = ?`, 20 + did, name, key) 31 21 return err 32 22 } 33 23
+48 -62
appview/db/reaction.go
··· 1 1 package db 2 2 3 3 import ( 4 - "fmt" 4 + "log" 5 5 "time" 6 6 7 7 "github.com/bluesky-social/indigo/atproto/syntax" 8 8 "tangled.org/core/appview/models" 9 9 ) 10 10 11 - func UpsertReaction(e Execer, reaction models.Reaction) error { 12 - _, err := e.Exec( 13 - `insert into reactions (did, rkey, subject_at, kind, created) 14 - values (?, ?, ?, ?, ?) 15 - on conflict(did, rkey) do update set 16 - subject_at = excluded.subject_at, 17 - kind = excluded.kind, 18 - created = excluded.created`, 19 - reaction.ReactedByDid, 20 - reaction.Rkey, 21 - reaction.ThreadAt, 22 - reaction.Kind, 23 - reaction.Created.Format(time.RFC3339), 24 - ) 11 + func AddReaction(e Execer, reactedByDid string, threadAt syntax.ATURI, kind models.ReactionKind, rkey string) error { 12 + query := `insert or ignore into reactions (reacted_by_did, thread_at, kind, rkey) values (?, ?, ?, ?)` 13 + _, err := e.Exec(query, reactedByDid, threadAt, kind, rkey) 25 14 return err 26 15 } 27 16 28 - // Remove a reaction 29 - func DeleteReaction(e Execer, did syntax.DID, subjectAt syntax.ATURI, kind models.ReactionKind) ([]syntax.ATURI, error) { 30 - var deleted []syntax.ATURI 31 - rows, err := e.Query( 32 - `delete from reactions 33 - where did = ? and subject_at = ? and kind = ? 34 - returning at_uri`, 35 - did, 36 - subjectAt, 37 - kind, 38 - ) 17 + // Get a reaction record 18 + func GetReaction(e Execer, reactedByDid string, threadAt syntax.ATURI, kind models.ReactionKind) (*models.Reaction, error) { 19 + query := ` 20 + select reacted_by_did, thread_at, created, rkey 21 + from reactions 22 + where reacted_by_did = ? and thread_at = ? and kind = ?` 23 + row := e.QueryRow(query, reactedByDid, threadAt, kind) 24 + 25 + var reaction models.Reaction 26 + var created string 27 + err := row.Scan(&reaction.ReactedByDid, &reaction.ThreadAt, &created, &reaction.Rkey) 39 28 if err != nil { 40 - return nil, fmt.Errorf("deleting stars: %w", err) 29 + return nil, err 41 30 } 42 - defer rows.Close() 43 31 44 - for rows.Next() { 45 - var aturi syntax.ATURI 46 - if err := rows.Scan(&aturi); err != nil { 47 - return nil, fmt.Errorf("scanning at_uri: %w", err) 48 - } 49 - deleted = append(deleted, aturi) 32 + createdAtTime, err := time.Parse(time.RFC3339, created) 33 + if err != nil { 34 + log.Println("unable to determine followed at time") 35 + reaction.Created = time.Now() 36 + } else { 37 + reaction.Created = createdAtTime 50 38 } 51 - return deleted, nil 39 + 40 + return &reaction, nil 52 41 } 53 42 54 43 // Remove a reaction 55 - func DeleteReactionByRkey(e Execer, did string, rkey string) error { 56 - _, err := e.Exec(`delete from reactions where did = ? and rkey = ?`, did, rkey) 44 + func DeleteReaction(e Execer, reactedByDid string, threadAt syntax.ATURI, kind models.ReactionKind) error { 45 + _, err := e.Exec(`delete from reactions where reacted_by_did = ? and thread_at = ? and kind = ?`, reactedByDid, threadAt, kind) 57 46 return err 58 47 } 59 48 60 - func GetReactionCount(e Execer, subjectAt syntax.ATURI, kind models.ReactionKind) (int, error) { 49 + // Remove a reaction 50 + func DeleteReactionByRkey(e Execer, reactedByDid string, rkey string) error { 51 + _, err := e.Exec(`delete from reactions where reacted_by_did = ? and rkey = ?`, reactedByDid, rkey) 52 + return err 53 + } 54 + 55 + func GetReactionCount(e Execer, threadAt syntax.ATURI, kind models.ReactionKind) (int, error) { 61 56 count := 0 62 57 err := e.QueryRow( 63 - `select count(did) from reactions where subject_at = ? 
and kind = ?`, subjectAt, kind).Scan(&count) 58 + `select count(reacted_by_did) from reactions where thread_at = ? and kind = ?`, threadAt, kind).Scan(&count) 64 59 if err != nil { 65 60 return 0, err 66 61 } 67 62 return count, nil 68 63 } 69 64 70 - func GetReactionMap(e Execer, userLimit int, subjectAt syntax.ATURI) (map[models.ReactionKind]models.ReactionDisplayData, error) { 65 + func GetReactionMap(e Execer, userLimit int, threadAt syntax.ATURI) (map[models.ReactionKind]models.ReactionDisplayData, error) { 71 66 query := ` 72 - select kind, did, 67 + select kind, reacted_by_did, 73 68 row_number() over (partition by kind order by created asc) as rn, 74 69 count(*) over (partition by kind) as total 75 70 from reactions 76 - where subject_at = ? 71 + where thread_at = ? 77 72 order by kind, created asc` 78 73 79 - rows, err := e.Query(query, subjectAt) 74 + rows, err := e.Query(query, threadAt) 80 75 if err != nil { 81 76 return nil, err 82 77 } ··· 106 101 return reactionMap, rows.Err() 107 102 } 108 103 109 - func GetReactionStatus(e Execer, userDid string, threadAt syntax.ATURI, kind models.ReactionKind) (bool, error) { 110 - var exists bool 111 - err := e.QueryRow( 112 - `select exists ( 113 - select 1 from reactions 114 - where did = ? and subject_at = ? and kind = ? 115 - )`, 116 - userDid, 117 - threadAt, 118 - kind, 119 - ).Scan(&exists) 120 - return exists, err 104 + func GetReactionStatus(e Execer, userDid string, threadAt syntax.ATURI, kind models.ReactionKind) bool { 105 + if _, err := GetReaction(e, userDid, threadAt, kind); err != nil { 106 + return false 107 + } else { 108 + return true 109 + } 121 110 } 122 111 123 - func GetReactionStatusMap(e Execer, userDid string, threadAt syntax.ATURI) (map[models.ReactionKind]bool, error) { 112 + func GetReactionStatusMap(e Execer, userDid string, threadAt syntax.ATURI) map[models.ReactionKind]bool { 124 113 statusMap := map[models.ReactionKind]bool{} 125 114 for _, kind := range models.OrderedReactionKinds { 126 - reacted, err := GetReactionStatus(e, userDid, threadAt, kind) 127 - if err != nil { 128 - return nil, err 129 - } 130 - statusMap[kind] = reacted 115 + count := GetReactionStatus(e, userDid, threadAt, kind) 116 + statusMap[kind] = count 131 117 } 132 - return statusMap, nil 118 + return statusMap 133 119 }
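A minimal sketch of the reaction helpers as they now stand: writes are keyed on reacted_by_did/thread_at, and the status helpers return booleans directly (a missing row or query error is reported as "not reacted"). The DID, AT-URI, and rkey below are placeholders.

package example

import (
	"github.com/bluesky-social/indigo/atproto/syntax"

	"tangled.org/core/appview/db"
	"tangled.org/core/appview/models"
)

func react(e db.Execer, kind models.ReactionKind) (map[models.ReactionKind]bool, error) {
	did := "did:plc:alice"                                                   // placeholder
	thread := syntax.ATURI("at://did:plc:bob/sh.tangled.repo.issue/3kabc")   // placeholder subject
	if err := db.AddReaction(e, did, thread, kind, "3kdef456"); err != nil { // insert or ignore
		return nil, err
	}
	// per-kind booleans for rendering the reaction bar
	return db.GetReactionStatusMap(e, did, thread), nil
}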
+31 -27
appview/db/star.go
··· 4 4 "database/sql" 5 5 "errors" 6 6 "fmt" 7 + "log" 7 8 "slices" 8 9 "strings" 9 10 "time" ··· 13 14 "tangled.org/core/orm" 14 15 ) 15 16 16 - func UpsertStar(e Execer, star models.Star) error { 17 + func AddStar(e Execer, star *models.Star) error { 18 + query := `insert or ignore into stars (did, subject_at, rkey) values (?, ?, ?)` 17 19 _, err := e.Exec( 18 - `insert into stars (did, rkey, subject_at, created) 19 - values (?, ?, ?, ?) 20 - on conflict(did, rkey) do update set 21 - subject_at = excluded.subject_at, 22 - created = excluded.created`, 20 + query, 23 21 star.Did, 22 + star.RepoAt.String(), 24 23 star.Rkey, 25 - star.RepoAt, 26 - star.Created.Format(time.RFC3339), 27 24 ) 28 25 return err 29 26 } 30 27 31 - // Remove a star 32 - func DeleteStar(tx *sql.Tx, did syntax.DID, subjectAt syntax.ATURI) ([]syntax.ATURI, error) { 33 - var deleted []syntax.ATURI 34 - rows, err := tx.Query( 35 - `delete from stars 36 - where did = ? and subject_at = ? 37 - returning at_uri`, 38 - did, 39 - subjectAt, 40 - ) 28 + // Get a star record 29 + func GetStar(e Execer, did string, subjectAt syntax.ATURI) (*models.Star, error) { 30 + query := ` 31 + select did, subject_at, created, rkey 32 + from stars 33 + where did = ? and subject_at = ?` 34 + row := e.QueryRow(query, did, subjectAt) 35 + 36 + var star models.Star 37 + var created string 38 + err := row.Scan(&star.Did, &star.RepoAt, &created, &star.Rkey) 41 39 if err != nil { 42 - return nil, fmt.Errorf("deleting stars: %w", err) 40 + return nil, err 43 41 } 44 - defer rows.Close() 45 42 46 - for rows.Next() { 47 - var aturi syntax.ATURI 48 - if err := rows.Scan(&aturi); err != nil { 49 - return nil, fmt.Errorf("scanning at_uri: %w", err) 50 - } 51 - deleted = append(deleted, aturi) 43 + createdAtTime, err := time.Parse(time.RFC3339, created) 44 + if err != nil { 45 + log.Println("unable to determine followed at time") 46 + star.Created = time.Now() 47 + } else { 48 + star.Created = createdAtTime 52 49 } 53 - return deleted, nil 50 + 51 + return &star, nil 52 + } 53 + 54 + // Remove a star 55 + func DeleteStar(e Execer, did string, subjectAt syntax.ATURI) error { 56 + _, err := e.Exec(`delete from stars where did = ? and subject_at = ?`, did, subjectAt) 57 + return err 54 58 } 55 59 56 60 // Remove a star
+1 -1
appview/db/timeline.go
··· 183 183 func getTimelineFollows(e Execer, limit int, loggedInUserDid string, userIsFollowing []string) ([]models.TimelineEvent, error) { 184 184 filters := make([]orm.Filter, 0) 185 185 if userIsFollowing != nil { 186 - filters = append(filters, orm.FilterIn("did", userIsFollowing)) 186 + filters = append(filters, orm.FilterIn("user_did", userIsFollowing)) 187 187 } 188 188 189 189 follows, err := GetFollows(e, limit, filters...)
+19 -39
appview/ingester.go
··· 19 19 "tangled.org/core/appview/db" 20 20 "tangled.org/core/appview/models" 21 21 "tangled.org/core/appview/serververify" 22 + "tangled.org/core/appview/validator" 22 23 "tangled.org/core/idresolver" 23 24 "tangled.org/core/orm" 24 25 "tangled.org/core/rbac" ··· 30 31 IdResolver *idresolver.Resolver 31 32 Config *config.Config 32 33 Logger *slog.Logger 34 + Validator *validator.Validator 33 35 } 34 36 35 37 type processFunc func(ctx context.Context, e *jmodels.Event) error ··· 119 121 l.Error("invalid record", "err", err) 120 122 return err 121 123 } 122 - err = db.UpsertStar(i.Db, models.Star{ 124 + err = db.AddStar(i.Db, &models.Star{ 123 125 Did: did, 124 126 RepoAt: subjectUri, 125 127 Rkey: e.Commit.RKey, ··· 131 133 if err != nil { 132 134 return fmt.Errorf("failed to %s star record: %w", e.Commit.Operation, err) 133 135 } 134 - l.Info("processed star", "operation", e.Commit.Operation, "rkey", e.Commit.RKey) 135 136 136 137 return nil 137 138 } ··· 153 154 return err 154 155 } 155 156 156 - err = db.UpsertFollow(i.Db, models.Follow{ 157 + err = db.AddFollow(i.Db, &models.Follow{ 157 158 UserDid: did, 158 159 SubjectDid: record.Subject, 159 160 Rkey: e.Commit.RKey, ··· 165 166 if err != nil { 166 167 return fmt.Errorf("failed to %s follow record: %w", e.Commit.Operation, err) 167 168 } 168 - l.Info("processed follow", "operation", e.Commit.Operation, "rkey", e.Commit.RKey) 169 169 170 170 return nil 171 171 } ··· 187 187 l.Error("invalid record", "err", err) 188 188 return err 189 189 } 190 - pubKey, err := models.PublicKeyFromRecord(syntax.DID(did), syntax.RecordKey(e.Commit.RKey), record) 191 - if err != nil { 192 - l.Error("invalid record", "err", err) 193 - return err 194 - } 195 - if err := pubKey.Validate(); err != nil { 196 - l.Error("invalid record", "err", err) 197 - return err 198 - } 199 190 200 - err = db.UpsertPublicKey(i.Db, pubKey) 191 + name := record.Name 192 + key := record.Key 193 + err = db.AddPublicKey(i.Db, did, name, key, e.Commit.RKey) 201 194 case jmodels.CommitOperationDelete: 202 195 l.Debug("processing delete of pubkey") 203 196 err = db.DeletePublicKeyByRkey(i.Db, did, e.Commit.RKey) ··· 206 199 if err != nil { 207 200 return fmt.Errorf("failed to %s pubkey record: %w", e.Commit.Operation, err) 208 201 } 209 - l.Info("processed pubkey", "operation", e.Commit.Operation, "rkey", e.Commit.RKey) 210 202 211 203 return nil 212 204 } ··· 293 285 return err 294 286 } 295 287 288 + avatar := "" 289 + if record.Avatar != nil { 290 + avatar = record.Avatar.Ref.String() 291 + } 292 + 296 293 description := "" 297 294 if record.Description != nil { 298 295 description = *record.Description ··· 320 317 var stats [2]models.VanityStat 321 318 for i, s := range record.Stats { 322 319 if i < 2 { 323 - stats[i].Kind = models.VanityStatKind(s) 320 + stats[i].Kind = models.ParseVanityStatKind(s) 324 321 } 325 322 } 326 323 ··· 333 330 334 331 profile := models.Profile{ 335 332 Did: did, 333 + Avatar: avatar, 336 334 Description: description, 337 335 IncludeBluesky: includeBluesky, 338 336 Location: location, ··· 351 349 if err != nil { 352 350 return fmt.Errorf("failed to start transaction") 353 351 } 354 - defer tx.Rollback() 355 352 356 353 err = db.ValidateProfile(tx, &profile) 357 354 if err != nil { ··· 359 356 } 360 357 361 358 err = db.UpsertProfile(tx, &profile) 362 - if err != nil { 363 - return fmt.Errorf("upserting profile: %w", err) 364 - } 365 - 366 - err = tx.Commit() 367 359 case jmodels.CommitOperationDelete: 368 360 err = db.DeleteArtifact(i.Db, 
orm.FilterEq("did", did), orm.FilterEq("rkey", e.Commit.RKey)) 369 361 } ··· 621 613 622 614 string := models.StringFromRecord(did, rkey, record) 623 615 624 - if err = string.Validate(); err != nil { 616 + if err = i.Validator.ValidateString(&string); err != nil { 625 617 l.Error("invalid record", "err", err) 626 618 return err 627 619 } ··· 830 822 831 823 issue := models.IssueFromRecord(did, rkey, record) 832 824 833 - if err := issue.Validate(); err != nil { 825 + if err := i.Validator.ValidateIssue(&issue); err != nil { 834 826 return fmt.Errorf("failed to validate issue: %w", err) 835 827 } 836 828 ··· 910 902 return fmt.Errorf("failed to parse comment from record: %w", err) 911 903 } 912 904 913 - if err := comment.Validate(); err != nil { 905 + if err := i.Validator.ValidateIssueComment(comment); err != nil { 914 906 return fmt.Errorf("failed to validate comment: %w", err) 915 907 } 916 908 ··· 970 962 return fmt.Errorf("failed to parse labeldef from record: %w", err) 971 963 } 972 964 973 - if err := def.Validate(); err != nil { 965 + if err := i.Validator.ValidateLabelDefinition(def); err != nil { 974 966 return fmt.Errorf("failed to validate labeldef: %w", err) 975 967 } 976 968 ··· 1046 1038 if !ok { 1047 1039 return fmt.Errorf("failed to find label def for key: %s, expected: %q", o.OperandKey, slices.Collect(maps.Keys(actx.Defs))) 1048 1040 } 1049 - 1050 - // validate permissions: only collaborators can apply labels currently 1051 - // 1052 - // TODO: introduce a repo:triage permission 1053 - ok, err := i.Enforcer.IsPushAllowed(o.Did, repo.Knot, repo.DidSlashRepo()) 1054 - if err != nil { 1055 - return fmt.Errorf("enforcing permission: %w", err) 1056 - } 1057 - if !ok { 1058 - return fmt.Errorf("unauthorized label operation") 1059 - } 1060 - 1061 - if err := def.ValidateOperandValue(&o); err != nil { 1041 + if err := i.Validator.ValidateLabelOp(def, repo, &o); err != nil { 1062 1042 return fmt.Errorf("failed to validate labelop: %w", err) 1063 1043 } 1064 1044 }
+34 -17
appview/issues/issues.go
··· 27 27 "tangled.org/core/appview/pages/repoinfo" 28 28 "tangled.org/core/appview/pagination" 29 29 "tangled.org/core/appview/reporesolver" 30 + "tangled.org/core/appview/validator" 30 31 "tangled.org/core/idresolver" 31 32 "tangled.org/core/orm" 32 33 "tangled.org/core/rbac" ··· 44 45 config *config.Config 45 46 notifier notify.Notifier 46 47 logger *slog.Logger 48 + validator *validator.Validator 47 49 indexer *issues_indexer.Indexer 48 50 } 49 51 ··· 57 59 db *db.DB, 58 60 config *config.Config, 59 61 notifier notify.Notifier, 62 + validator *validator.Validator, 60 63 indexer *issues_indexer.Indexer, 61 64 logger *slog.Logger, 62 65 ) *Issues { ··· 71 74 config: config, 72 75 notifier: notifier, 73 76 logger: logger, 77 + validator: validator, 74 78 indexer: indexer, 75 79 } 76 80 } ··· 98 102 99 103 userReactions := map[models.ReactionKind]bool{} 100 104 if user != nil { 101 - userReactions, err = db.GetReactionStatusMap(rp.db, user.Active.Did, issue.AtUri()) 102 - if err != nil { 103 - l.Error("failed to get issue reaction status", "err", err) 104 - } 105 + userReactions = db.GetReactionStatusMap(rp.db, user.Active.Did, issue.AtUri()) 105 106 } 106 107 107 108 backlinks, err := db.GetBacklinks(rp.db, issue.AtUri()) ··· 128 129 } 129 130 130 131 rp.pages.RepoSingleIssue(w, pages.RepoSingleIssueParams{ 131 - LoggedInUser: user, 132 - RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 133 - Issue: issue, 134 - CommentList: issue.CommentList(), 135 - Backlinks: backlinks, 136 - OrderedReactionKinds: models.OrderedReactionKinds, 137 - Reactions: reactionMap, 138 - UserReacted: userReactions, 139 - LabelDefs: defs, 132 + LoggedInUser: user, 133 + RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 134 + Issue: issue, 135 + CommentList: issue.CommentList(), 136 + Backlinks: backlinks, 137 + Reactions: reactionMap, 138 + UserReacted: userReactions, 139 + LabelDefs: defs, 140 140 }) 141 141 } 142 142 ··· 165 165 newIssue.Body = r.FormValue("body") 166 166 newIssue.Mentions, newIssue.References = rp.mentionsResolver.Resolve(r.Context(), newIssue.Body) 167 167 168 - if err := newIssue.Validate(); err != nil { 168 + if err := rp.validator.ValidateIssue(newIssue); err != nil { 169 169 l.Error("validation error", "err", err) 170 170 rp.pages.Notice(w, noticeId, fmt.Sprintf("Failed to edit issue: %s", err)) 171 171 return ··· 424 424 Mentions: mentions, 425 425 References: references, 426 426 } 427 - if err = comment.Validate(); err != nil { 427 + if err = rp.validator.ValidateIssueComment(&comment); err != nil { 428 428 l.Error("failed to validate comment", "err", err) 429 429 rp.pages.Notice(w, "issue-comment", "Failed to create comment.") 430 430 return ··· 822 822 823 823 keyword := params.Get("q") 824 824 825 + repoInfo := rp.repoResolver.GetRepoInfo(r, user) 826 + 825 827 var issues []models.Issue 826 828 searchOpts := models.IssueSearchOptions{ 827 829 Keyword: keyword, ··· 838 840 l.Debug("searched issues with indexer", "count", len(res.Hits)) 839 841 totalIssues = int(res.Total) 840 842 843 + // count matching issues in the opposite state to display correct counts 844 + countRes, err := rp.indexer.Search(r.Context(), models.IssueSearchOptions{ 845 + Keyword: keyword, RepoAt: f.RepoAt().String(), IsOpen: !isOpen, 846 + Page: pagination.Page{Limit: 1}, 847 + }) 848 + if err == nil { 849 + if isOpen { 850 + repoInfo.Stats.IssueCount.Open = int(res.Total) 851 + repoInfo.Stats.IssueCount.Closed = int(countRes.Total) 852 + } else { 853 + repoInfo.Stats.IssueCount.Closed = int(res.Total) 854 + 
repoInfo.Stats.IssueCount.Open = int(countRes.Total) 855 + } 856 + } 857 + 841 858 issues, err = db.GetIssues( 842 859 rp.db, 843 860 orm.FilterIn("id", res.Hits), ··· 884 901 885 902 rp.pages.RepoIssues(w, pages.RepoIssuesParams{ 886 903 LoggedInUser: rp.oauth.GetMultiAccountUser(r), 887 - RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 904 + RepoInfo: repoInfo, 888 905 Issues: issues, 889 906 IssueCount: totalIssues, 890 907 LabelDefs: defs, ··· 927 944 Repo: f, 928 945 } 929 946 930 - if err := issue.Validate(); err != nil { 947 + if err := rp.validator.ValidateIssue(issue); err != nil { 931 948 l.Error("validation error", "err", err) 932 949 rp.pages.Notice(w, "issues", fmt.Sprintf("Failed to create issue: %s", err)) 933 950 return
+25 -17
appview/issues/opengraph.go
··· 124 124 } 125 125 126 126 // Split stats area: left side for status/comments (80%), right side for dolly (20%) 127 - statusCommentsArea, dollyArea := statsArea.Split(true, 80) 127 + statusArea, dollyArea := statsArea.Split(true, 80) 128 128 129 129 // Draw status and comment count in status/comments area 130 - statsBounds := statusCommentsArea.Img.Bounds() 130 + statsBounds := statusArea.Img.Bounds() 131 131 statsX := statsBounds.Min.X + 60 // left padding 132 132 statsY := statsBounds.Min.Y 133 133 ··· 140 140 // Draw status (open/closed) with colored icon and text 141 141 var statusIcon string 142 142 var statusText string 143 - var statusBgColor color.RGBA 143 + var statusColor color.RGBA 144 144 145 145 if issue.Open { 146 146 statusIcon = "circle-dot" 147 147 statusText = "open" 148 - statusBgColor = color.RGBA{34, 139, 34, 255} // green 148 + statusColor = color.RGBA{34, 139, 34, 255} // green 149 149 } else { 150 150 statusIcon = "ban" 151 151 statusText = "closed" 152 - statusBgColor = color.RGBA{52, 58, 64, 255} // dark gray 152 + statusColor = color.RGBA{52, 58, 64, 255} // dark gray 153 153 } 154 154 155 - badgeIconSize := 36 155 + statusTextWidth := statusArea.TextWidth(statusText, textSize) 156 + badgePadding := 12 157 + badgeHeight := int(textSize) + (badgePadding * 2) 158 + badgeWidth := iconSize + badgePadding + statusTextWidth + (badgePadding * 2) 159 + cornerRadius := 8 160 + badgeX := 60 161 + badgeY := 0 156 162 157 - // Draw icon with status color (no background) 158 - err = statusCommentsArea.DrawLucideIcon(statusIcon, statsX, statsY+iconBaselineOffset-badgeIconSize/2+5, badgeIconSize, statusBgColor) 163 + statusArea.DrawRoundedRect(badgeX, badgeY, badgeWidth, badgeHeight, cornerRadius, statusColor) 164 + 165 + whiteColor := color.RGBA{255, 255, 255, 255} 166 + iconX := statsX + badgePadding 167 + iconY := statsY + (badgeHeight-iconSize)/2 168 + err = statusArea.DrawLucideIcon(statusIcon, iconX, iconY, iconSize, whiteColor) 159 169 if err != nil { 160 170 log.Printf("failed to draw status icon: %v", err) 161 171 } 162 172 163 - // Draw text with status color (no background) 164 - textX := statsX + badgeIconSize + 12 165 - badgeTextSize := 32.0 166 - err = statusCommentsArea.DrawTextAt(statusText, textX, statsY+iconBaselineOffset, statusBgColor, badgeTextSize, ogcard.Middle, ogcard.Left) 173 + textX := statsX + badgePadding + iconSize + badgePadding 174 + textY := statsY + (badgeHeight-int(textSize))/2 - 5 175 + err = statusArea.DrawTextAt(statusText, textX, textY, whiteColor, textSize, ogcard.Top, ogcard.Left) 167 176 if err != nil { 168 177 log.Printf("failed to draw status text: %v", err) 169 178 } 170 179 171 - statusTextWidth := len(statusText) * 20 172 - currentX := statsX + badgeIconSize + 12 + statusTextWidth + 50 180 + currentX := statsX + badgeWidth + 50 173 181 174 182 // Draw comment count 175 - err = statusCommentsArea.DrawLucideIcon("message-square", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor) 183 + err = statusArea.DrawLucideIcon("message-square", currentX, iconY, iconSize, iconColor) 176 184 if err != nil { 177 185 log.Printf("failed to draw comment icon: %v", err) 178 186 } ··· 182 190 if commentCount == 1 { 183 191 commentText = "1 comment" 184 192 } 185 - err = statusCommentsArea.DrawTextAt(commentText, currentX, statsY+iconBaselineOffset, iconColor, textSize, ogcard.Middle, ogcard.Left) 193 + err = statusArea.DrawTextAt(commentText, currentX, textY, iconColor, textSize, ogcard.Top, ogcard.Left) 186 194 if err != nil { 
187 195 log.Printf("failed to draw comment text: %v", err) 188 196 } ··· 205 213 openedDate := issue.Created.Format("Jan 2, 2006") 206 214 metaText := fmt.Sprintf("opened by %s ยท %s", authorHandle, openedDate) 207 215 208 - err = statusCommentsArea.DrawTextAt(metaText, statsX, labelY, iconColor, labelSize, ogcard.Top, ogcard.Left) 216 + err = statusArea.DrawTextAt(metaText, statsX, labelY, iconColor, labelSize, ogcard.Top, ogcard.Left) 209 217 if err != nil { 210 218 log.Printf("failed to draw metadata: %v", err) 211 219 }
-17
appview/knots/knots.go
··· 40 40 Knotstream *eventconsumer.Consumer 41 41 } 42 42 43 - type tab = map[string]any 44 - 45 - var ( 46 - knotsTabs []tab = []tab{ 47 - {"Name": "profile", "Icon": "user"}, 48 - {"Name": "keys", "Icon": "key"}, 49 - {"Name": "emails", "Icon": "mail"}, 50 - {"Name": "notifications", "Icon": "bell"}, 51 - {"Name": "knots", "Icon": "volleyball"}, 52 - {"Name": "spindles", "Icon": "spool"}, 53 - } 54 - ) 55 - 56 43 func (k *Knots) Router() http.Handler { 57 44 r := chi.NewRouter() 58 45 ··· 84 71 k.Pages.Knots(w, pages.KnotsParams{ 85 72 LoggedInUser: user, 86 73 Registrations: registrations, 87 - Tabs: knotsTabs, 88 - Tab: "knots", 89 74 }) 90 75 } 91 76 ··· 148 133 Members: members, 149 134 Repos: repoMap, 150 135 IsOwner: true, 151 - Tabs: knotsTabs, 152 - Tab: "knots", 153 136 }) 154 137 } 155 138
+15 -27
appview/labels/labels.go
··· 15 15 "tangled.org/core/appview/models" 16 16 "tangled.org/core/appview/oauth" 17 17 "tangled.org/core/appview/pages" 18 + "tangled.org/core/appview/validator" 18 19 "tangled.org/core/orm" 19 20 "tangled.org/core/rbac" 20 21 "tangled.org/core/tid" ··· 27 28 ) 28 29 29 30 type Labels struct { 30 - oauth *oauth.OAuth 31 - pages *pages.Pages 32 - db *db.DB 33 - logger *slog.Logger 34 - enforcer *rbac.Enforcer 31 + oauth *oauth.OAuth 32 + pages *pages.Pages 33 + db *db.DB 34 + logger *slog.Logger 35 + validator *validator.Validator 36 + enforcer *rbac.Enforcer 35 37 } 36 38 37 39 func New( 38 40 oauth *oauth.OAuth, 39 41 pages *pages.Pages, 40 42 db *db.DB, 43 + validator *validator.Validator, 41 44 enforcer *rbac.Enforcer, 42 45 logger *slog.Logger, 43 46 ) *Labels { 44 47 return &Labels{ 45 - oauth: oauth, 46 - pages: pages, 47 - db: db, 48 - logger: logger, 49 - enforcer: enforcer, 48 + oauth: oauth, 49 + pages: pages, 50 + db: db, 51 + logger: logger, 52 + validator: validator, 53 + enforcer: enforcer, 50 54 } 51 55 } 52 56 ··· 159 163 160 164 for i := range labelOps { 161 165 def := actx.Defs[labelOps[i].OperandKey] 162 - op := labelOps[i] 163 - 164 - // validate permissions: only collaborators can apply labels currently 165 - // 166 - // TODO: introduce a repo:triage permission 167 - ok, err := l.enforcer.IsPushAllowed(op.Did, repo.Knot, repo.DidSlashRepo()) 168 - if err != nil { 169 - fail("Failed to enforce permissions. Please try again later", fmt.Errorf("enforcing permission: %w", err)) 170 - return 171 - } 172 - if !ok { 173 - fail("Unauthorized label operation", fmt.Errorf("unauthorized label operation")) 174 - return 175 - } 176 - 177 - if err := def.ValidateOperandValue(&op); err != nil { 166 + if err := l.validator.ValidateLabelOp(def, repo, &labelOps[i]); err != nil { 178 167 fail(fmt.Sprintf("Invalid form data: %s", err), err) 179 168 return 180 169 } 181 - labelOps[i] = op 182 170 } 183 171 184 172 // reduce the opset
-9
appview/models/follow.go
··· 2 2 3 3 import ( 4 4 "time" 5 - 6 - "tangled.org/core/api/tangled" 7 5 ) 8 6 9 7 type Follow struct { ··· 11 9 SubjectDid string 12 10 FollowedAt time.Time 13 11 Rkey string 14 - } 15 - 16 - func (f *Follow) AsRecord() tangled.GraphFollow { 17 - return tangled.GraphFollow{ 18 - Subject: f.SubjectDid, 19 - CreatedAt: f.FollowedAt.Format(time.RFC3339), 20 - } 21 12 } 22 13 23 14 type FollowStats struct {
-32
appview/models/issue.go
··· 3 3 import ( 4 4 "fmt" 5 5 "sort" 6 - "strings" 7 6 "time" 8 7 9 8 "github.com/bluesky-social/indigo/atproto/syntax" 10 9 "tangled.org/core/api/tangled" 11 - "tangled.org/core/appview/pages/markup/sanitizer" 12 10 ) 13 11 14 12 type Issue struct { ··· 61 59 return "open" 62 60 } 63 61 return "closed" 64 - } 65 - 66 - var _ Validator = new(Issue) 67 - 68 - func (i *Issue) Validate() error { 69 - if i.Title == "" { 70 - return fmt.Errorf("issue title is empty") 71 - } 72 - if i.Body == "" { 73 - return fmt.Errorf("issue body is empty") 74 - } 75 - 76 - if st := strings.TrimSpace(sanitizer.SanitizeDescription(i.Title)); st == "" { 77 - return fmt.Errorf("title is empty after HTML sanitization") 78 - } 79 - 80 - if st := strings.TrimSpace(sanitizer.SanitizeDefault(i.Body)); st == "" { 81 - return fmt.Errorf("body is empty after HTML sanitization") 82 - } 83 - return nil 84 62 } 85 63 86 64 type CommentListItem struct { ··· 237 215 238 216 func (i *IssueComment) IsReply() bool { 239 217 return i.ReplyTo != nil 240 - } 241 - 242 - var _ Validator = new(IssueComment) 243 - 244 - func (i *IssueComment) Validate() error { 245 - if sb := strings.TrimSpace(sanitizer.SanitizeDefault(i.Body)); sb == "" { 246 - return fmt.Errorf("body is empty after HTML sanitization") 247 - } 248 - 249 - return nil 250 218 } 251 219 252 220 func IssueCommentFromRecord(did, rkey string, record tangled.RepoIssueComment) (*IssueComment, error) {
+4 -183
appview/models/label.go
··· 7 7 "encoding/json" 8 8 "errors" 9 9 "fmt" 10 - "regexp" 11 10 "slices" 12 - "strings" 13 11 "time" 14 12 15 13 "github.com/bluesky-social/indigo/api/atproto" ··· 122 120 } 123 121 } 124 122 125 - var ( 126 - // Label name should be alphanumeric with hyphens/underscores, but not start/end with them 127 - labelNameRegex = regexp.MustCompile(`^[a-zA-Z0-9]([a-zA-Z0-9_-]*[a-zA-Z0-9])?$`) 128 - // Color should be a valid hex color 129 - colorRegex = regexp.MustCompile(`^#[a-fA-F0-9]{6}$`) 130 - // You can only label issues and pulls presently 131 - validScopes = []string{tangled.RepoIssueNSID, tangled.RepoPullNSID} 132 - ) 133 - 134 - var _ Validator = new(LabelDefinition) 135 - 136 - func (l *LabelDefinition) Validate() error { 137 - if l.Name == "" { 138 - return fmt.Errorf("label name is empty") 139 - } 140 - if len(l.Name) > 40 { 141 - return fmt.Errorf("label name too long (max 40 graphemes)") 142 - } 143 - if len(l.Name) < 1 { 144 - return fmt.Errorf("label name too short (min 1 grapheme)") 145 - } 146 - if !labelNameRegex.MatchString(l.Name) { 147 - return fmt.Errorf("label name contains invalid characters (use only letters, numbers, hyphens, and underscores)") 148 - } 149 - 150 - if !l.ValueType.IsConcreteType() { 151 - return fmt.Errorf("invalid value type: %q (must be one of: null, boolean, integer, string)", l.ValueType.Type) 152 - } 153 - 154 - // null type checks: cannot be enums, multiple or explicit format 155 - if l.ValueType.IsNull() && l.ValueType.IsEnum() { 156 - return fmt.Errorf("null type cannot be used in conjunction with enum type") 157 - } 158 - if l.ValueType.IsNull() && l.Multiple { 159 - return fmt.Errorf("null type labels cannot be multiple") 160 - } 161 - if l.ValueType.IsNull() && !l.ValueType.IsAnyFormat() { 162 - return fmt.Errorf("format cannot be used in conjunction with null type") 163 - } 164 - 165 - // format checks: cannot be used with enum, or integers 166 - if !l.ValueType.IsAnyFormat() && l.ValueType.IsEnum() { 167 - return fmt.Errorf("enum types cannot be used in conjunction with format specification") 168 - } 169 - 170 - if !l.ValueType.IsAnyFormat() && !l.ValueType.IsString() { 171 - return fmt.Errorf("format specifications are only permitted on string types") 172 - } 173 - 174 - // validate scope (nsid format) 175 - if l.Scope == nil { 176 - return fmt.Errorf("scope is required") 177 - } 178 - for _, s := range l.Scope { 179 - if _, err := syntax.ParseNSID(s); err != nil { 180 - return fmt.Errorf("failed to parse scope: %w", err) 181 - } 182 - if !slices.Contains(validScopes, s) { 183 - return fmt.Errorf("invalid scope: scope must be present in %q", validScopes) 184 - } 185 - } 186 - 187 - // validate color if provided 188 - if l.Color != nil { 189 - color := strings.TrimSpace(*l.Color) 190 - if color == "" { 191 - // empty color is fine, set to nil 192 - l.Color = nil 193 - } else { 194 - if !colorRegex.MatchString(color) { 195 - return fmt.Errorf("color must be a valid hex color (e.g. #79FFE1 or #000)") 196 - } 197 - // expand 3-digit hex to 6-digit hex 198 - if len(color) == 4 { // #ABC 199 - color = fmt.Sprintf("#%c%c%c%c%c%c", color[1], color[1], color[2], color[2], color[3], color[3]) 200 - } 201 - // convert to uppercase for consistency 202 - color = strings.ToUpper(color) 203 - l.Color = &color 204 - } 205 - } 206 - 207 - return nil 208 - } 209 - 210 - // ValidateOperandValue validates the label operation operand value based on 211 - // label definition. 
212 - // 213 - // NOTE: This can modify the [LabelOp] 214 - func (def *LabelDefinition) ValidateOperandValue(op *LabelOp) error { 215 - expectedKey := def.AtUri().String() 216 - if op.OperandKey != def.AtUri().String() { 217 - return fmt.Errorf("operand key %q does not match label definition URI %q", op.OperandKey, expectedKey) 218 - } 219 - 220 - valueType := def.ValueType 221 - 222 - // this is permitted, it "unsets" a label 223 - if op.OperandValue == "" { 224 - op.Operation = LabelOperationDel 225 - return nil 226 - } 227 - 228 - switch valueType.Type { 229 - case ConcreteTypeNull: 230 - // For null type, value should be empty 231 - if op.OperandValue != "null" { 232 - return fmt.Errorf("null type requires empty value, got %q", op.OperandValue) 233 - } 234 - 235 - case ConcreteTypeString: 236 - // For string type, validate enum constraints if present 237 - if valueType.IsEnum() { 238 - if !slices.Contains(valueType.Enum, op.OperandValue) { 239 - return fmt.Errorf("value %q is not in allowed enum values %v", op.OperandValue, valueType.Enum) 240 - } 241 - } 242 - 243 - switch valueType.Format { 244 - case ValueTypeFormatDid: 245 - if _, err := syntax.ParseDID(op.OperandValue); err != nil { 246 - return fmt.Errorf("failed to resolve did/handle: %w", err) 247 - } 248 - case ValueTypeFormatAny, "": 249 - default: 250 - return fmt.Errorf("unsupported format constraint: %q", valueType.Format) 251 - } 252 - 253 - case ConcreteTypeInt: 254 - if op.OperandValue == "" { 255 - return fmt.Errorf("integer type requires non-empty value") 256 - } 257 - if _, err := fmt.Sscanf(op.OperandValue, "%d", new(int)); err != nil { 258 - return fmt.Errorf("value %q is not a valid integer", op.OperandValue) 259 - } 260 - 261 - if valueType.IsEnum() { 262 - if !slices.Contains(valueType.Enum, op.OperandValue) { 263 - return fmt.Errorf("value %q is not in allowed enum values %v", op.OperandValue, valueType.Enum) 264 - } 265 - } 266 - 267 - case ConcreteTypeBool: 268 - if op.OperandValue != "true" && op.OperandValue != "false" { 269 - return fmt.Errorf("boolean type requires value to be 'true' or 'false', got %q", op.OperandValue) 270 - } 271 - 272 - // validate enum constraints if present (though uncommon for booleans) 273 - if valueType.IsEnum() { 274 - if !slices.Contains(valueType.Enum, op.OperandValue) { 275 - return fmt.Errorf("value %q is not in allowed enum values %v", op.OperandValue, valueType.Enum) 276 - } 277 - } 278 - 279 - default: 280 - return fmt.Errorf("unsupported value type: %q", valueType.Type) 281 - } 282 - 283 - return nil 284 - } 285 - 286 123 // random color for a given seed 287 124 func randomColor(seed string) string { 288 125 hash := sha1.Sum([]byte(seed)) ··· 294 131 return fmt.Sprintf("#%s%s%s", r, g, b) 295 132 } 296 133 297 - func (l LabelDefinition) GetColor() string { 298 - if l.Color == nil { 299 - seed := fmt.Sprintf("%d:%s:%s", l.Id, l.Did, l.Rkey) 134 + func (ld LabelDefinition) GetColor() string { 135 + if ld.Color == nil { 136 + seed := fmt.Sprintf("%d:%s:%s", ld.Id, ld.Did, ld.Rkey) 300 137 color := randomColor(seed) 301 138 return color 302 139 } 303 140 304 - return *l.Color 141 + return *ld.Color 305 142 } 306 143 307 144 func LabelDefinitionFromRecord(did, rkey string, record tangled.LabelDefinition) (*LabelDefinition, error) { ··· 366 203 367 204 // otherwise, createdat is in the future relative to indexedat -> use indexedat 368 205 return indexedAt 369 - } 370 - 371 - var _ Validator = new(LabelOp) 372 - 373 - func (l *LabelOp) Validate() error { 374 - if _, err := 
syntax.ParseATURI(string(l.Subject)); err != nil { 375 - return fmt.Errorf("invalid subject URI: %w", err) 376 - } 377 - if l.Operation != LabelOperationAdd && l.Operation != LabelOperationDel { 378 - return fmt.Errorf("invalid operation: %q (must be 'add' or 'del')", l.Operation) 379 - } 380 - // Validate performed time is not zero/invalid 381 - if l.PerformedAt.IsZero() { 382 - return fmt.Errorf("performed_at timestamp is required") 383 - } 384 - return nil 385 206 } 386 207 387 208 type LabelOperation string
+28 -1
appview/models/profile.go
··· 13 13 Did string 14 14 15 15 // data 16 + Avatar string // CID of the avatar blob 16 17 Description string 17 18 IncludeBluesky bool 18 19 Location string ··· 58 59 VanityStatOpenIssueCount VanityStatKind = "open-issue-count" 59 60 VanityStatClosedIssueCount VanityStatKind = "closed-issue-count" 60 61 VanityStatRepositoryCount VanityStatKind = "repository-count" 62 + VanityStatStarCount VanityStatKind = "star-count" 63 + VanityStatNone VanityStatKind = "" 61 64 ) 62 65 66 + func ParseVanityStatKind(s string) VanityStatKind { 67 + switch s { 68 + case "merged-pull-request-count": 69 + return VanityStatMergedPRCount 70 + case "closed-pull-request-count": 71 + return VanityStatClosedPRCount 72 + case "open-pull-request-count": 73 + return VanityStatOpenPRCount 74 + case "open-issue-count": 75 + return VanityStatOpenIssueCount 76 + case "closed-issue-count": 77 + return VanityStatClosedIssueCount 78 + case "repository-count": 79 + return VanityStatRepositoryCount 80 + case "star-count": 81 + return VanityStatStarCount 82 + default: 83 + return VanityStatNone 84 + } 85 + } 86 + 63 87 func (v VanityStatKind) String() string { 64 88 switch v { 65 89 case VanityStatMergedPRCount: ··· 74 98 return "Closed Issues" 75 99 case VanityStatRepositoryCount: 76 100 return "Repositories" 101 + case VanityStatStarCount: 102 + return "Stars Received" 103 + default: 104 + return "" 77 105 } 78 - return "" 79 106 } 80 107 81 108 type VanityStat struct {
-38
appview/models/pubkey.go
··· 2 2 3 3 import ( 4 4 "encoding/json" 5 - "fmt" 6 5 "time" 7 - 8 - "github.com/bluesky-social/indigo/atproto/syntax" 9 - "github.com/gliderlabs/ssh" 10 - "tangled.org/core/api/tangled" 11 6 ) 12 7 13 8 type PublicKey struct { ··· 28 23 Alias: (*Alias)(&p), 29 24 }) 30 25 } 31 - 32 - func (p *PublicKey) AsRecord() tangled.PublicKey { 33 - return tangled.PublicKey{ 34 - Name: p.Name, 35 - Key: p.Key, 36 - CreatedAt: p.Created.Format(time.RFC3339), 37 - } 38 - } 39 - 40 - var _ Validator = new(PublicKey) 41 - 42 - func (p *PublicKey) Validate() error { 43 - if _, _, _, _, err := ssh.ParseAuthorizedKey([]byte(p.Key)); err != nil { 44 - return fmt.Errorf("invalid ssh key format: %w", err) 45 - } 46 - 47 - return nil 48 - } 49 - 50 - func PublicKeyFromRecord(did syntax.DID, rkey syntax.RecordKey, record tangled.PublicKey) (PublicKey, error) { 51 - created, err := time.Parse(time.RFC3339, record.CreatedAt) 52 - if err != nil { 53 - return PublicKey{}, fmt.Errorf("invalid time format '%s'", record.CreatedAt) 54 - } 55 - 56 - return PublicKey{ 57 - Did: did.String(), 58 - Rkey: rkey.String(), 59 - Name: record.Name, 60 - Key: record.Key, 61 - Created: &created, 62 - }, nil 63 - }
-9
appview/models/reaction.go
··· 4 4 "time" 5 5 6 6 "github.com/bluesky-social/indigo/atproto/syntax" 7 - "tangled.org/core/api/tangled" 8 7 ) 9 8 10 9 type ReactionKind string ··· 55 54 Created time.Time 56 55 Rkey string 57 56 Kind ReactionKind 58 - } 59 - 60 - func (r *Reaction) AsRecord() tangled.FeedReaction { 61 - return tangled.FeedReaction{ 62 - Subject: r.ThreadAt.String(), 63 - Reaction: r.Kind.String(), 64 - CreatedAt: r.Created.Format(time.RFC3339), 65 - } 66 57 } 67 58 68 59 type ReactionDisplayData struct {
+4 -1
appview/models/repo.go
··· 130 130 131 131 // current display mode 132 132 ShowingRendered bool // currently in rendered mode 133 - ShowingText bool // currently in text/code mode 134 133 135 134 // content type flags 136 135 ContentType BlobContentType ··· 151 150 // no view available, only raw 152 151 return !(b.HasRenderedView || b.HasTextView) 153 152 } 153 + 154 + func (b BlobView) ShowingText() bool { 155 + return !b.ShowingRendered 156 + }
-8
appview/models/star.go
··· 4 4 "time" 5 5 6 6 "github.com/bluesky-social/indigo/atproto/syntax" 7 - "tangled.org/core/api/tangled" 8 7 ) 9 8 10 9 type Star struct { ··· 12 11 RepoAt syntax.ATURI 13 12 Created time.Time 14 13 Rkey string 15 - } 16 - 17 - func (s *Star) AsRecord() tangled.FeedStar { 18 - return tangled.FeedStar{ 19 - Subject: s.RepoAt.String(), 20 - CreatedAt: s.Created.Format(time.RFC3339), 21 - } 22 14 } 23 15 24 16 // RepoStar is used for reverse mapping to repos
-21
appview/models/string.go
··· 2 2 3 3 import ( 4 4 "bytes" 5 - "errors" 6 5 "fmt" 7 6 "io" 8 7 "strings" 9 8 "time" 10 - "unicode/utf8" 11 9 12 10 "github.com/bluesky-social/indigo/atproto/syntax" 13 11 "tangled.org/core/api/tangled" ··· 35 33 Contents: s.Contents, 36 34 CreatedAt: s.Created.Format(time.RFC3339), 37 35 } 38 - } 39 - 40 - var _ Validator = new(String) 41 - 42 - func (s *String) Validate() error { 43 - var err error 44 - if utf8.RuneCountInString(s.Filename) > 140 { 45 - err = errors.Join(err, fmt.Errorf("filename too long")) 46 - } 47 - 48 - if utf8.RuneCountInString(s.Description) > 280 { 49 - err = errors.Join(err, fmt.Errorf("description too long")) 50 - } 51 - 52 - if len(s.Contents) == 0 { 53 - err = errors.Join(err, fmt.Errorf("contents is empty")) 54 - } 55 - 56 - return err 57 36 } 58 37 59 38 func StringFromRecord(did, rkey string, record tangled.String) String {
-6
appview/models/validator.go
··· 1 - package models 2 - 3 - type Validator interface { 4 - // Validate checks the object and returns any error. 5 - Validate() error 6 - }
+42 -14
appview/notify/db/db.go
··· 2 2 3 3 import ( 4 4 "context" 5 - "log" 6 5 "slices" 7 6 8 7 "github.com/bluesky-social/indigo/atproto/syntax" ··· 11 10 "tangled.org/core/appview/models" 12 11 "tangled.org/core/appview/notify" 13 12 "tangled.org/core/idresolver" 13 + "tangled.org/core/log" 14 14 "tangled.org/core/orm" 15 15 "tangled.org/core/sets" 16 16 ) ··· 38 38 } 39 39 40 40 func (n *databaseNotifier) NewStar(ctx context.Context, star *models.Star) { 41 + l := log.FromContext(ctx) 42 + 41 43 if star.RepoAt.Collection().String() != tangled.RepoNSID { 42 44 // skip string stars for now 43 45 return ··· 45 47 var err error 46 48 repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(star.RepoAt))) 47 49 if err != nil { 48 - log.Printf("NewStar: failed to get repos: %v", err) 50 + l.Error("failed to get repos", "err", err) 49 51 return 50 52 } 51 53 ··· 59 61 var pullId *int64 60 62 61 63 n.notifyEvent( 64 + ctx, 62 65 actorDid, 63 66 recipients, 64 67 eventType, ··· 75 78 } 76 79 77 80 func (n *databaseNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) { 81 + l := log.FromContext(ctx) 82 + 78 83 collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", issue.Repo.RepoAt())) 79 84 if err != nil { 80 - log.Printf("failed to fetch collaborators: %v", err) 85 + l.Error("failed to fetch collaborators", "err", err) 81 86 return 82 87 } 83 88 ··· 101 106 var pullId *int64 102 107 103 108 n.notifyEvent( 109 + ctx, 104 110 actorDid, 105 111 recipients, 106 112 models.NotificationTypeIssueCreated, ··· 111 117 pullId, 112 118 ) 113 119 n.notifyEvent( 120 + ctx, 114 121 actorDid, 115 122 sets.Collect(slices.Values(mentions)), 116 123 models.NotificationTypeUserMentioned, ··· 123 130 } 124 131 125 132 func (n *databaseNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) { 133 + l := log.FromContext(ctx) 134 + 126 135 issues, err := db.GetIssues(n.db, orm.FilterEq("at_uri", comment.IssueAt)) 127 136 if err != nil { 128 - log.Printf("NewIssueComment: failed to get issues: %v", err) 137 + l.Error("failed to get issues", "err", err) 129 138 return 130 139 } 131 140 if len(issues) == 0 { 132 - log.Printf("NewIssueComment: no issue found for %s", comment.IssueAt) 141 + l.Error("no issue found for", "err", comment.IssueAt) 133 142 return 134 143 } 135 144 issue := issues[0] ··· 170 179 var pullId *int64 171 180 172 181 n.notifyEvent( 182 + ctx, 173 183 actorDid, 174 184 recipients, 175 185 models.NotificationTypeIssueCommented, ··· 180 190 pullId, 181 191 ) 182 192 n.notifyEvent( 193 + ctx, 183 194 actorDid, 184 195 sets.Collect(slices.Values(mentions)), 185 196 models.NotificationTypeUserMentioned, ··· 204 215 var repoId, issueId, pullId *int64 205 216 206 217 n.notifyEvent( 218 + ctx, 207 219 actorDid, 208 220 recipients, 209 221 eventType, ··· 220 232 } 221 233 222 234 func (n *databaseNotifier) NewPull(ctx context.Context, pull *models.Pull) { 235 + l := log.FromContext(ctx) 236 + 223 237 repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(pull.RepoAt))) 224 238 if err != nil { 225 - log.Printf("NewPull: failed to get repos: %v", err) 239 + l.Error("failed to get repos", "err", err) 226 240 return 227 241 } 228 242 collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", repo.RepoAt())) 229 243 if err != nil { 230 - log.Printf("failed to fetch collaborators: %v", err) 244 + l.Error("failed to fetch collaborators", "err", err) 231 245 return 232 246 } 233 247 ··· 249 263 pullId := &p 250 264 251 265 
n.notifyEvent( 266 + ctx, 252 267 actorDid, 253 268 recipients, 254 269 eventType, ··· 261 276 } 262 277 263 278 func (n *databaseNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) { 279 + l := log.FromContext(ctx) 280 + 264 281 pull, err := db.GetPull(n.db, 265 282 syntax.ATURI(comment.RepoAt), 266 283 comment.PullId, 267 284 ) 268 285 if err != nil { 269 - log.Printf("NewPullComment: failed to get pulls: %v", err) 286 + l.Error("failed to get pulls", "err", err) 270 287 return 271 288 } 272 289 273 290 repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", comment.RepoAt)) 274 291 if err != nil { 275 - log.Printf("NewPullComment: failed to get repos: %v", err) 292 + l.Error("failed to get repos", "err", err) 276 293 return 277 294 } 278 295 ··· 298 315 pullId := &p 299 316 300 317 n.notifyEvent( 318 + ctx, 301 319 actorDid, 302 320 recipients, 303 321 eventType, ··· 308 326 pullId, 309 327 ) 310 328 n.notifyEvent( 329 + ctx, 311 330 actorDid, 312 331 sets.Collect(slices.Values(mentions)), 313 332 models.NotificationTypeUserMentioned, ··· 336 355 } 337 356 338 357 func (n *databaseNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) { 358 + l := log.FromContext(ctx) 359 + 339 360 collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", issue.Repo.RepoAt())) 340 361 if err != nil { 341 - log.Printf("failed to fetch collaborators: %v", err) 362 + l.Error("failed to fetch collaborators", "err", err) 342 363 return 343 364 } 344 365 ··· 368 389 } 369 390 370 391 n.notifyEvent( 392 + ctx, 371 393 actor, 372 394 recipients, 373 395 eventType, ··· 380 402 } 381 403 382 404 func (n *databaseNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) { 405 + l := log.FromContext(ctx) 406 + 383 407 // Get repo details 384 408 repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(pull.RepoAt))) 385 409 if err != nil { 386 - log.Printf("NewPullState: failed to get repos: %v", err) 410 + l.Error("failed to get repos", "err", err) 387 411 return 388 412 } 389 413 390 414 collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", repo.RepoAt())) 391 415 if err != nil { 392 - log.Printf("failed to fetch collaborators: %v", err) 416 + l.Error("failed to fetch collaborators", "err", err) 393 417 return 394 418 } 395 419 ··· 417 441 case models.PullMerged: 418 442 eventType = models.NotificationTypePullMerged 419 443 default: 420 - log.Println("NewPullState: unexpected new PR state:", pull.State) 444 + l.Error("unexpected new PR state", "state", pull.State) 421 445 return 422 446 } 423 447 p := int64(pull.ID) 424 448 pullId := &p 425 449 426 450 n.notifyEvent( 451 + ctx, 427 452 actor, 428 453 recipients, 429 454 eventType, ··· 436 461 } 437 462 438 463 func (n *databaseNotifier) notifyEvent( 464 + ctx context.Context, 439 465 actorDid syntax.DID, 440 466 recipients sets.Set[syntax.DID], 441 467 eventType models.NotificationType, ··· 445 471 issueId *int64, 446 472 pullId *int64, 447 473 ) { 474 + l := log.FromContext(ctx) 475 + 448 476 // if the user is attempting to mention >maxMentions users, this is probably spam, do not mention anybody 449 477 if eventType == models.NotificationTypeUserMentioned && recipients.Len() > maxMentions { 450 478 return ··· 494 522 } 495 523 496 524 if err := db.CreateNotification(tx, notif); err != nil { 497 - log.Printf("notifyEvent: failed to create notification for %s: %v", recipientDid, err) 525 + l.Error("failed to create notification", 
"recipientDid", recipientDid, "err", err) 498 526 } 499 527 } 500 528
+105
appview/notify/logging_notifier.go
··· 1 + package notify 2 + 3 + import ( 4 + "context" 5 + "log/slog" 6 + 7 + "tangled.org/core/appview/models" 8 + tlog "tangled.org/core/log" 9 + 10 + "github.com/bluesky-social/indigo/atproto/syntax" 11 + ) 12 + 13 + type loggingNotifier struct { 14 + inner Notifier 15 + logger *slog.Logger 16 + } 17 + 18 + func NewLoggingNotifier(inner Notifier, logger *slog.Logger) Notifier { 19 + return &loggingNotifier{ 20 + inner, 21 + logger, 22 + } 23 + } 24 + 25 + var _ Notifier = &loggingNotifier{} 26 + 27 + func (l *loggingNotifier) NewRepo(ctx context.Context, repo *models.Repo) { 28 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewRepo")) 29 + l.inner.NewRepo(ctx, repo) 30 + } 31 + 32 + func (l *loggingNotifier) NewStar(ctx context.Context, star *models.Star) { 33 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewStar")) 34 + l.inner.NewStar(ctx, star) 35 + } 36 + 37 + func (l *loggingNotifier) DeleteStar(ctx context.Context, star *models.Star) { 38 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "DeleteStar")) 39 + l.inner.DeleteStar(ctx, star) 40 + } 41 + 42 + func (l *loggingNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) { 43 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewIssue")) 44 + l.inner.NewIssue(ctx, issue, mentions) 45 + } 46 + 47 + func (l *loggingNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) { 48 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewIssueComment")) 49 + l.inner.NewIssueComment(ctx, comment, mentions) 50 + } 51 + 52 + func (l *loggingNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) { 53 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewIssueState")) 54 + l.inner.NewIssueState(ctx, actor, issue) 55 + } 56 + 57 + func (l *loggingNotifier) DeleteIssue(ctx context.Context, issue *models.Issue) { 58 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "DeleteIssue")) 59 + l.inner.DeleteIssue(ctx, issue) 60 + } 61 + 62 + func (l *loggingNotifier) NewFollow(ctx context.Context, follow *models.Follow) { 63 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewFollow")) 64 + l.inner.NewFollow(ctx, follow) 65 + } 66 + 67 + func (l *loggingNotifier) DeleteFollow(ctx context.Context, follow *models.Follow) { 68 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "DeleteFollow")) 69 + l.inner.DeleteFollow(ctx, follow) 70 + } 71 + 72 + func (l *loggingNotifier) NewPull(ctx context.Context, pull *models.Pull) { 73 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewPull")) 74 + l.inner.NewPull(ctx, pull) 75 + } 76 + 77 + func (l *loggingNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) { 78 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewPullComment")) 79 + l.inner.NewPullComment(ctx, comment, mentions) 80 + } 81 + 82 + func (l *loggingNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) { 83 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewPullState")) 84 + l.inner.NewPullState(ctx, actor, pull) 85 + } 86 + 87 + func (l *loggingNotifier) UpdateProfile(ctx context.Context, profile *models.Profile) { 88 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "UpdateProfile")) 89 + l.inner.UpdateProfile(ctx, profile) 90 + } 91 + 92 + func (l *loggingNotifier) NewString(ctx context.Context, s *models.String) { 93 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "NewString")) 94 + 
l.inner.NewString(ctx, s) 95 + } 96 + 97 + func (l *loggingNotifier) EditString(ctx context.Context, s *models.String) { 98 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "EditString")) 99 + l.inner.EditString(ctx, s) 100 + } 101 + 102 + func (l *loggingNotifier) DeleteString(ctx context.Context, did, rkey string) { 103 + ctx = tlog.IntoContext(ctx, tlog.SubLogger(l.logger, "DeleteString")) 104 + l.inner.DeleteString(ctx, did, rkey) 105 + }
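tangled.org/core/log is not part of this comparison either. Judging from the calls above (IntoContext and SubLogger here, FromContext in the database notifier earlier) and from the old mergedNotifier below, which attached a "method" attribute to its logger, the helpers are most likely thin context-scoped slog wrappers along these lines; this is a sketch under those assumptions, not the library's actual code.

package log

import (
	"context"
	"log/slog"
)

type ctxKey struct{}

// IntoContext returns a context carrying the given logger.
func IntoContext(ctx context.Context, l *slog.Logger) context.Context {
	return context.WithValue(ctx, ctxKey{}, l)
}

// FromContext returns the logger stored in ctx, falling back to slog.Default().
func FromContext(ctx context.Context) *slog.Logger {
	if l, ok := ctx.Value(ctxKey{}).(*slog.Logger); ok {
		return l
	}
	return slog.Default()
}

// SubLogger tags a child logger with the calling method's name, mirroring
// the "method" attribute the old mergedNotifier used to attach.
func SubLogger(l *slog.Logger, name string) *slog.Logger {
	return l.With("method", name)
}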
+20 -31
appview/notify/merged_notifier.go
··· 2 2 3 3 import ( 4 4 "context" 5 - "log/slog" 6 - "reflect" 7 5 "sync" 8 6 9 7 "github.com/bluesky-social/indigo/atproto/syntax" 10 8 "tangled.org/core/appview/models" 11 - "tangled.org/core/log" 12 9 ) 13 10 14 11 type mergedNotifier struct { 15 12 notifiers []Notifier 16 - logger *slog.Logger 17 13 } 18 14 19 - func NewMergedNotifier(notifiers []Notifier, logger *slog.Logger) Notifier { 20 - return &mergedNotifier{notifiers, logger} 15 + func NewMergedNotifier(notifiers []Notifier) Notifier { 16 + return &mergedNotifier{notifiers} 21 17 } 22 18 23 19 var _ Notifier = &mergedNotifier{} 24 20 25 21 // fanout calls the same method on all notifiers concurrently 26 - func (m *mergedNotifier) fanout(method string, ctx context.Context, args ...any) { 27 - ctx = log.IntoContext(ctx, m.logger.With("method", method)) 22 + func (m *mergedNotifier) fanout(callback func(Notifier)) { 28 23 var wg sync.WaitGroup 29 24 for _, n := range m.notifiers { 30 25 wg.Add(1) 31 26 go func(notifier Notifier) { 32 27 defer wg.Done() 33 - v := reflect.ValueOf(notifier).MethodByName(method) 34 - in := make([]reflect.Value, len(args)+1) 35 - in[0] = reflect.ValueOf(ctx) 36 - for i, arg := range args { 37 - in[i+1] = reflect.ValueOf(arg) 38 - } 39 - v.Call(in) 28 + callback(n) 40 29 }(n) 41 30 } 42 31 } 43 32 44 33 func (m *mergedNotifier) NewRepo(ctx context.Context, repo *models.Repo) { 45 - m.fanout("NewRepo", ctx, repo) 34 + m.fanout(func(n Notifier) { n.NewRepo(ctx, repo) }) 46 35 } 47 36 48 37 func (m *mergedNotifier) NewStar(ctx context.Context, star *models.Star) { 49 - m.fanout("NewStar", ctx, star) 38 + m.fanout(func(n Notifier) { n.NewStar(ctx, star) }) 50 39 } 51 40 52 41 func (m *mergedNotifier) DeleteStar(ctx context.Context, star *models.Star) { 53 - m.fanout("DeleteStar", ctx, star) 42 + m.fanout(func(n Notifier) { n.DeleteStar(ctx, star) }) 54 43 } 55 44 56 45 func (m *mergedNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) { 57 - m.fanout("NewIssue", ctx, issue, mentions) 46 + m.fanout(func(n Notifier) { n.NewIssue(ctx, issue, mentions) }) 58 47 } 59 48 60 49 func (m *mergedNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) { 61 - m.fanout("NewIssueComment", ctx, comment, mentions) 50 + m.fanout(func(n Notifier) { n.NewIssueComment(ctx, comment, mentions) }) 62 51 } 63 52 64 53 func (m *mergedNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) { 65 - m.fanout("NewIssueState", ctx, actor, issue) 54 + m.fanout(func(n Notifier) { n.NewIssueState(ctx, actor, issue) }) 66 55 } 67 56 68 57 func (m *mergedNotifier) DeleteIssue(ctx context.Context, issue *models.Issue) { 69 - m.fanout("DeleteIssue", ctx, issue) 58 + m.fanout(func(n Notifier) { n.DeleteIssue(ctx, issue) }) 70 59 } 71 60 72 61 func (m *mergedNotifier) NewFollow(ctx context.Context, follow *models.Follow) { 73 - m.fanout("NewFollow", ctx, follow) 62 + m.fanout(func(n Notifier) { n.NewFollow(ctx, follow) }) 74 63 } 75 64 76 65 func (m *mergedNotifier) DeleteFollow(ctx context.Context, follow *models.Follow) { 77 - m.fanout("DeleteFollow", ctx, follow) 66 + m.fanout(func(n Notifier) { n.DeleteFollow(ctx, follow) }) 78 67 } 79 68 80 69 func (m *mergedNotifier) NewPull(ctx context.Context, pull *models.Pull) { 81 - m.fanout("NewPull", ctx, pull) 70 + m.fanout(func(n Notifier) { n.NewPull(ctx, pull) }) 82 71 } 83 72 84 73 func (m *mergedNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) 
{ 85 - m.fanout("NewPullComment", ctx, comment, mentions) 74 + m.fanout(func(n Notifier) { n.NewPullComment(ctx, comment, mentions) }) 86 75 } 87 76 88 77 func (m *mergedNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) { 89 - m.fanout("NewPullState", ctx, actor, pull) 78 + m.fanout(func(n Notifier) { n.NewPullState(ctx, actor, pull) }) 90 79 } 91 80 92 81 func (m *mergedNotifier) UpdateProfile(ctx context.Context, profile *models.Profile) { 93 - m.fanout("UpdateProfile", ctx, profile) 82 + m.fanout(func(n Notifier) { n.UpdateProfile(ctx, profile) }) 94 83 } 95 84 96 85 func (m *mergedNotifier) NewString(ctx context.Context, s *models.String) { 97 - m.fanout("NewString", ctx, s) 86 + m.fanout(func(n Notifier) { n.NewString(ctx, s) }) 98 87 } 99 88 100 89 func (m *mergedNotifier) EditString(ctx context.Context, s *models.String) { 101 - m.fanout("EditString", ctx, s) 90 + m.fanout(func(n Notifier) { n.EditString(ctx, s) }) 102 91 } 103 92 104 93 func (m *mergedNotifier) DeleteString(ctx context.Context, did, rkey string) { 105 - m.fanout("DeleteString", ctx, did, rkey) 94 + m.fanout(func(n Notifier) { n.DeleteString(ctx, did, rkey) }) 106 95 }
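A plausible way the two wrappers compose at startup, given their constructors; the concrete notifiers, the package this lives in, and the attribute names are stand-ins, since the appview's actual wiring is not shown in this diff. The point of the change above is that fanout now goes through typed closures instead of reflect.ValueOf(...).MethodByName, so each per-notifier goroutine simply calls the same method on its target.

package wiring // usage sketch only; everything except the notify constructors is assumed

import (
	"log/slog"

	"tangled.org/core/appview/notify"
)

// buildNotifier wraps each concrete notifier with the logging decorator and
// merges them so every event fans out to all of them concurrently.
func buildNotifier(dbNotifier, posthogNotifier notify.Notifier, logger *slog.Logger) notify.Notifier {
	return notify.NewMergedNotifier([]notify.Notifier{
		notify.NewLoggingNotifier(dbNotifier, logger.With("notifier", "db")),
		notify.NewLoggingNotifier(posthogNotifier, logger.With("notifier", "posthog")),
	})
}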
+53 -1
appview/oauth/handler.go
··· 10 10 "slices" 11 11 "time" 12 12 13 + comatproto "github.com/bluesky-social/indigo/api/atproto" 13 14 "github.com/bluesky-social/indigo/atproto/auth/oauth" 15 + lexutil "github.com/bluesky-social/indigo/lex/util" 14 16 "github.com/go-chi/chi/v5" 15 17 "github.com/posthog/posthog-go" 16 18 "tangled.org/core/api/tangled" 17 19 "tangled.org/core/appview/db" 20 + "tangled.org/core/appview/models" 18 21 "tangled.org/core/consts" 19 22 "tangled.org/core/orm" 20 23 "tangled.org/core/tid" ··· 82 85 } 83 86 84 87 o.Logger.Debug("session saved successfully") 88 + 85 89 go o.addToDefaultKnot(sessData.AccountDID.String()) 86 90 go o.addToDefaultSpindle(sessData.AccountDID.String()) 91 + go o.ensureTangledProfile(sessData) 87 92 88 93 if !o.Config.Core.Dev { 89 94 err = o.Posthog.Enqueue(posthog.Capture{ ··· 162 167 return 163 168 } 164 169 165 - l.Debug("addings to default knot") 170 + l.Debug("adding to default knot") 166 171 session, err := o.createAppPasswordSession(o.Config.Core.TmpAltAppPassword, consts.IcyDid) 167 172 if err != nil { 168 173 l.Error("failed to create session", "err", err) ··· 187 192 } 188 193 189 194 l.Debug("successfully addeds to default Knot") 195 + } 196 + 197 + func (o *OAuth) ensureTangledProfile(sessData *oauth.ClientSessionData) { 198 + ctx := context.Background() 199 + did := sessData.AccountDID.String() 200 + l := o.Logger.With("did", did) 201 + 202 + _, err := db.GetProfile(o.Db, did) 203 + if err == nil { 204 + l.Debug("profile already exists in DB") 205 + return 206 + } 207 + 208 + l.Debug("creating empty Tangled profile") 209 + 210 + sess, err := o.ClientApp.ResumeSession(ctx, sessData.AccountDID, sessData.SessionID) 211 + if err != nil { 212 + l.Error("failed to resume session for profile creation", "err", err) 213 + return 214 + } 215 + client := sess.APIClient() 216 + 217 + _, err = comatproto.RepoPutRecord(ctx, client, &comatproto.RepoPutRecord_Input{ 218 + Collection: tangled.ActorProfileNSID, 219 + Repo: did, 220 + Rkey: "self", 221 + Record: &lexutil.LexiconTypeDecoder{Val: &tangled.ActorProfile{}}, 222 + }) 223 + 224 + if err != nil { 225 + l.Error("failed to create empty profile on PDS", "err", err) 226 + return 227 + } 228 + 229 + tx, err := o.Db.BeginTx(ctx, nil) 230 + if err != nil { 231 + l.Error("failed to start transaction", "err", err) 232 + return 233 + } 234 + 235 + emptyProfile := &models.Profile{Did: did} 236 + if err := db.UpsertProfile(tx, emptyProfile); err != nil { 237 + l.Error("failed to create empty profile in DB", "err", err) 238 + return 239 + } 240 + 241 + l.Debug("successfully created empty Tangled profile on PDS and DB") 190 242 } 191 243 192 244 // create a session using apppasswords
+13 -4
appview/oauth/oauth.go
··· 37 37 38 38 func New(config *config.Config, ph posthog.Client, db *db.DB, enforcer *rbac.Enforcer, res *idresolver.Resolver, logger *slog.Logger) (*OAuth, error) { 39 39 var oauthConfig oauth.ClientConfig 40 - clientUri := config.Core.Url() 41 - callbackUri := clientUri + "/oauth/callback" 40 + var clientUri string 42 41 if config.Core.Dev { 43 - oauthConfig = oauth.NewLocalhostConfig(callbackUri, []string{"atproto", "transition:generic"}) 42 + clientUri = "http://127.0.0.1:3000" 43 + callbackUri := clientUri + "/oauth/callback" 44 + oauthConfig = oauth.NewLocalhostConfig(callbackUri, TangledScopes) 44 45 } else { 46 + clientUri = config.Core.AppviewHost 45 47 clientId := fmt.Sprintf("%s/oauth/client-metadata.json", clientUri) 46 - oauthConfig = oauth.NewPublicConfig(clientId, callbackUri, []string{"atproto", "transition:generic"}) 48 + callbackUri := clientUri + "/oauth/callback" 49 + oauthConfig = oauth.NewPublicConfig(clientId, callbackUri, TangledScopes) 47 50 } 48 51 49 52 // configure client secret ··· 166 169 167 170 // delete the session 168 171 err1 := o.ClientApp.Logout(r.Context(), sessDid, sessId) 172 + if err1 != nil { 173 + err1 = fmt.Errorf("failed to logout: %w", err1) 174 + } 169 175 170 176 // remove the cookie 171 177 userSession.Options.MaxAge = -1 172 178 err2 := o.SessStore.Save(r, w, userSession) 179 + if err2 != nil { 180 + err2 = fmt.Errorf("failed to save into session store: %w", err2) 181 + } 173 182 174 183 return errors.Join(err1, err2) 175 184 }
+41
appview/oauth/scopes.go
··· 1 + package oauth 2 + 3 + var TangledScopes = []string{ 4 + "atproto", 5 + 6 + "repo:sh.tangled.publicKey", 7 + "repo:sh.tangled.repo", 8 + "repo:sh.tangled.repo.pull", 9 + "repo:sh.tangled.repo.pull.comment", 10 + "repo:sh.tangled.repo.artifact", 11 + "repo:sh.tangled.repo.issue", 12 + "repo:sh.tangled.repo.issue.comment", 13 + "repo:sh.tangled.repo.collaborator", 14 + "repo:sh.tangled.knot", 15 + "repo:sh.tangled.knot.member", 16 + "repo:sh.tangled.spindle", 17 + "repo:sh.tangled.spindle.member", 18 + "repo:sh.tangled.graph.follow", 19 + "repo:sh.tangled.feed.star", 20 + "repo:sh.tangled.feed.reaction", 21 + "repo:sh.tangled.label.definition", 22 + "repo:sh.tangled.label.op", 23 + "repo:sh.tangled.string", 24 + "repo:sh.tangled.actor.profile", 25 + 26 + "blob:*/*", 27 + 28 + "rpc:sh.tangled.repo.create?aud=*", 29 + "rpc:sh.tangled.repo.delete?aud=*", 30 + "rpc:sh.tangled.repo.merge?aud=*", 31 + "rpc:sh.tangled.repo.hiddenRef?aud=*", 32 + "rpc:sh.tangled.repo.deleteBranch?aud=*", 33 + "rpc:sh.tangled.repo.setDefaultBranch?aud=*", 34 + "rpc:sh.tangled.repo.forkSync?aud=*", 35 + "rpc:sh.tangled.repo.forkStatus?aud=*", 36 + "rpc:sh.tangled.repo.mergeCheck?aud=*", 37 + "rpc:sh.tangled.pipeline.cancelPipeline?aud=*", 38 + "rpc:sh.tangled.repo.addSecret?aud=*", 39 + "rpc:sh.tangled.repo.removeSecret?aud=*", 40 + "rpc:sh.tangled.repo.listSecrets?aud=*", 41 + }
+56
appview/ogcard/card.go
··· 257 257 return textWidth, err 258 258 } 259 259 260 + func (c *Card) FontHeight(sizePt float64) int { 261 + ft := freetype.NewContext() 262 + ft.SetDPI(72) 263 + ft.SetFont(c.Font) 264 + ft.SetFontSize(sizePt) 265 + return ft.PointToFixed(sizePt).Ceil() 266 + } 267 + 268 + func (c *Card) TextWidth(text string, sizePt float64) int { 269 + face := truetype.NewFace(c.Font, &truetype.Options{Size: sizePt, DPI: 72}) 270 + lineWidth := font.MeasureString(face, text) 271 + textWidth := lineWidth.Ceil() 272 + return textWidth 273 + } 274 + 260 275 // DrawBoldText draws bold text by rendering multiple times with slight offsets 261 276 func (c *Card) DrawBoldText(text string, x, y int, textColor color.Color, sizePt float64, valign VAlign, halign HAlign) (int, error) { 262 277 // Draw the text multiple times with slight offsets to create bold effect ··· 582 597 func (c *Card) DrawRect(startX, startY, endX, endY int, color color.Color) { 583 598 draw.Draw(c.Img, image.Rect(startX, startY, endX, endY), &image.Uniform{color}, image.Point{}, draw.Src) 584 599 } 600 + 601 + // drawRoundedRect draws a filled rounded rectangle on the given card 602 + func (card *Card) DrawRoundedRect(x, y, width, height, cornerRadius int, fillColor color.RGBA) { 603 + cardBounds := card.Img.Bounds() 604 + for py := y; py < y+height; py++ { 605 + for px := x; px < x+width; px++ { 606 + // calculate distance from corners 607 + dx := 0 608 + dy := 0 609 + 610 + // check which corner region we're in 611 + if px < x+cornerRadius && py < y+cornerRadius { 612 + // top-left corner 613 + dx = x + cornerRadius - px 614 + dy = y + cornerRadius - py 615 + } else if px >= x+width-cornerRadius && py < y+cornerRadius { 616 + // top-right corner 617 + dx = px - (x + width - cornerRadius - 1) 618 + dy = y + cornerRadius - py 619 + } else if px < x+cornerRadius && py >= y+height-cornerRadius { 620 + // bottom-left corner 621 + dx = x + cornerRadius - px 622 + dy = py - (y + height - cornerRadius - 1) 623 + } else if px >= x+width-cornerRadius && py >= y+height-cornerRadius { 624 + // Bottom-right corner 625 + dx = px - (x + width - cornerRadius - 1) 626 + dy = py - (y + height - cornerRadius - 1) 627 + } 628 + 629 + // if we're in a corner, check if we're within the radius 630 + inCorner := (dx > 0 || dy > 0) 631 + withinRadius := dx*dx+dy*dy <= cornerRadius*cornerRadius 632 + 633 + // draw pixel if not in corner, or in corner and within radius 634 + // check bounds relative to the card's image bounds 635 + if (!inCorner || withinRadius) && px >= 0 && px < cardBounds.Dx() && py >= 0 && py < cardBounds.Dy() { 636 + card.Img.Set(px+cardBounds.Min.X, py+cardBounds.Min.Y, fillColor) 637 + } 638 + } 639 + } 640 + }
+97 -11
appview/pages/funcmap.go
··· 26 26 "github.com/go-enry/go-enry/v2" 27 27 "github.com/yuin/goldmark" 28 28 emoji "github.com/yuin/goldmark-emoji" 29 + "tangled.org/core/appview/db" 29 30 "tangled.org/core/appview/models" 30 31 "tangled.org/core/appview/oauth" 31 32 "tangled.org/core/appview/pages/markup" 32 - "tangled.org/core/appview/pages/markup/sanitizer" 33 33 "tangled.org/core/crypto" 34 34 ) 35 + 36 + type tab map[string]string 35 37 36 38 func (p *Pages) funcMap() template.FuncMap { 37 39 return template.FuncMap{ ··· 258 260 "markdown": func(text string) template.HTML { 259 261 p.rctx.RendererType = markup.RendererTypeDefault 260 262 htmlString := p.rctx.RenderMarkdown(text) 261 - sanitized := sanitizer.SanitizeDefault(htmlString) 263 + sanitized := p.rctx.SanitizeDefault(htmlString) 262 264 return template.HTML(sanitized) 263 265 }, 264 266 "description": func(text string) template.HTML { ··· 268 270 emoji.Emoji, 269 271 ), 270 272 )) 271 - sanitized := sanitizer.SanitizeDescription(htmlString) 273 + sanitized := p.rctx.SanitizeDescription(htmlString) 272 274 return template.HTML(sanitized) 273 275 }, 274 276 "readme": func(text string) template.HTML { 275 277 p.rctx.RendererType = markup.RendererTypeRepoMarkdown 276 278 htmlString := p.rctx.RenderMarkdown(text) 277 - sanitized := sanitizer.SanitizeDefault(htmlString) 279 + sanitized := p.rctx.SanitizeDefault(htmlString) 278 280 return template.HTML(sanitized) 279 281 }, 280 282 "code": func(content, path string) string { ··· 333 335 } 334 336 return dict, nil 335 337 }, 338 + "queryParams": func(params ...any) (url.Values, error) { 339 + if len(params)%2 != 0 { 340 + return nil, errors.New("invalid queryParams call") 341 + } 342 + vals := make(url.Values, len(params)/2) 343 + for i := 0; i < len(params); i += 2 { 344 + key, ok := params[i].(string) 345 + if !ok { 346 + return nil, errors.New("queryParams keys must be strings") 347 + } 348 + v, ok := params[i+1].(string) 349 + if !ok { 350 + return nil, errors.New("queryParams values must be strings") 351 + } 352 + vals.Add(key, v) 353 + } 354 + return vals, nil 355 + }, 336 356 "deref": func(v any) any { 337 357 val := reflect.ValueOf(v) 338 358 if val.Kind() == reflect.Pointer && !val.IsNil() { ··· 365 385 "fullAvatar": func(handle string) string { 366 386 return p.AvatarUrl(handle, "") 367 387 }, 388 + "placeholderAvatar": func(size string) template.HTML { 389 + sizeClass := "size-6" 390 + iconSize := "size-4" 391 + if size == "tiny" { 392 + sizeClass = "size-6" 393 + iconSize = "size-4" 394 + } else if size == "small" { 395 + sizeClass = "size-8" 396 + iconSize = "size-5" 397 + } else { 398 + sizeClass = "size-12" 399 + iconSize = "size-8" 400 + } 401 + icon, _ := p.icon("user-round", []string{iconSize, "text-gray-400", "dark:text-gray-500"}) 402 + return template.HTML(fmt.Sprintf(`<div class="%s rounded-full bg-gray-200 dark:bg-gray-700 flex items-center justify-center flex-shrink-0">%s</div>`, sizeClass, icon)) 403 + }, 404 + "profileAvatarUrl": func(profile *models.Profile, size string) string { 405 + if profile != nil { 406 + return p.AvatarUrl(profile.Did, size) 407 + } 408 + return "" 409 + }, 368 410 "langColor": enry.GetColor, 369 411 "reverse": func(s any) any { 370 412 if s == nil { ··· 407 449 } 408 450 return result 409 451 }, 452 + // constant values used to define a template 453 + "const": func() map[string]any { 454 + return map[string]any{ 455 + "OrderedReactionKinds": models.OrderedReactionKinds, 456 + // would be great to have ordered maps right about now 457 + "UserSettingsTabs": 
[]tab{ 458 + {"Name": "profile", "Icon": "user"}, 459 + {"Name": "keys", "Icon": "key"}, 460 + {"Name": "emails", "Icon": "mail"}, 461 + {"Name": "notifications", "Icon": "bell"}, 462 + {"Name": "knots", "Icon": "volleyball"}, 463 + {"Name": "spindles", "Icon": "spool"}, 464 + }, 465 + "RepoSettingsTabs": []tab{ 466 + {"Name": "general", "Icon": "sliders-horizontal"}, 467 + {"Name": "access", "Icon": "users"}, 468 + {"Name": "pipelines", "Icon": "layers-2"}, 469 + }, 470 + } 471 + }, 410 472 } 411 473 } 412 474 ··· 424 486 return identity.Handle.String() 425 487 } 426 488 427 - func (p *Pages) AvatarUrl(handle, size string) string { 428 - handle = strings.TrimPrefix(handle, "@") 489 + func (p *Pages) AvatarUrl(actor, size string) string { 490 + actor = strings.TrimPrefix(actor, "@") 429 491 430 - handle = p.resolveDid(handle) 492 + identity, err := p.resolver.ResolveIdent(context.Background(), actor) 493 + var did string 494 + if err != nil { 495 + did = actor 496 + } else { 497 + did = identity.DID.String() 498 + } 431 499 432 500 secret := p.avatar.SharedSecret 433 501 h := hmac.New(sha256.New, []byte(secret)) 434 - h.Write([]byte(handle)) 502 + h.Write([]byte(did)) 435 503 signature := hex.EncodeToString(h.Sum(nil)) 436 504 437 - sizeArg := "" 505 + // Get avatar CID for cache busting 506 + profile, err := db.GetProfile(p.db, did) 507 + version := "" 508 + if err == nil && profile != nil && profile.Avatar != "" { 509 + // Use first 8 chars of avatar CID as version 510 + if len(profile.Avatar) > 8 { 511 + version = profile.Avatar[:8] 512 + } else { 513 + version = profile.Avatar 514 + } 515 + } 516 + 517 + baseUrl := fmt.Sprintf("%s/%s/%s", p.avatar.Host, signature, did) 438 518 if size != "" { 439 - sizeArg = fmt.Sprintf("size=%s", size) 519 + if version != "" { 520 + return fmt.Sprintf("%s?size=%s&v=%s", baseUrl, size, version) 521 + } 522 + return fmt.Sprintf("%s?size=%s", baseUrl, size) 440 523 } 441 - return fmt.Sprintf("%s/%s/%s?%s", p.avatar.Host, signature, handle, sizeArg) 524 + if version != "" { 525 + return fmt.Sprintf("%s?v=%s", baseUrl, version) 526 + } 527 + return baseUrl 442 528 } 443 529 444 530 func (p *Pages) icon(name string, classes []string) (template.HTML, error) {
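The AvatarUrl change above resolves the actor to a DID before signing, and appends the first eight characters of the avatar blob CID as a cache-busting `v` query parameter. Below is a minimal standalone sketch of the resulting URL shape; the host, secret, DID, and CID are placeholders, and the real method additionally falls back to the raw actor string when identity resolution fails.

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"net/url"
)

// avatarURL mirrors the shape produced by Pages.AvatarUrl: the DID is signed
// with the shared secret, and the first 8 characters of the avatar CID are
// appended as a cache-busting version. All inputs here are placeholders.
func avatarURL(host, secret, did, size, avatarCID string) string {
	h := hmac.New(sha256.New, []byte(secret))
	h.Write([]byte(did))
	sig := hex.EncodeToString(h.Sum(nil))

	q := url.Values{}
	if size != "" {
		q.Set("size", size)
	}
	if avatarCID != "" {
		version := avatarCID
		if len(version) > 8 {
			version = version[:8]
		}
		q.Set("v", version)
	}

	base := fmt.Sprintf("%s/%s/%s", host, sig, did)
	if enc := q.Encode(); enc != "" {
		return base + "?" + enc
	}
	return base
}

func main() {
	fmt.Println(avatarURL("https://avatars.example", "secret", "did:plc:abc123", "tiny", "bafyreigh2akiscaildc"))
}
```

Because the version is derived from the avatar CID, uploading a new avatar changes the URL and naturally invalidates any cached copies.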
+1 -1
appview/pages/funcmap_test.go
··· 22 22 } 23 23 for _, tt := range tests { 24 24 t.Run(tt.name, func(t *testing.T) { 25 - p := NewPages(tt.config, tt.res, tt.l) 25 + p := NewPages(tt.config, tt.res, nil, tt.l) 26 26 got := p.funcMap() 27 27 // TODO: update the condition below to compare got with tt.want. 28 28 if true {
+9
appview/pages/markup/markdown.go
··· 48 48 IsDev bool 49 49 Hostname string 50 50 RendererType RendererType 51 + Sanitizer Sanitizer 51 52 Files fs.FS 52 53 } 53 54 ··· 176 177 } 177 178 default: 178 179 } 180 + } 181 + 182 + func (rctx *RenderContext) SanitizeDefault(html string) string { 183 + return rctx.Sanitizer.SanitizeDefault(html) 184 + } 185 + 186 + func (rctx *RenderContext) SanitizeDescription(html string) string { 187 + return rctx.Sanitizer.SanitizeDescription(html) 179 188 } 180 189 181 190 type MarkdownTransformer struct {
-133
appview/pages/markup/sanitizer/sanitizer.go
··· 1 - package sanitizer 2 - 3 - import ( 4 - "maps" 5 - "regexp" 6 - "slices" 7 - "strings" 8 - 9 - "github.com/alecthomas/chroma/v2" 10 - "github.com/microcosm-cc/bluemonday" 11 - ) 12 - 13 - var ( 14 - defaultPolicy = newDefaultPolicy() 15 - descriptionPolicy = newDescriptionPolicy() 16 - ) 17 - 18 - func SanitizeDefault(html string) string { 19 - return defaultPolicy.Sanitize(html) 20 - } 21 - func SanitizeDescription(html string) string { 22 - return descriptionPolicy.Sanitize(html) 23 - } 24 - 25 - func newDefaultPolicy() *bluemonday.Policy { 26 - policy := bluemonday.UGCPolicy() 27 - 28 - // Allow generally safe attributes 29 - generalSafeAttrs := []string{ 30 - "abbr", "accept", "accept-charset", 31 - "accesskey", "action", "align", "alt", 32 - "aria-describedby", "aria-hidden", "aria-label", "aria-labelledby", 33 - "axis", "border", "cellpadding", "cellspacing", "char", 34 - "charoff", "charset", "checked", 35 - "clear", "cols", "colspan", "color", 36 - "compact", "coords", "datetime", "dir", 37 - "disabled", "enctype", "for", "frame", 38 - "headers", "height", "hreflang", 39 - "hspace", "ismap", "label", "lang", 40 - "maxlength", "media", "method", 41 - "multiple", "name", "nohref", "noshade", 42 - "nowrap", "open", "prompt", "readonly", "rel", "rev", 43 - "rows", "rowspan", "rules", "scope", 44 - "selected", "shape", "size", "span", 45 - "start", "summary", "tabindex", "target", 46 - "title", "type", "usemap", "valign", "value", 47 - "vspace", "width", "itemprop", 48 - } 49 - 50 - generalSafeElements := []string{ 51 - "h1", "h2", "h3", "h4", "h5", "h6", "h7", "h8", "br", "b", "i", "strong", "em", "a", "pre", "code", "img", "tt", 52 - "div", "ins", "del", "sup", "sub", "p", "ol", "ul", "table", "thead", "tbody", "tfoot", "blockquote", "label", 53 - "dl", "dt", "dd", "kbd", "q", "samp", "var", "hr", "ruby", "rt", "rp", "li", "tr", "td", "th", "s", "strike", "summary", 54 - "details", "caption", "figure", "figcaption", 55 - "abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "video", "wbr", 56 - } 57 - 58 - policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) 
59 - 60 - // video 61 - policy.AllowAttrs("src", "autoplay", "controls").OnElements("video") 62 - 63 - // checkboxes 64 - policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") 65 - policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input") 66 - 67 - // for code blocks 68 - policy.AllowAttrs("class").Matching(regexp.MustCompile(`chroma`)).OnElements("pre") 69 - policy.AllowAttrs("class").Matching(regexp.MustCompile(`anchor|footnote-ref|footnote-backref`)).OnElements("a") 70 - policy.AllowAttrs("class").Matching(regexp.MustCompile(`heading`)).OnElements("h1", "h2", "h3", "h4", "h5", "h6", "h7", "h8") 71 - policy.AllowAttrs("class").Matching(regexp.MustCompile(strings.Join(slices.Collect(maps.Values(chroma.StandardTypes)), "|"))).OnElements("span") 72 - 73 - // at-mentions 74 - policy.AllowAttrs("class").Matching(regexp.MustCompile(`mention`)).OnElements("a") 75 - 76 - // centering content 77 - policy.AllowElements("center") 78 - 79 - policy.AllowAttrs("align", "style", "width", "height").Globally() 80 - policy.AllowStyles( 81 - "margin", 82 - "padding", 83 - "text-align", 84 - "font-weight", 85 - "text-decoration", 86 - "padding-left", 87 - "padding-right", 88 - "padding-top", 89 - "padding-bottom", 90 - "margin-left", 91 - "margin-right", 92 - "margin-top", 93 - "margin-bottom", 94 - ) 95 - 96 - // math 97 - mathAttrs := []string{ 98 - "accent", "columnalign", "columnlines", "columnspan", "dir", "display", 99 - "displaystyle", "encoding", "fence", "form", "largeop", "linebreak", 100 - "linethickness", "lspace", "mathcolor", "mathsize", "mathvariant", "minsize", 101 - "movablelimits", "notation", "rowalign", "rspace", "rowspacing", "rowspan", 102 - "scriptlevel", "stretchy", "symmetric", "title", "voffset", "width", 103 - } 104 - mathElements := []string{ 105 - "annotation", "math", "menclose", "merror", "mfrac", "mi", "mmultiscripts", 106 - "mn", "mo", "mover", "mpadded", "mprescripts", "mroot", "mrow", "mspace", 107 - "msqrt", "mstyle", "msub", "msubsup", "msup", "mtable", "mtd", "mtext", 108 - "mtr", "munder", "munderover", "semantics", 109 - } 110 - policy.AllowNoAttrs().OnElements(mathElements...) 111 - policy.AllowAttrs(mathAttrs...).OnElements(mathElements...) 112 - 113 - // goldmark-callout 114 - policy.AllowAttrs("data-callout").OnElements("details") 115 - 116 - return policy 117 - } 118 - 119 - func newDescriptionPolicy() *bluemonday.Policy { 120 - policy := bluemonday.NewPolicy() 121 - policy.AllowStandardURLs() 122 - 123 - // allow italics and bold. 124 - policy.AllowElements("i", "b", "em", "strong") 125 - 126 - // allow code. 127 - policy.AllowElements("code") 128 - 129 - // allow links 130 - policy.AllowAttrs("href", "target", "rel").OnElements("a") 131 - 132 - return policy 133 - }
+140
appview/pages/markup/sanitizer.go
··· 1 + package markup 2 + 3 + import ( 4 + "maps" 5 + "regexp" 6 + "slices" 7 + "strings" 8 + 9 + "github.com/alecthomas/chroma/v2" 10 + "github.com/microcosm-cc/bluemonday" 11 + ) 12 + 13 + type Sanitizer struct { 14 + defaultPolicy *bluemonday.Policy 15 + descriptionPolicy *bluemonday.Policy 16 + } 17 + 18 + func NewSanitizer() Sanitizer { 19 + return Sanitizer{ 20 + defaultPolicy: defaultPolicy(), 21 + descriptionPolicy: descriptionPolicy(), 22 + } 23 + } 24 + 25 + func (s *Sanitizer) SanitizeDefault(html string) string { 26 + return s.defaultPolicy.Sanitize(html) 27 + } 28 + func (s *Sanitizer) SanitizeDescription(html string) string { 29 + return s.descriptionPolicy.Sanitize(html) 30 + } 31 + 32 + func defaultPolicy() *bluemonday.Policy { 33 + policy := bluemonday.UGCPolicy() 34 + 35 + // Allow generally safe attributes 36 + generalSafeAttrs := []string{ 37 + "abbr", "accept", "accept-charset", 38 + "accesskey", "action", "align", "alt", 39 + "aria-describedby", "aria-hidden", "aria-label", "aria-labelledby", 40 + "axis", "border", "cellpadding", "cellspacing", "char", 41 + "charoff", "charset", "checked", 42 + "clear", "cols", "colspan", "color", 43 + "compact", "coords", "datetime", "dir", 44 + "disabled", "enctype", "for", "frame", 45 + "headers", "height", "hreflang", 46 + "hspace", "ismap", "label", "lang", 47 + "maxlength", "media", "method", 48 + "multiple", "name", "nohref", "noshade", 49 + "nowrap", "open", "prompt", "readonly", "rel", "rev", 50 + "rows", "rowspan", "rules", "scope", 51 + "selected", "shape", "size", "span", 52 + "start", "summary", "tabindex", "target", 53 + "title", "type", "usemap", "valign", "value", 54 + "vspace", "width", "itemprop", 55 + } 56 + 57 + generalSafeElements := []string{ 58 + "h1", "h2", "h3", "h4", "h5", "h6", "h7", "h8", "br", "b", "i", "strong", "em", "a", "pre", "code", "img", "tt", 59 + "div", "ins", "del", "sup", "sub", "p", "ol", "ul", "table", "thead", "tbody", "tfoot", "blockquote", "label", 60 + "dl", "dt", "dd", "kbd", "q", "samp", "var", "hr", "ruby", "rt", "rp", "li", "tr", "td", "th", "s", "strike", "summary", 61 + "details", "caption", "figure", "figcaption", 62 + "abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "video", "wbr", 63 + } 64 + 65 + policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) 
66 + 67 + // video 68 + policy.AllowAttrs("src", "autoplay", "controls").OnElements("video") 69 + 70 + // checkboxes 71 + policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") 72 + policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input") 73 + 74 + // for code blocks 75 + policy.AllowAttrs("class").Matching(regexp.MustCompile(`chroma`)).OnElements("pre") 76 + policy.AllowAttrs("class").Matching(regexp.MustCompile(`anchor|footnote-ref|footnote-backref`)).OnElements("a") 77 + policy.AllowAttrs("class").Matching(regexp.MustCompile(`heading`)).OnElements("h1", "h2", "h3", "h4", "h5", "h6", "h7", "h8") 78 + policy.AllowAttrs("class").Matching(regexp.MustCompile(strings.Join(slices.Collect(maps.Values(chroma.StandardTypes)), "|"))).OnElements("span") 79 + 80 + // at-mentions 81 + policy.AllowAttrs("class").Matching(regexp.MustCompile(`mention`)).OnElements("a") 82 + 83 + // centering content 84 + policy.AllowElements("center") 85 + 86 + policy.AllowAttrs("align", "style", "width", "height").Globally() 87 + policy.AllowStyles( 88 + "margin", 89 + "padding", 90 + "text-align", 91 + "font-weight", 92 + "text-decoration", 93 + "padding-left", 94 + "padding-right", 95 + "padding-top", 96 + "padding-bottom", 97 + "margin-left", 98 + "margin-right", 99 + "margin-top", 100 + "margin-bottom", 101 + ) 102 + 103 + // math 104 + mathAttrs := []string{ 105 + "accent", "columnalign", "columnlines", "columnspan", "dir", "display", 106 + "displaystyle", "encoding", "fence", "form", "largeop", "linebreak", 107 + "linethickness", "lspace", "mathcolor", "mathsize", "mathvariant", "minsize", 108 + "movablelimits", "notation", "rowalign", "rspace", "rowspacing", "rowspan", 109 + "scriptlevel", "stretchy", "symmetric", "title", "voffset", "width", 110 + } 111 + mathElements := []string{ 112 + "annotation", "math", "menclose", "merror", "mfrac", "mi", "mmultiscripts", 113 + "mn", "mo", "mover", "mpadded", "mprescripts", "mroot", "mrow", "mspace", 114 + "msqrt", "mstyle", "msub", "msubsup", "msup", "mtable", "mtd", "mtext", 115 + "mtr", "munder", "munderover", "semantics", 116 + } 117 + policy.AllowNoAttrs().OnElements(mathElements...) 118 + policy.AllowAttrs(mathAttrs...).OnElements(mathElements...) 119 + 120 + // goldmark-callout 121 + policy.AllowAttrs("data-callout").OnElements("details") 122 + 123 + return policy 124 + } 125 + 126 + func descriptionPolicy() *bluemonday.Policy { 127 + policy := bluemonday.NewPolicy() 128 + policy.AllowStandardURLs() 129 + 130 + // allow italics and bold. 131 + policy.AllowElements("i", "b", "em", "strong") 132 + 133 + // allow code. 134 + policy.AllowElements("code") 135 + 136 + // allow links 137 + policy.AllowAttrs("href", "target", "rel").OnElements("a") 138 + 139 + return policy 140 + }
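The former package-level sanitizer functions are replaced by a Sanitizer value carried on the RenderContext, with both bluemonday policies compiled once in NewSanitizer and reused for every call. A rough usage sketch, with purely illustrative HTML input:

```go
package main

import (
	"fmt"

	"tangled.org/core/appview/pages/markup"
)

func main() {
	// Policies are built once here, not on every Sanitize call.
	s := markup.NewSanitizer()

	unsafe := `<p onclick="steal()">hello <script>alert(1)</script><b>world</b></p>`

	// Full UGC-style policy used for markdown and readme rendering.
	fmt.Println(s.SanitizeDefault(unsafe))

	// Stricter policy used for descriptions: only a few inline tags and links survive.
	fmt.Println(s.SanitizeDescription(unsafe))
}
```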
+88 -56
appview/pages/pages.go
··· 19 19 "tangled.org/core/api/tangled" 20 20 "tangled.org/core/appview/commitverify" 21 21 "tangled.org/core/appview/config" 22 + "tangled.org/core/appview/db" 22 23 "tangled.org/core/appview/models" 23 24 "tangled.org/core/appview/oauth" 24 25 "tangled.org/core/appview/pages/markup" 25 - "tangled.org/core/appview/pages/markup/sanitizer" 26 26 "tangled.org/core/appview/pages/repoinfo" 27 27 "tangled.org/core/appview/pagination" 28 28 "tangled.org/core/idresolver" ··· 43 43 44 44 avatar config.AvatarConfig 45 45 resolver *idresolver.Resolver 46 + db *db.DB 46 47 dev bool 47 48 embedFS fs.FS 48 49 templateDir string // Path to templates on disk for dev mode ··· 50 51 logger *slog.Logger 51 52 } 52 53 53 - func NewPages(config *config.Config, res *idresolver.Resolver, logger *slog.Logger) *Pages { 54 + func NewPages(config *config.Config, res *idresolver.Resolver, database *db.DB, logger *slog.Logger) *Pages { 54 55 // initialized with safe defaults, can be overriden per use 55 56 rctx := &markup.RenderContext{ 56 57 IsDev: config.Core.Dev, 57 58 Hostname: config.Core.AppviewHost, 58 59 CamoUrl: config.Camo.Host, 59 60 CamoSecret: config.Camo.SharedSecret, 61 + Sanitizer: markup.NewSanitizer(), 60 62 Files: Files, 61 63 } 62 64 ··· 67 69 avatar: config.Avatar, 68 70 rctx: rctx, 69 71 resolver: res, 72 + db: database, 70 73 templateDir: "appview/pages", 71 74 logger: logger, 72 75 } ··· 175 178 return p.parse(stack...) 176 179 } 177 180 181 + func (p *Pages) parseLoginBase(top string) (*template.Template, error) { 182 + stack := []string{ 183 + "layouts/base", 184 + "layouts/loginbase", 185 + top, 186 + } 187 + return p.parse(stack...) 188 + } 189 + 178 190 func (p *Pages) executePlain(name string, w io.Writer, params any) error { 179 191 tpl, err := p.parse(name) 180 192 if err != nil { ··· 184 196 return tpl.Execute(w, params) 185 197 } 186 198 199 + func (p *Pages) executeLogin(name string, w io.Writer, params any) error { 200 + tpl, err := p.parseLoginBase(name) 201 + if err != nil { 202 + return err 203 + } 204 + 205 + return tpl.ExecuteTemplate(w, "layouts/base", params) 206 + } 207 + 187 208 func (p *Pages) execute(name string, w io.Writer, params any) error { 188 209 tpl, err := p.parseBase(name) 189 210 if err != nil { ··· 234 255 } 235 256 236 257 func (p *Pages) Login(w io.Writer, params LoginParams) error { 237 - return p.executePlain("user/login", w, params) 258 + return p.executeLogin("user/login", w, params) 238 259 } 239 260 240 261 type SignupParams struct { ··· 242 263 } 243 264 244 265 func (p *Pages) Signup(w io.Writer, params SignupParams) error { 245 - return p.executePlain("user/signup", w, params) 266 + return p.executeLogin("user/signup", w, params) 246 267 } 247 268 248 269 func (p *Pages) CompleteSignup(w io.Writer) error { 249 - return p.executePlain("user/completeSignup", w, nil) 270 + return p.executeLogin("user/completeSignup", w, nil) 250 271 } 251 272 252 273 type TermsOfServiceParams struct { ··· 271 292 272 293 p.rctx.RendererType = markup.RendererTypeDefault 273 294 htmlString := p.rctx.RenderMarkdown(string(markdownBytes)) 274 - sanitized := sanitizer.SanitizeDefault(htmlString) 295 + sanitized := p.rctx.SanitizeDefault(htmlString) 275 296 params.Content = template.HTML(sanitized) 276 297 277 298 return p.execute("legal/terms", w, params) ··· 299 320 300 321 p.rctx.RendererType = markup.RendererTypeDefault 301 322 htmlString := p.rctx.RenderMarkdown(string(markdownBytes)) 302 - sanitized := sanitizer.SanitizeDefault(htmlString) 323 + sanitized := 
p.rctx.SanitizeDefault(htmlString) 303 324 params.Content = template.HTML(sanitized) 304 325 305 326 return p.execute("legal/privacy", w, params) ··· 339 360 340 361 type UserProfileSettingsParams struct { 341 362 LoggedInUser *oauth.MultiAccountUser 342 - Tabs []map[string]any 343 363 Tab string 344 364 } 345 365 346 366 func (p *Pages) UserProfileSettings(w io.Writer, params UserProfileSettingsParams) error { 367 + params.Tab = "profile" 347 368 return p.execute("user/settings/profile", w, params) 348 369 } 349 370 ··· 378 399 type UserKeysSettingsParams struct { 379 400 LoggedInUser *oauth.MultiAccountUser 380 401 PubKeys []models.PublicKey 381 - Tabs []map[string]any 382 402 Tab string 383 403 } 384 404 385 405 func (p *Pages) UserKeysSettings(w io.Writer, params UserKeysSettingsParams) error { 406 + params.Tab = "keys" 386 407 return p.execute("user/settings/keys", w, params) 387 408 } 388 409 389 410 type UserEmailsSettingsParams struct { 390 411 LoggedInUser *oauth.MultiAccountUser 391 412 Emails []models.Email 392 - Tabs []map[string]any 393 413 Tab string 394 414 } 395 415 396 416 func (p *Pages) UserEmailsSettings(w io.Writer, params UserEmailsSettingsParams) error { 417 + params.Tab = "emails" 397 418 return p.execute("user/settings/emails", w, params) 398 419 } 399 420 400 421 type UserNotificationSettingsParams struct { 401 422 LoggedInUser *oauth.MultiAccountUser 402 423 Preferences *models.NotificationPreferences 403 - Tabs []map[string]any 404 424 Tab string 405 425 } 406 426 407 427 func (p *Pages) UserNotificationSettings(w io.Writer, params UserNotificationSettingsParams) error { 428 + params.Tab = "notifications" 408 429 return p.execute("user/settings/notifications", w, params) 409 430 } 410 431 ··· 420 441 type KnotsParams struct { 421 442 LoggedInUser *oauth.MultiAccountUser 422 443 Registrations []models.Registration 423 - Tabs []map[string]any 424 444 Tab string 425 445 } 426 446 427 447 func (p *Pages) Knots(w io.Writer, params KnotsParams) error { 448 + params.Tab = "knots" 428 449 return p.execute("knots/index", w, params) 429 450 } 430 451 ··· 434 455 Members []string 435 456 Repos map[string][]models.Repo 436 457 IsOwner bool 437 - Tabs []map[string]any 438 458 Tab string 439 459 } 440 460 ··· 453 473 type SpindlesParams struct { 454 474 LoggedInUser *oauth.MultiAccountUser 455 475 Spindles []models.Spindle 456 - Tabs []map[string]any 457 476 Tab string 458 477 } 459 478 460 479 func (p *Pages) Spindles(w io.Writer, params SpindlesParams) error { 480 + params.Tab = "spindles" 461 481 return p.execute("spindles/index", w, params) 462 482 } 463 483 464 484 type SpindleListingParams struct { 465 485 models.Spindle 466 - Tabs []map[string]any 467 - Tab string 486 + Tab string 468 487 } 469 488 470 489 func (p *Pages) SpindleListing(w io.Writer, params SpindleListingParams) error { ··· 476 495 Spindle models.Spindle 477 496 Members []string 478 497 Repos map[string][]models.Repo 479 - Tabs []map[string]any 480 498 Tab string 481 499 } 482 500 ··· 699 717 case ".md", ".markdown", ".mdown", ".mkdn", ".mkd": 700 718 params.Raw = false 701 719 htmlString := p.rctx.RenderMarkdown(params.Readme) 702 - sanitized := sanitizer.SanitizeDefault(htmlString) 720 + sanitized := p.rctx.SanitizeDefault(htmlString) 703 721 params.HTMLReadme = template.HTML(sanitized) 704 722 default: 705 723 params.Raw = true ··· 746 764 } 747 765 748 766 type RepoTreeParams struct { 749 - LoggedInUser *oauth.MultiAccountUser 750 - RepoInfo repoinfo.RepoInfo 751 - Active string 752 - BreadCrumbs 
[][]string 753 - TreePath string 754 - Raw bool 755 - HTMLReadme template.HTML 767 + LoggedInUser *oauth.MultiAccountUser 768 + RepoInfo repoinfo.RepoInfo 769 + Active string 770 + BreadCrumbs [][]string 771 + TreePath string 772 + Raw bool 773 + HTMLReadme template.HTML 774 + EmailToDid map[string]string 775 + LastCommitInfo *types.LastCommitInfo 756 776 types.RepoTreeResponse 757 777 } 758 778 ··· 790 810 case ".md", ".markdown", ".mdown", ".mkdn", ".mkd": 791 811 params.Raw = false 792 812 htmlString := p.rctx.RenderMarkdown(params.Readme) 793 - sanitized := sanitizer.SanitizeDefault(htmlString) 813 + sanitized := p.rctx.SanitizeDefault(htmlString) 794 814 params.HTMLReadme = template.HTML(sanitized) 795 815 default: 796 816 params.Raw = true ··· 826 846 return p.executeRepo("repo/tags", w, params) 827 847 } 828 848 849 + type RepoTagParams struct { 850 + LoggedInUser *oauth.MultiAccountUser 851 + RepoInfo repoinfo.RepoInfo 852 + Active string 853 + types.RepoTagResponse 854 + ArtifactMap map[plumbing.Hash][]models.Artifact 855 + DanglingArtifacts []models.Artifact 856 + } 857 + 858 + func (p *Pages) RepoTag(w io.Writer, params RepoTagParams) error { 859 + params.Active = "overview" 860 + return p.executeRepo("repo/tag", w, params) 861 + } 862 + 829 863 type RepoArtifactParams struct { 830 864 LoggedInUser *oauth.MultiAccountUser 831 865 RepoInfo repoinfo.RepoInfo ··· 837 871 } 838 872 839 873 type RepoBlobParams struct { 840 - LoggedInUser *oauth.MultiAccountUser 841 - RepoInfo repoinfo.RepoInfo 842 - Active string 843 - BreadCrumbs [][]string 844 - BlobView models.BlobView 874 + LoggedInUser *oauth.MultiAccountUser 875 + RepoInfo repoinfo.RepoInfo 876 + Active string 877 + BreadCrumbs [][]string 878 + BlobView models.BlobView 879 + EmailToDid map[string]string 880 + LastCommitInfo *types.LastCommitInfo 845 881 *tangled.RepoBlob_Output 846 882 } 847 883 ··· 887 923 SubscribedLabels map[string]struct{} 888 924 ShouldSubscribeAll bool 889 925 Active string 890 - Tabs []map[string]any 891 926 Tab string 892 927 Branches []types.Branch 893 928 } 894 929 895 930 func (p *Pages) RepoGeneralSettings(w io.Writer, params RepoGeneralSettingsParams) error { 896 931 params.Active = "settings" 932 + params.Tab = "general" 897 933 return p.executeRepo("repo/settings/general", w, params) 898 934 } 899 935 ··· 901 937 LoggedInUser *oauth.MultiAccountUser 902 938 RepoInfo repoinfo.RepoInfo 903 939 Active string 904 - Tabs []map[string]any 905 940 Tab string 906 941 Collaborators []Collaborator 907 942 } 908 943 909 944 func (p *Pages) RepoAccessSettings(w io.Writer, params RepoAccessSettingsParams) error { 910 945 params.Active = "settings" 946 + params.Tab = "access" 911 947 return p.executeRepo("repo/settings/access", w, params) 912 948 } 913 949 ··· 915 951 LoggedInUser *oauth.MultiAccountUser 916 952 RepoInfo repoinfo.RepoInfo 917 953 Active string 918 - Tabs []map[string]any 919 954 Tab string 920 955 Spindles []string 921 956 CurrentSpindle string ··· 924 959 925 960 func (p *Pages) RepoPipelineSettings(w io.Writer, params RepoPipelineSettingsParams) error { 926 961 params.Active = "settings" 962 + params.Tab = "pipelines" 927 963 return p.executeRepo("repo/settings/pipelines", w, params) 928 964 } 929 965 ··· 953 989 Backlinks []models.RichReferenceLink 954 990 LabelDefs map[string]*models.LabelDefinition 955 991 956 - OrderedReactionKinds []models.ReactionKind 957 - Reactions map[models.ReactionKind]models.ReactionDisplayData 958 - UserReacted map[models.ReactionKind]bool 992 + Reactions 
map[models.ReactionKind]models.ReactionDisplayData 993 + UserReacted map[models.ReactionKind]bool 959 994 } 960 995 961 996 func (p *Pages) RepoSingleIssue(w io.Writer, params RepoSingleIssueParams) error { ··· 1116 1151 ActiveRound int 1117 1152 IsInterdiff bool 1118 1153 1119 - OrderedReactionKinds []models.ReactionKind 1120 - Reactions map[models.ReactionKind]models.ReactionDisplayData 1121 - UserReacted map[models.ReactionKind]bool 1154 + Reactions map[models.ReactionKind]models.ReactionDisplayData 1155 + UserReacted map[models.ReactionKind]bool 1122 1156 1123 1157 LabelDefs map[string]*models.LabelDefinition 1124 1158 } ··· 1129 1163 } 1130 1164 1131 1165 type RepoPullPatchParams struct { 1132 - LoggedInUser *oauth.MultiAccountUser 1133 - RepoInfo repoinfo.RepoInfo 1134 - Pull *models.Pull 1135 - Stack models.Stack 1136 - Diff *types.NiceDiff 1137 - Round int 1138 - Submission *models.PullSubmission 1139 - OrderedReactionKinds []models.ReactionKind 1140 - DiffOpts types.DiffOpts 1166 + LoggedInUser *oauth.MultiAccountUser 1167 + RepoInfo repoinfo.RepoInfo 1168 + Pull *models.Pull 1169 + Stack models.Stack 1170 + Diff *types.NiceDiff 1171 + Round int 1172 + Submission *models.PullSubmission 1173 + DiffOpts types.DiffOpts 1141 1174 } 1142 1175 1143 1176 // this name is a mouthful ··· 1146 1179 } 1147 1180 1148 1181 type RepoPullInterdiffParams struct { 1149 - LoggedInUser *oauth.MultiAccountUser 1150 - RepoInfo repoinfo.RepoInfo 1151 - Pull *models.Pull 1152 - Round int 1153 - Interdiff *patchutil.InterdiffResult 1154 - OrderedReactionKinds []models.ReactionKind 1155 - DiffOpts types.DiffOpts 1182 + LoggedInUser *oauth.MultiAccountUser 1183 + RepoInfo repoinfo.RepoInfo 1184 + Pull *models.Pull 1185 + Round int 1186 + Interdiff *patchutil.InterdiffResult 1187 + DiffOpts types.DiffOpts 1156 1188 } 1157 1189 1158 1190 // this name is a mouthful
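With the new `const` helper, settings handlers only set Tab and the templates read the fixed tab lists themselves, instead of every params struct carrying a Tabs slice. A small self-contained sketch of that pattern with html/template; the template text and the trimmed-down tab list are illustrative, not the real settings templates:

```go
package main

import (
	"html/template"
	"os"
)

type tab map[string]string

func main() {
	funcs := template.FuncMap{
		// mirrors the "const" helper in funcmap.go: fixed values shared by all templates
		"const": func() map[string]any {
			return map[string]any{
				"UserSettingsTabs": []tab{
					{"Name": "profile", "Icon": "user"},
					{"Name": "keys", "Icon": "key"},
				},
			}
		},
	}

	// illustrative template: highlight whichever tab the handler selected via params.Tab
	tpl := template.Must(template.New("tabs").Funcs(funcs).Parse(`
{{- range const.UserSettingsTabs -}}
{{ if eq .Name $.Tab }}[{{ .Name }}]{{ else }} {{ .Name }} {{ end }}
{{- end -}}
`))

	_ = tpl.Execute(os.Stdout, map[string]string{"Tab": "keys"})
}
```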
+2 -2
appview/pages/templates/fragments/pagination.html
··· 1 1 {{ define "fragments/pagination" }} 2 - {{/* Params: Page (pagination.Page), TotalCount (int), BasePath (string), QueryParams (string) */}} 2 + {{/* Params: Page (pagination.Page), TotalCount (int), BasePath (string), QueryParams (url.Values) */}} 3 3 {{ $page := .Page }} 4 4 {{ $totalCount := .TotalCount }} 5 5 {{ $basePath := .BasePath }} 6 - {{ $queryParams := .QueryParams }} 6 + {{ $queryParams := safeUrl .QueryParams.Encode }} 7 7 8 8 {{ $prev := $page.Previous.Offset }} 9 9 {{ $next := $page.Next.Offset }}
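The pagination fragment now receives QueryParams as url.Values and encodes them through safeUrl, rather than interpolating a pre-built query string; callers assemble the values with the new queryParams helper (as the issues template does further down). A tiny sketch of what that encoding yields, using illustrative values:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// roughly what the queryParams helper builds from ("state" "open" "q" "label:bug")
	vals := url.Values{}
	vals.Add("state", "open")
	vals.Add("q", "label:bug")

	// Encode sorts keys and escapes values, so the fragment can append it to hrefs safely.
	fmt.Println(vals.Encode()) // q=label%3Abug&state=open
}
```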
+113
appview/pages/templates/fragments/resizeable.html
··· 1 + {{ define "fragments/resizable" }} 2 + <script> 3 + class ResizablePanel { 4 + constructor(resizerElement) { 5 + this.resizer = resizerElement; 6 + this.isResizing = false; 7 + this.type = resizerElement.dataset.resizer; 8 + this.targetId = resizerElement.dataset.target; 9 + this.target = document.getElementById(this.targetId); 10 + this.min = parseInt(resizerElement.dataset.min) || 100; 11 + this.max = parseInt(resizerElement.dataset.max) || Infinity; 12 + 13 + this.direction = resizerElement.dataset.direction || 'before'; // 'before' or 'after' 14 + 15 + this.handleMouseDown = this.handleMouseDown.bind(this); 16 + this.handleMouseMove = this.handleMouseMove.bind(this); 17 + this.handleMouseUp = this.handleMouseUp.bind(this); 18 + 19 + this.init(); 20 + } 21 + 22 + init() { 23 + this.resizer.addEventListener('mousedown', this.handleMouseDown); 24 + } 25 + 26 + handleMouseDown(e) { 27 + e.preventDefault(); 28 + this.isResizing = true; 29 + this.resizer.classList.add('resizing'); 30 + document.body.style.cursor = this.type === 'vertical' ? 'col-resize' : 'row-resize'; 31 + document.body.style.userSelect = 'none'; 32 + 33 + this.startX = e.clientX; 34 + this.startY = e.clientY; 35 + this.startWidth = this.target.offsetWidth; 36 + this.startHeight = this.target.offsetHeight; 37 + 38 + document.addEventListener('mousemove', this.handleMouseMove); 39 + document.addEventListener('mouseup', this.handleMouseUp); 40 + } 41 + 42 + handleMouseMove(e) { 43 + if (!this.isResizing) return; 44 + 45 + if (this.type === 'vertical') { 46 + let newWidth; 47 + 48 + if (this.direction === 'after') { 49 + const deltaX = this.startX - e.clientX; 50 + newWidth = this.startWidth + deltaX; 51 + } else { 52 + const deltaX = e.clientX - this.startX; 53 + newWidth = this.startWidth + deltaX; 54 + } 55 + 56 + if (newWidth >= this.min && newWidth <= this.max) { 57 + this.target.style.width = newWidth + 'px'; 58 + this.target.style.flexShrink = '0'; 59 + } 60 + } else { 61 + let newHeight; 62 + 63 + if (this.direction === 'after') { 64 + const deltaY = this.startY - e.clientY; 65 + newHeight = this.startHeight + deltaY; 66 + } else { 67 + const deltaY = e.clientY - this.startY; 68 + newHeight = this.startHeight + deltaY; 69 + } 70 + 71 + if (newHeight >= this.min && newHeight <= this.max) { 72 + this.target.style.height = newHeight + 'px'; 73 + } 74 + } 75 + } 76 + 77 + handleMouseUp() { 78 + if (!this.isResizing) return; 79 + 80 + this.isResizing = false; 81 + this.resizer.classList.remove('resizing'); 82 + document.body.style.cursor = ''; 83 + document.body.style.userSelect = ''; 84 + 85 + document.removeEventListener('mousemove', this.handleMouseMove); 86 + document.removeEventListener('mouseup', this.handleMouseUp); 87 + } 88 + 89 + destroy() { 90 + this.resizer.removeEventListener('mousedown', this.handleMouseDown); 91 + document.removeEventListener('mousemove', this.handleMouseMove); 92 + document.removeEventListener('mouseup', this.handleMouseUp); 93 + } 94 + } 95 + 96 + function initializeResizers() { 97 + const resizers = document.querySelectorAll('[data-resizer]'); 98 + const instances = []; 99 + 100 + resizers.forEach(resizer => { 101 + instances.push(new ResizablePanel(resizer)); 102 + }); 103 + 104 + return instances; 105 + } 106 + 107 + if (document.readyState === 'loading') { 108 + document.addEventListener('DOMContentLoaded', initializeResizers); 109 + } else { 110 + initializeResizers(); 111 + } 112 + </script> 113 + {{ end }}
+3 -3
appview/pages/templates/fragments/starBtn.html
··· 15 15 hx-disabled-elt="#starBtn" 16 16 > 17 17 {{ if .IsStarred }} 18 - {{ i "star" "w-4 h-4 fill-current" }} 18 + {{ i "star" "w-4 h-4 fill-current inline group-[.htmx-request]:hidden" }} 19 19 {{ else }} 20 - {{ i "star" "w-4 h-4" }} 20 + {{ i "star" "w-4 h-4 inline group-[.htmx-request]:hidden" }} 21 21 {{ end }} 22 + {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }} 22 23 <span class="text-sm"> 23 24 {{ .StarCount }} 24 25 </span> 25 - {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }} 26 26 </button> 27 27 {{ end }}
+3 -7
appview/pages/templates/fragments/tinyAvatarList.html
··· 5 5 <div class="inline-flex items-center -space-x-3"> 6 6 {{ $c := "z-50 z-40 z-30 z-20 z-10" }} 7 7 {{ range $i, $p := $ps }} 8 - <a href="/{{ resolve . }}" title="{{ resolve . }}"> 9 - <img 10 - src="{{ tinyAvatar . }}" 11 - alt="" 12 - class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900 z-{{sub 5 $i}}0 {{ $classes }}" 13 - /> 14 - </a> 8 + {{ $zIdx := printf "z-%d0" (sub 5 $i) }} 9 + {{ $classes = printf "%s %s" $zIdx $classes }} 10 + {{ template "user/fragments/picLink" (list . $classes ) }} 15 11 {{ end }} 16 12 17 13 {{ if gt (len $all) 5 }}
+2 -6
appview/pages/templates/layouts/fragments/topbar.html
··· 46 46 <details class="relative inline-block text-left nav-dropdown"> 47 47 <summary class="cursor-pointer list-none flex items-center gap-1"> 48 48 {{ $user := .Active.Did }} 49 - <img 50 - src="{{ tinyAvatar $user }}" 51 - alt="" 52 - class="rounded-full h-6 w-6 border border-gray-300 dark:border-gray-700" 53 - /> 49 + {{ template "user/fragments/pic" (list $user "size-6") }} 54 50 <span class="hidden md:inline">{{ $user | resolve | truncateAt30 }}</span> 55 51 </summary> 56 52 <div class="absolute right-0 mt-4 rounded bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700 shadow-lg z-50 text-sm" style="width: 14rem;"> ··· 68 64 hx-swap="none" 69 65 class="{{$linkStyle}} w-full text-left pl-3" 70 66 > 71 - <img src="{{ tinyAvatar .Did }}" alt="" class="rounded-full size-6 flex-shrink-0 border border-gray-300 dark:border-gray-700" /> 67 + {{ template "user/fragments/pic" (list .Did "size-6") }} 72 68 <span class="truncate flex-1">{{ .Did | resolve }}</span> 73 69 </button> 74 70 {{ end }}
+26
appview/pages/templates/layouts/loginbase.html
··· 1 + {{ define "mainLayout" }} 2 + <div class="w-full h-screen flex items-center justify-center bg-white dark:bg-transparent"> 3 + <main class="max-w-md px-7 mt-4"> 4 + {{ template "logo" }} 5 + {{ block "content" . }}{{ end }} 6 + </main> 7 + </div> 8 + {{ end }} 9 + 10 + {{ define "topbarLayout" }} 11 + <div class="hidden"></div> 12 + {{ end }} 13 + 14 + {{ define "footerLayout" }} 15 + <div class="hidden"></div> 16 + {{ end }} 17 + 18 + {{ define "logo" }} 19 + <h1 class="flex place-content-center text-3xl font-semibold italic dark:text-white" > 20 + {{ template "fragments/logotype" }} 21 + </h1> 22 + <h2 class="text-center text-xl italic dark:text-white"> 23 + tightly-knit social coding. 24 + </h2> 25 + {{ end }} 26 +
+1 -1
appview/pages/templates/layouts/profilebase.html
··· 2 2 3 3 {{ define "extrameta" }} 4 4 {{ $handle := resolve .Card.UserDid }} 5 - {{ $avatarUrl := fullAvatar $handle }} 5 + {{ $avatarUrl := profileAvatarUrl .Card.Profile "" }} 6 6 <meta property="og:title" content="{{ $handle }}" /> 7 7 <meta property="og:type" content="profile" /> 8 8 <meta property="og:url" content="https://tangled.org/{{ $handle }}?tab={{ .Active }}" />
+7 -1
appview/pages/templates/repo/blob.html
··· 12 12 13 13 {{ define "repoContent" }} 14 14 {{ $linkstyle := "no-underline hover:underline" }} 15 + 15 16 <div class="pb-2 mb-3 text-base border-b border-gray-200 dark:border-gray-700"> 16 17 <div class="flex flex-col md:flex-row md:justify-between gap-2"> 17 18 <div id="breadcrumbs" class="overflow-x-auto whitespace-nowrap text-gray-400 dark:text-gray-500"> ··· 35 36 36 37 {{ if .BlobView.ShowingText }} 37 38 <span class="select-none px-1 md:px-2 [&:before]:content-['ยท']"></span> 38 - <span>{{ .Lines }} lines</span> 39 + <span>{{ .BlobView.Lines }} lines</span> 39 40 {{ end }} 40 41 41 42 {{ if .BlobView.SizeHint }} ··· 57 58 </div> 58 59 </div> 59 60 </div> 61 + 62 + {{ if .LastCommitInfo }} 63 + {{ template "repo/fragments/lastCommitPanel" $ }} 64 + {{ end }} 65 + 60 66 {{ if .BlobView.IsUnsupported }} 61 67 <p class="text-center text-gray-400 dark:text-gray-500"> 62 68 Previews are not supported for this file type.
+4 -1
appview/pages/templates/repo/commit.html
··· 100 100 {{ if $did }} 101 101 {{ template "user/fragments/picHandleLink" $did }} 102 102 {{ else }} 103 - <a href="mailto:{{ $email }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ $name }}</a> 103 + <span class="flex items-center gap-1"> 104 + {{ placeholderAvatar "tiny" }} 105 + <a href="mailto:{{ $email }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ $name }}</a> 106 + </span> 104 107 {{ end }} 105 108 {{ end }} 106 109
+3 -2
appview/pages/templates/repo/fragments/artifact.html
··· 19 19 {{ if and .LoggedInUser (eq .LoggedInUser.Did .Artifact.Did) }} 20 20 <button 21 21 id="delete-{{ $unique }}" 22 - class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2" 22 + class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group" 23 23 title="Delete artifact" 24 24 hx-delete="/{{ .RepoInfo.FullName }}/tags/{{ .Artifact.Tag.String }}/{{ .Artifact.Name | urlquery }}" 25 25 hx-swap="outerHTML" 26 26 hx-target="#artifact-{{ $unique }}" 27 27 hx-disabled-elt="#delete-{{ $unique }}" 28 28 hx-confirm="Are you sure you want to delete the artifact '{{ .Artifact.Name }}'?"> 29 - {{ i "trash-2" "w-4 h-4" }} 29 + {{ i "trash-2" "size-4 inline group-[.htmx-request]:hidden" }} 30 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 30 31 </button> 31 32 {{ end }} 32 33 </div>
+70
appview/pages/templates/repo/fragments/artifactList.html
··· 1 + {{ define "repo/fragments/artifactList" }} 2 + {{ $root := index . 0 }} 3 + {{ $tag := index . 1 }} 4 + {{ $isPushAllowed := $root.RepoInfo.Roles.IsPushAllowed }} 5 + {{ $artifacts := index $root.ArtifactMap $tag.Tag.Hash }} 6 + 7 + <h2 class="my-4 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold">artifacts</h2> 8 + <div class="flex flex-col rounded border border-gray-200 dark:border-gray-700"> 9 + {{ range $artifact := $artifacts }} 10 + {{ $args := dict "LoggedInUser" $root.LoggedInUser "RepoInfo" $root.RepoInfo "Artifact" $artifact }} 11 + {{ template "repo/fragments/artifact" $args }} 12 + {{ end }} 13 + <div id="artifact-git-source" class="flex items-center justify-between p-2 border-b border-gray-200 dark:border-gray-700"> 14 + <div id="left-side" class="flex items-center gap-2 min-w-0 max-w-[60%]"> 15 + {{ i "archive" "w-4 h-4" }} 16 + <a href="/{{ $root.RepoInfo.FullName }}/archive/{{ pathEscape (print "refs/tags/" $tag.Name) }}" class="no-underline hover:no-underline"> 17 + Source code (.tar.gz) 18 + </a> 19 + </div> 20 + </div> 21 + {{ if $isPushAllowed }} 22 + {{ template "uploadArtifact" (list $root $tag) }} 23 + {{ end }} 24 + </div> 25 + {{ end }} 26 + 27 + {{ define "uploadArtifact" }} 28 + {{ $root := index . 0 }} 29 + {{ $tag := index . 1 }} 30 + {{ $unique := $tag.Tag.Target.String }} 31 + <form 32 + id="upload-{{$unique}}" 33 + method="post" 34 + enctype="multipart/form-data" 35 + hx-post="/{{ $root.RepoInfo.FullName }}/tags/{{ $tag.Name | urlquery }}/upload" 36 + hx-on::after-request="if(event.detail.successful) this.reset()" 37 + hx-disabled-elt="#upload-btn-{{$unique}}" 38 + hx-swap="beforebegin" 39 + hx-target="#artifact-git-source" 40 + class="flex items-center gap-2 px-2 group"> 41 + <div class="flex-grow"> 42 + <input type="file" 43 + name="artifact" 44 + required 45 + class="block py-2 px-0 w-full border-none 46 + text-black dark:text-white 47 + bg-white dark:bg-gray-800 48 + file:mr-4 file:px-2 file:py-2 49 + file:rounded file:border-0 50 + file:text-sm file:font-medium 51 + file:text-gray-700 file:dark:text-gray-300 52 + file:bg-gray-200 file:dark:bg-gray-700 53 + file:hover:bg-gray-100 file:hover:dark:bg-gray-600 54 + "> 55 + </input> 56 + </div> 57 + <div class="flex justify-end"> 58 + <button 59 + type="submit" 60 + class="btn-create gap-2" 61 + id="upload-btn-{{$unique}}" 62 + title="Upload artifact"> 63 + {{ i "upload" "size-4 inline group-[.htmx-request]:hidden" }} 64 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 65 + <span class="hidden md:inline">upload</span> 66 + </button> 67 + </div> 68 + </form> 69 + {{ end }} 70 +
+19 -2
appview/pages/templates/repo/fragments/diff.html
··· 3 3 #filesToggle:checked ~ div label[for="filesToggle"] .show-text { display: none; } 4 4 #filesToggle:checked ~ div label[for="filesToggle"] .hide-text { display: inline; } 5 5 #filesToggle:not(:checked) ~ div label[for="filesToggle"] .hide-text { display: none; } 6 - #filesToggle:checked ~ div div#files { width: fit-content; max-width: 15vw; margin-right: 1rem; } 6 + #filesToggle:checked ~ div div#files { width: fit-content; max-width: 15vw; } 7 7 #filesToggle:not(:checked) ~ div div#files { width: 0; display: none; margin-right: 0; } 8 + #filesToggle:not(:checked) ~ div div#resize-files { display: none; } 8 9 </style> 9 10 10 11 {{ template "diffTopbar" . }} 11 12 {{ block "diffLayout" . }} {{ end }} 13 + {{ template "fragments/resizable" }} 12 14 {{ end }} 13 15 14 16 {{ define "diffTopbar" }} ··· 78 80 79 81 {{ end }} 80 82 83 + {{ define "resize-grip" }} 84 + {{ $id := index . 0 }} 85 + {{ $target := index . 1 }} 86 + {{ $direction := index . 2 }} 87 + <div id="{{ $id }}" 88 + data-resizer="vertical" 89 + data-target="{{ $target }}" 90 + data-direction="{{ $direction }}" 91 + class="resizer-vertical hidden md:flex w-4 sticky top-12 max-h-screen flex-col items-center justify-center group"> 92 + <div class="w-1 h-16 group-hover:h-24 group-[.resizing]:h-24 transition-all rounded-full bg-gray-400 dark:bg-gray-500 group-hover:bg-gray-500 group-hover:dark:bg-gray-400"></div> 93 + </div> 94 + {{ end }} 95 + 81 96 {{ define "diffLayout" }} 82 97 {{ $diff := index . 0 }} 83 98 {{ $opts := index . 1 }} ··· 90 105 </section> 91 106 </div> 92 107 108 + {{ template "resize-grip" (list "resize-files" "files" "before") }} 109 + 93 110 <!-- main content --> 94 - <div class="flex-1 min-w-0 sticky top-12 pb-12"> 111 + <div id="diff-files" class="flex-1 min-w-0 sticky top-12 pb-12"> 95 112 {{ template "diffFiles" (list $diff $opts) }} 96 113 </div> 97 114
+29
appview/pages/templates/repo/fragments/lastCommitPanel.html
··· 1 + {{ define "repo/fragments/lastCommitPanel" }} 2 + {{ $messageParts := splitN .LastCommitInfo.Message "\n\n" 2 }} 3 + <div class="pb-2 mb-3 border-b border-gray-200 dark:border-gray-700 flex items-center justify-between text-sm"> 4 + <div class="flex items-center gap-1"> 5 + {{ if .LastCommitInfo.Author }} 6 + {{ $authorDid := index .EmailToDid .LastCommitInfo.Author.Email }} 7 + <span class="flex items-center gap-1"> 8 + {{ if $authorDid }} 9 + {{ template "user/fragments/picHandleLink" $authorDid }} 10 + {{ else }} 11 + {{ placeholderAvatar "tiny" }} 12 + <a href="mailto:{{ .LastCommitInfo.Author.Email }}" class="no-underline hover:underline">{{ .LastCommitInfo.Author.Name }}</a> 13 + {{ end }} 14 + </span> 15 + <span class="px-1 select-none before:content-['\00B7']"></span> 16 + {{ end }} 17 + <a href="/{{ .RepoInfo.FullName }}/commit/{{ .LastCommitInfo.Hash }}" 18 + class="inline no-underline hover:underline dark:text-white"> 19 + {{ index $messageParts 0 }} 20 + </a> 21 + <span class="px-1 select-none before:content-['\00B7']"></span> 22 + <span class="text-gray-400 dark:text-gray-500">{{ template "repo/fragments/time" .LastCommitInfo.When }}</span> 23 + </div> 24 + <a href="/{{ .RepoInfo.FullName }}/commit/{{ .LastCommitInfo.Hash.String }}" 25 + class="no-underline hover:underline text-gray-700 dark:text-gray-300 bg-gray-100 dark:bg-gray-900 px-2 py-1 rounded font-mono text-xs"> 26 + {{ slice .LastCommitInfo.Hash.String 0 8 }} 27 + </a> 28 + </div> 29 + {{ end }}
+50
appview/pages/templates/repo/fragments/reactions.html
··· 1 + {{ define "repo/fragments/reactions" }} 2 + <div class="flex flex-wrap items-center gap-2"> 3 + {{- $reactions := .Reactions -}} 4 + {{- $userReacted := .UserReacted -}} 5 + {{- $threadAt := .ThreadAt -}} 6 + 7 + {{ template "reactionsPopup" }} 8 + {{ range $kind := const.OrderedReactionKinds }} 9 + {{ $reactionData := index $reactions $kind }} 10 + {{ template "repo/fragments/reaction" 11 + (dict 12 + "Kind" $kind 13 + "Count" $reactionData.Count 14 + "IsReacted" (index $userReacted $kind) 15 + "ThreadAt" $threadAt 16 + "Users" $reactionData.Users) }} 17 + {{ end }} 18 + </div> 19 + {{ end }} 20 + 21 + {{ define "reactionsPopup" }} 22 + <details 23 + id="reactionsPopUp" 24 + class="relative inline-block" 25 + > 26 + <summary 27 + class="flex justify-center items-center min-w-8 min-h-8 rounded border border-gray-200 dark:border-gray-700 28 + hover:bg-gray-50 29 + hover:border-gray-300 30 + dark:hover:bg-gray-700 31 + dark:hover:border-gray-600 32 + cursor-pointer list-none" 33 + > 34 + {{ i "smile" "size-4" }} 35 + </summary> 36 + <div 37 + class="absolute flex left-0 z-10 mt-4 rounded bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700 shadow-lg" 38 + > 39 + {{ range $kind := const.OrderedReactionKinds }} 40 + <button 41 + id="reactBtn-{{ $kind }}" 42 + class="size-12 hover:bg-gray-100 dark:hover:bg-gray-700" 43 + hx-on:click="this.parentElement.parentElement.removeAttribute('open')" 44 + > 45 + {{ $kind }} 46 + </button> 47 + {{ end }} 48 + </div> 49 + </details> 50 + {{ end }}
-30
appview/pages/templates/repo/fragments/reactionsPopUp.html
··· 1 - {{ define "repo/fragments/reactionsPopUp" }} 2 - <details 3 - id="reactionsPopUp" 4 - class="relative inline-block" 5 - > 6 - <summary 7 - class="flex justify-center items-center min-w-8 min-h-8 rounded border border-gray-200 dark:border-gray-700 8 - hover:bg-gray-50 9 - hover:border-gray-300 10 - dark:hover:bg-gray-700 11 - dark:hover:border-gray-600 12 - cursor-pointer list-none" 13 - > 14 - {{ i "smile" "size-4" }} 15 - </summary> 16 - <div 17 - class="absolute flex left-0 z-10 mt-4 rounded bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700 shadow-lg" 18 - > 19 - {{ range $kind := . }} 20 - <button 21 - id="reactBtn-{{ $kind }}" 22 - class="size-12 hover:bg-gray-100 dark:hover:bg-gray-700" 23 - hx-on:click="this.parentElement.parentElement.removeAttribute('open')" 24 - > 25 - {{ $kind }} 26 - </button> 27 - {{ end }} 28 - </div> 29 - </details> 30 - {{ end }}
+67
appview/pages/templates/repo/fragments/singleTag.html
··· 1 + {{ define "repo/fragments/singleTag" }} 2 + {{ $root := index . 0 }} 3 + {{ $item := index . 1 }} 4 + {{ with $item }} 5 + <div class="md:grid md:grid-cols-12 md:items-start flex flex-col"> 6 + <!-- Header column (top on mobile, left on md+) --> 7 + <div class="md:col-span-2 md:border-r border-b md:border-b-0 border-gray-200 dark:border-gray-700 w-full md:h-full"> 8 + <!-- Mobile layout: horizontal --> 9 + <div class="flex md:hidden flex-col py-2 px-2 text-xl"> 10 + <a href="/{{ $root.RepoInfo.FullName }}/tags/{{ .Name | urlquery }}" class="no-underline hover:underline flex items-center gap-2 font-bold"> 11 + {{ i "tag" "w-4 h-4" }} 12 + {{ .Name }} 13 + </a> 14 + 15 + <div class="flex items-center gap-3 text-gray-500 dark:text-gray-400 text-sm"> 16 + {{ if .Tag }} 17 + <a href="/{{ $root.RepoInfo.FullName }}/commit/{{ .Tag.Target.String }}" 18 + class="no-underline hover:underline text-gray-500 dark:text-gray-400"> 19 + {{ slice .Tag.Target.String 0 8 }} 20 + </a> 21 + 22 + <span class="px-1 text-gray-500 dark:text-gray-400 select-none after:content-['ยท']"></span> 23 + <span>{{ .Tag.Tagger.Name }}</span> 24 + 25 + <span class="px-1 text-gray-500 dark:text-gray-400 select-none after:content-['ยท']"></span> 26 + {{ template "repo/fragments/shortTime" .Tag.Tagger.When }} 27 + {{ end }} 28 + </div> 29 + </div> 30 + 31 + <!-- Desktop layout: vertical and left-aligned --> 32 + <div class="hidden md:block text-left px-2 pb-6"> 33 + <a href="/{{ $root.RepoInfo.FullName }}/tags/{{ .Name | urlquery }}" class="no-underline hover:underline flex items-center gap-2 font-bold"> 34 + {{ i "tag" "w-4 h-4" }} 35 + {{ .Name }} 36 + </a> 37 + <div class="flex flex-grow flex-col text-gray-500 dark:text-gray-400 text-sm"> 38 + {{ if .Tag }} 39 + <a href="/{{ $root.RepoInfo.FullName }}/commit/{{ .Tag.Target.String }}" 40 + class="no-underline hover:underline text-gray-500 dark:text-gray-400 flex items-center gap-2"> 41 + {{ i "git-commit-horizontal" "w-4 h-4" }} 42 + {{ slice .Tag.Target.String 0 8 }} 43 + </a> 44 + <span>{{ .Tag.Tagger.Name }}</span> 45 + {{ template "repo/fragments/time" .Tag.Tagger.When }} 46 + {{ end }} 47 + </div> 48 + </div> 49 + </div> 50 + 51 + <!-- Content column (bottom on mobile, right on md+) --> 52 + <div class="md:col-span-10 px-2 py-3 md:py-0 md:pb-6"> 53 + {{ if .Tag }} 54 + {{ $messageParts := splitN .Tag.Message "\n\n" 2 }} 55 + <p class="font-bold text-lg">{{ index $messageParts 0 }}</p> 56 + {{ if gt (len $messageParts) 1 }} 57 + <p class="cursor-text py-2">{{ nl2br (index $messageParts 1) }}</p> 58 + {{ end }} 59 + {{ template "repo/fragments/artifactList" (list $root .) }} 60 + {{ else }} 61 + <p class="italic text-gray-500 dark:text-gray-400">no message</p> 62 + {{ end }} 63 + </div> 64 + </div> 65 + {{ end }} 66 + {{ end }} 67 +
+3 -3
appview/pages/templates/repo/fragments/splitDiff.html
··· 1 1 {{ define "repo/fragments/splitDiff" }} 2 2 {{ $name := .Id }} 3 - {{- $lineNrStyle := "min-w-[3.5rem] flex-shrink-0 select-none text-right bg-white dark:bg-gray-800" -}} 4 - {{- $linkStyle := "text-gray-400 dark:text-gray-500 hover:underline" -}} 3 + {{- $lineNrStyle := "min-w-[3.5rem] flex-shrink-0 select-none text-right bg-white dark:bg-gray-800 group-target/line:bg-yellow-200/30 group-target/line:dark:bg-yellow-600/30" -}} 4 + {{- $linkStyle := "text-gray-400 dark:text-gray-500 hover:underline group-target/line:text-black group-target/line:dark:text-white" -}} 5 5 {{- $lineNrSepStyle := "pr-2 border-r border-gray-200 dark:border-gray-700" -}} 6 - {{- $containerStyle := "inline-flex w-full items-center target:bg-yellow-200 target:dark:bg-yellow-700 scroll-mt-48" -}} 6 + {{- $containerStyle := "inline-flex w-full items-center target:bg-yellow-200/50 target:dark:bg-yellow-700/50 scroll-mt-48 group/line" -}} 7 7 {{- $emptyStyle := "bg-gray-200/30 dark:bg-gray-700/30" -}} 8 8 {{- $addStyle := "bg-green-100 dark:bg-green-800/30 text-green-700 dark:text-green-400" -}} 9 9 {{- $delStyle := "bg-red-100 dark:bg-red-800/30 text-red-700 dark:text-red-400 " -}}
+3 -3
appview/pages/templates/repo/fragments/unifiedDiff.html
··· 3 3 <div class="overflow-x-auto font-mono leading-normal"><div class="overflow-x-auto"><div class="inline-flex flex-col min-w-full">{{- range .TextFragments -}}<span class="block bg-gray-100 dark:bg-gray-700 text-gray-500 dark:text-gray-400 select-none text-center">&middot;&middot;&middot;</span> 4 4 {{- $oldStart := .OldPosition -}} 5 5 {{- $newStart := .NewPosition -}} 6 - {{- $lineNrStyle := "min-w-[3.5rem] flex-shrink-0 select-none text-right bg-white dark:bg-gray-800 target:bg-yellow-200 target:dark:bg-yellow-600" -}} 7 - {{- $linkStyle := "text-gray-400 dark:text-gray-500 hover:underline" -}} 6 + {{- $lineNrStyle := "min-w-[3.5rem] flex-shrink-0 select-none text-right bg-white dark:bg-gray-800 group-target/line:bg-yellow-200/30 group-target/line:dark:bg-yellow-600/30" -}} 7 + {{- $linkStyle := "text-gray-400 dark:text-gray-500 hover:underline group-target/line:text-black group-target/line:dark:text-white" -}} 8 8 {{- $lineNrSepStyle1 := "" -}} 9 9 {{- $lineNrSepStyle2 := "pr-2 border-r border-gray-200 dark:border-gray-700" -}} 10 - {{- $containerStyle := "inline-flex w-full items-center target:bg-yellow-200 target:dark:bg-yellow-700 scroll-mt-48" -}} 10 + {{- $containerStyle := "inline-flex w-full items-center target:bg-yellow-200/30 target:dark:bg-yellow-700/30 scroll-mt-48 group/line" -}} 11 11 {{- $addStyle := "bg-green-100 dark:bg-green-800/30 text-green-700 dark:text-green-400 " -}} 12 12 {{- $delStyle := "bg-red-100 dark:bg-red-800/30 text-red-700 dark:text-red-400 " -}} 13 13 {{- $ctxStyle := "bg-white dark:bg-gray-800 text-gray-500 dark:text-gray-400" -}}
+7 -3
appview/pages/templates/repo/index.html
··· 254 254 {{ define "attribution" }} 255 255 {{ $commit := index . 0 }} 256 256 {{ $map := index . 1 }} 257 - <span class="flex items-center"> 257 + <span class="flex items-center gap-1"> 258 258 {{ $author := index $map $commit.Author.Email }} 259 259 {{ $coauthors := $commit.CoAuthors }} 260 260 {{ $all := list }} ··· 269 269 {{ end }} 270 270 {{ end }} 271 271 272 - {{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }} 272 + {{ if $author }} 273 + {{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }} 274 + {{ else }} 275 + {{ placeholderAvatar "tiny" }} 276 + {{ end }} 273 277 <a href="{{ if $author }}/{{ $author }}{{ else }}mailto:{{ $commit.Author.Email }}{{ end }}" 274 278 class="no-underline hover:underline"> 275 279 {{ if $author }}{{ resolve $author }}{{ else }}{{ $commit.Author.Name }}{{ end }} ··· 330 334 {{ with $tag }} 331 335 <div> 332 336 <div class="text-base flex items-center gap-2"> 333 - <a href="/{{ $.RepoInfo.FullName }}/tree/{{ .Reference.Name | urlquery }}" 337 + <a href="/{{ $.RepoInfo.FullName }}/tags/{{ .Reference.Name | urlquery }}" 334 338 class="inline no-underline hover:underline dark:text-white"> 335 339 {{ .Reference.Name }} 336 340 </a>
+2 -10
appview/pages/templates/repo/issues/fragments/commentList.html
··· 41 41 {{ define "topLevelComment" }} 42 42 <div class="rounded px-6 py-4 bg-white dark:bg-gray-800 flex gap-2 "> 43 43 <div class="flex-shrink-0"> 44 - <img 45 - src="{{ tinyAvatar .Comment.Did }}" 46 - alt="" 47 - class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900" 48 - /> 44 + {{ template "user/fragments/picLink" (list .Comment.Did "size-8 mr-1") }} 49 45 </div> 50 46 <div class="flex-1 min-w-0"> 51 47 {{ template "repo/issues/fragments/issueCommentHeader" . }} ··· 57 53 {{ define "replyComment" }} 58 54 <div class="py-4 pr-4 w-full mx-auto overflow-hidden flex gap-2 "> 59 55 <div class="flex-shrink-0"> 60 - <img 61 - src="{{ tinyAvatar .Comment.Did }}" 62 - alt="" 63 - class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900" 64 - /> 56 + {{ template "user/fragments/picLink" (list .Comment.Did "size-8 mr-1") }} 65 57 </div> 66 58 <div class="flex-1 min-w-0"> 67 59 {{ template "repo/issues/fragments/issueCommentHeader" . }}
+2 -1
appview/pages/templates/repo/issues/fragments/issueCommentHeader.html
··· 1 1 {{ define "repo/issues/fragments/issueCommentHeader" }} 2 2 <div class="flex flex-wrap items-center gap-2 text-sm text-gray-500 dark:text-gray-400 "> 3 - {{ resolve .Comment.Did }} 3 + {{ $handle := resolve .Comment.Did }} 4 + <a class="text-gray-500 dark:text-gray-400 hover:text-gray-500 dark:hover:text-gray-300" href="/{{ $handle }}">{{ $handle }}</a> 4 5 {{ template "hats" $ }} 5 6 <span class="before:content-['ยท']"></span> 6 7 {{ template "timestamp" . }}
+1 -1
appview/pages/templates/repo/issues/fragments/newComment.html
··· 12 12 <textarea 13 13 id="comment-textarea" 14 14 name="body" 15 - class="w-full p-2 rounded border border-gray-200 dark:border-gray-700" 15 + class="w-full p-2 rounded" 16 16 placeholder="Add to the discussion. Markdown is supported." 17 17 onkeyup="updateCommentForm()" 18 18 rows="5"
+2 -6
appview/pages/templates/repo/issues/fragments/replyIssueCommentPlaceholder.html
··· 1 1 {{ define "repo/issues/fragments/replyIssueCommentPlaceholder" }} 2 2 <div class="py-2 px-6 border-t flex gap-2 items-center border-gray-300 dark:border-gray-700"> 3 3 {{ if .LoggedInUser }} 4 - <img 5 - src="{{ tinyAvatar .LoggedInUser.Did }}" 6 - alt="" 7 - class="rounded-full size-8 mr-1 border-2 border-gray-300 dark:border-gray-700" 8 - /> 4 + {{ template "user/fragments/pic" (list .LoggedInUser.Did "size-8 mr-1") }} 9 5 {{ end }} 10 6 <input 11 - class="w-full p-0 border-none focus:outline-none" 7 + class="w-full p-0 border-none focus:outline-none bg-transparent" 12 8 placeholder="Leave a reply..." 13 9 hx-get="/{{ .RepoInfo.FullName }}/issues/{{ .Issue.IssueId }}/comment/{{ .Comment.Id }}/reply" 14 10 hx-trigger="focus"
+5 -21
appview/pages/templates/repo/issues/issue.html
··· 35 35 {{ if .Issue.Body }} 36 36 <article id="body" class="mt-4 prose dark:prose-invert">{{ .Issue.Body | markdown }}</article> 37 37 {{ end }} 38 - <div class="flex flex-wrap gap-2 items-stretch mt-4"> 39 - {{ template "issueReactions" . }} 38 + <div class="mt-4"> 39 + {{ template "repo/fragments/reactions" 40 + (dict "Reactions" .Reactions 41 + "UserReacted" .UserReacted 42 + "ThreadAt" .Issue.AtUri) }} 40 43 </div> 41 44 </section> 42 45 {{ end }} ··· 106 109 {{ i "loader-circle" "size-3 animate-spin hidden group-[.htmx-request]:inline" }} 107 110 </a> 108 111 {{ end }} 109 - 110 - {{ define "issueReactions" }} 111 - <div class="flex items-center gap-2"> 112 - {{ template "repo/fragments/reactionsPopUp" .OrderedReactionKinds }} 113 - {{ range $kind := .OrderedReactionKinds }} 114 - {{ $reactionData := index $.Reactions $kind }} 115 - {{ 116 - template "repo/fragments/reaction" 117 - (dict 118 - "Kind" $kind 119 - "Count" $reactionData.Count 120 - "IsReacted" (index $.UserReacted $kind) 121 - "ThreadAt" $.Issue.AtUri 122 - "Users" $reactionData.Users) 123 - }} 124 - {{ end }} 125 - </div> 126 - {{ end }} 127 - 128 112 129 113 {{ define "repoAfter" }} 130 114 <div class="flex flex-col gap-4 mt-4">
+4 -4
appview/pages/templates/repo/issues/issues.html
··· 33 33 <div class="flex-1 flex relative"> 34 34 <input 35 35 id="search-q" 36 - class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none focus:border-0 focus:outline-none focus:ring focus:ring-blue-400 ring-inset peer" 36 + class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none peer" 37 37 type="text" 38 38 name="q" 39 39 value="{{ .FilterQuery }}" 40 - placeholder=" " 40 + placeholder="search issues..." 41 41 > 42 42 <a 43 43 href="?state={{ if .FilteringByOpen }}open{{ else }}closed{{ end }}" ··· 48 48 </div> 49 49 <button 50 50 type="submit" 51 - class="p-2 text-gray-400 border rounded-r border-gray-400 dark:border-gray-600" 51 + class="p-2 text-gray-400 border rounded-r border-gray-300 dark:border-gray-600" 52 52 > 53 53 {{ i "search" "w-4 h-4" }} 54 54 </button> ··· 80 80 "Page" .Page 81 81 "TotalCount" .IssueCount 82 82 "BasePath" (printf "/%s/issues" .RepoInfo.FullName) 83 - "QueryParams" (printf "state=%s&q=%s" $state .FilterQuery) 83 + "QueryParams" (queryParams "state" $state "q" .FilterQuery) 84 84 ) }} 85 85 {{ end }} 86 86 {{ end }}
+5 -1
appview/pages/templates/repo/log.html
··· 186 186 {{ end }} 187 187 {{ end }} 188 188 189 - {{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }} 189 + {{ if $author }} 190 + {{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }} 191 + {{ else }} 192 + {{ placeholderAvatar "tiny" }} 193 + {{ end }} 190 194 <a href="{{ if $author }}/{{ $author }}{{ else }}mailto:{{ $commit.Author.Email }}{{ end }}" 191 195 class="no-underline hover:underline"> 192 196 {{ if $author }}{{ resolve $author }}{{ else }}{{ $commit.Author.Name }}{{ end }}
+2 -2
appview/pages/templates/repo/pulls/fragments/pullActions.html
··· 38 38 hx-vals='{"branch": "{{ .BranchDeleteStatus.Branch }}" }' 39 39 hx-swap="none" 40 40 class="btn-flat p-2 flex items-center gap-2 no-underline hover:no-underline group text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300"> 41 - {{ i "git-branch" "w-4 h-4" }} 42 - <span>delete branch</span> 41 + {{ i "git-branch" "w-4 h-4 inline group-[.htmx-request]:hidden" }} 43 42 {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }} 43 + delete branch 44 44 </button> 45 45 {{ end }} 46 46 {{ if and $isPushAllowed $isOpen $isLastRound }}
+5 -16
appview/pages/templates/repo/pulls/fragments/pullHeader.html
··· 63 63 </article> 64 64 {{ end }} 65 65 66 - {{ with .OrderedReactionKinds }} 67 - <div class="flex items-center gap-2 mt-2"> 68 - {{ template "repo/fragments/reactionsPopUp" . }} 69 - {{ range $kind := . }} 70 - {{ $reactionData := index $.Reactions $kind }} 71 - {{ 72 - template "repo/fragments/reaction" 73 - (dict 74 - "Kind" $kind 75 - "Count" $reactionData.Count 76 - "IsReacted" (index $.UserReacted $kind) 77 - "ThreadAt" $.Pull.AtUri 78 - "Users" $reactionData.Users) 79 - }} 80 - {{ end }} 66 + <div class="mt-2"> 67 + {{ template "repo/fragments/reactions" 68 + (dict "Reactions" .Reactions 69 + "UserReacted" .UserReacted 70 + "ThreadAt" .Pull.AtUri) }} 81 71 </div> 82 - {{ end }} 83 72 </section> 84 73 85 74
+1 -1
appview/pages/templates/repo/pulls/fragments/pullNewComment.html
··· 12 12 > 13 13 <textarea 14 14 name="body" 15 - class="w-full p-2 rounded border border-gray-200" 15 + class="w-full p-2 rounded border" 16 16 rows=8 17 17 placeholder="Add to the discussion..."></textarea 18 18 >
+134 -72
appview/pages/templates/repo/pulls/pull.html
··· 22 22 <script> 23 23 (function() { 24 24 const details = document.getElementById('bottomSheet'); 25 + const backdrop = document.getElementById('bottomSheetBackdrop'); 25 26 const isDesktop = () => window.matchMedia('(min-width: 768px)').matches; 26 27 28 + // function to update backdrop 29 + const updateBackdrop = () => { 30 + if (backdrop) { 31 + if (details.open && !isDesktop()) { 32 + backdrop.classList.remove('opacity-0', 'pointer-events-none'); 33 + backdrop.classList.add('opacity-100', 'pointer-events-auto'); 34 + document.body.style.overflow = 'hidden'; 35 + } else { 36 + backdrop.classList.remove('opacity-100', 'pointer-events-auto'); 37 + backdrop.classList.add('opacity-0', 'pointer-events-none'); 38 + document.body.style.overflow = ''; 39 + } 40 + } 41 + }; 42 + 27 43 // close on mobile initially 28 44 if (!isDesktop()) { 29 45 details.open = false; 30 46 } 47 + updateBackdrop(); // initialize backdrop 31 48 32 49 // prevent closing on desktop 33 50 details.addEventListener('toggle', function(e) { 34 51 if (isDesktop() && !this.open) { 35 52 this.open = true; 36 53 } 54 + updateBackdrop(); 37 55 }); 38 56 39 57 const mediaQuery = window.matchMedia('(min-width: 768px)'); ··· 45 63 // switched to mobile - close 46 64 details.open = false; 47 65 } 66 + updateBackdrop(); 48 67 }); 68 + 69 + // close when clicking backdrop 70 + if (backdrop) { 71 + backdrop.addEventListener('click', () => { 72 + if (!isDesktop()) { 73 + details.open = false; 74 + } 75 + }); 76 + } 49 77 })(); 50 78 </script> 51 79 {{ end }} ··· 83 111 {{ end }} 84 112 {{ end }} 85 113 114 + {{ define "resize-grip" }} 115 + {{ $id := index . 0 }} 116 + {{ $target := index . 1 }} 117 + {{ $direction := index . 2 }} 118 + <div id="{{ $id }}" 119 + data-resizer="vertical" 120 + data-target="{{ $target }}" 121 + data-direction="{{ $direction }}" 122 + class="resizer-vertical hidden md:flex w-4 sticky top-12 max-h-screen flex-col items-center justify-center group"> 123 + <div class="w-1 h-16 group-hover:h-24 group-[.resizing]:h-24 transition-all rounded-full bg-gray-400 dark:bg-gray-500 group-hover:bg-gray-500 group-hover:dark:bg-gray-400"></div> 124 + </div> 125 + {{ end }} 126 + 86 127 {{ define "diffLayout" }} 87 128 {{ $diff := index . 0 }} 88 129 {{ $opts := index . 1 }} ··· 96 137 </section> 97 138 </div> 98 139 140 + {{ template "resize-grip" (list "resize-files" "files" "before") }} 141 + 99 142 <!-- main content --> 100 - <div class="flex-1 min-w-0 sticky top-12 pb-12"> 143 + <div id="diff-files" class="flex-1 min-w-0 sticky top-12 pb-12"> 101 144 {{ template "diffFiles" (list $diff $opts) }} 102 145 </div> 103 146 147 + {{ template "resize-grip" (list "resize-subs" "subs" "after") }} 148 + 104 149 <!-- right panel --> 105 150 {{ template "subsPanel" $ }} 106 151 </div> ··· 109 154 {{ define "subsPanel" }} 110 155 {{ $root := index . 
2 }} 111 156 {{ $pull := $root.Pull }} 112 - 113 157 <!-- backdrop overlay - only visible on mobile when open --> 114 - <div class=" 115 - fixed inset-0 bg-black/50 z-50 md:hidden opacity-0 116 - pointer-events-none transition-opacity duration-300 117 - has-[~#subs_details[open]]:opacity-100 has-[~#subs_details[open]]:pointer-events-auto"> 118 - </div> 158 + <div id="bottomSheetBackdrop" class="fixed inset-0 bg-black/50 md:hidden opacity-0 pointer-events-none transition-opacity duration-300 z-40"></div> 119 159 <!-- right panel - bottom sheet on mobile, side panel on desktop --> 120 160 <div id="subs" class="fixed bottom-0 left-0 right-0 z-50 w-full md:static md:z-auto md:max-h-screen md:sticky md:top-12 overflow-hidden"> 121 - <details open id="bottomSheet" class="group rounded-t-2xl md:rounded-t drop-shadow-lg md:drop-shadow-none"> 161 + <details open id="bottomSheet" class="rounded-t-2xl md:rounded-t drop-shadow-lg md:drop-shadow-none group/panel"> 122 162 <summary class=" 123 163 flex gap-4 items-center justify-between 124 164 rounded-t-2xl md:rounded-t cursor-pointer list-none p-4 md:h-12 ··· 127 167 md:bg-white md:dark:bg-gray-800 128 168 drop-shadow-sm 129 169 border-t md:border-x md:border-t-0 border-gray-200 dark:border-gray-700"> 130 - <h2 class="">Submissions</h2> 170 + <h2 class="">History</h2> 131 171 {{ template "subsPanelSummary" $ }} 132 172 </summary> 133 173 <div class="max-h-[85vh] md:max-h-[calc(100vh-3rem-3rem)] w-full flex flex-col-reverse gap-4 overflow-y-auto bg-slate-100 dark:bg-gray-900 md:bg-transparent"> ··· 140 180 {{ define "subsPanelSummary" }} 141 181 {{ $root := index . 2 }} 142 182 {{ $pull := $root.Pull }} 143 - {{ $latest := $pull.LastRoundNumber }} 183 + {{ $rounds := len $pull.Submissions }} 184 + {{ $comments := $pull.TotalComments }} 144 185 <div class="flex items-center gap-2 text-sm"> 145 - <!--{{ if $root.IsInterdiff }} 146 - <span> 147 - viewing interdiff of 148 - <span class="font-mono">#{{ $root.ActiveRound }}</span> 149 - and 150 - <span class="font-mono">#{{ sub $root.ActiveRound 1 }}</span> 151 - </span> 152 - {{ else }} 153 - {{ if ne $root.ActiveRound $latest }} 154 - <span>(outdated)</span> 155 - <span class="before:content-['ยท']"></span> 156 - <a class="underline" href="/{{ $root.RepoInfo.FullName }}/pulls/{{ $root.Pull.PullId }}/round/{{ $latest }}?{{ safeUrl $root.DiffOpts.Encode }}"> 157 - view latest 158 - </a> 159 - {{ end }} 160 - {{ end }}--> 186 + <span> 187 + {{ $rounds }} round{{ if ne $rounds 1 }}s{{ end }} 188 + </span> 189 + <span class="select-none before:content-['\00B7']"></span> 190 + <span> 191 + {{ $comments }} comment{{ if ne $comments 1 }}s{{ end }} 192 + </span> 193 + 161 194 <span class="md:hidden inline"> 162 195 <span class="inline group-open:hidden">{{ i "chevron-up" "size-4" }}</span> 163 196 <span class="hidden group-open:inline">{{ i "chevron-down" "size-4" }}</span> ··· 171 204 172 205 {{ define "subsToggle" }} 173 206 <style> 174 - /* Mobile: full width */ 175 207 #subsToggle:checked ~ div div#subs { 176 208 width: 100%; 177 209 margin-left: 0; ··· 180 212 #subsToggle:checked ~ div label[for="subsToggle"] .hide-toggle { display: flex; } 181 213 #subsToggle:not(:checked) ~ div label[for="subsToggle"] .hide-toggle { display: none; } 182 214 183 - /* Desktop: 25vw with left margin */ 184 215 @media (min-width: 768px) { 185 216 #subsToggle:checked ~ div div#subs { 186 217 width: 25vw; 187 - margin-left: 1rem; 218 + max-width: 50vw; 188 219 } 189 - /* Unchecked state */ 190 220 #subsToggle:not(:checked) ~ div 
div#subs { 191 221 width: 0; 192 222 display: none; 193 223 margin-left: 0; 194 224 } 225 + #subsToggle:not(:checked) ~ div div#resize-subs { 226 + display: none; 227 + } 195 228 } 196 229 </style> 197 230 <label title="Toggle review panel" for="subsToggle" class="hidden md:flex items-center justify-end rounded cursor-pointer"> ··· 217 250 {{ $idx := index . 1 }} 218 251 {{ $lastIdx := index . 2 }} 219 252 {{ $root := index . 3 }} 220 - <div class="{{ if eq $item.RoundNumber 0 }}rounded-b border-t-0{{ else }}rounded{{ end }} border border-gray-200 dark:border-gray-700 w-full shadow-sm bg-gray-50 dark:bg-gray-800/50"> 253 + {{ $round := $item.RoundNumber }} 254 + <div class=" 255 + w-full shadow-sm bg-gray-50 dark:bg-gray-900 border-2 border-t-0 256 + {{ if eq $round 0 }}rounded-b{{ else }}rounded{{ end }} 257 + {{ if eq $round $root.ActiveRound }} 258 + border-blue-200 dark:border-blue-700 259 + {{ else }} 260 + border-gray-200 dark:border-gray-700 261 + {{ end }} 262 + "> 221 263 {{ template "submissionHeader" $ }} 222 264 {{ template "submissionComments" $ }} 223 - 224 - {{ if eq $lastIdx $item.RoundNumber }} 225 - {{ block "mergeStatus" $root }} {{ end }} 226 - {{ block "resubmitStatus" $root }} {{ end }} 227 - {{ end }} 228 - 229 - {{ if $root.LoggedInUser }} 230 - {{ template "repo/pulls/fragments/pullActions" 231 - (dict 232 - "LoggedInUser" $root.LoggedInUser 233 - "Pull" $root.Pull 234 - "RepoInfo" $root.RepoInfo 235 - "RoundNumber" $item.RoundNumber 236 - "MergeCheck" $root.MergeCheck 237 - "ResubmitCheck" $root.ResubmitCheck 238 - "BranchDeleteStatus" $root.BranchDeleteStatus 239 - "Stack" $root.Stack) }} 240 - {{ end }} 241 265 </div> 242 266 {{ end }} 243 267 ··· 249 273 <div class=" 250 274 {{ if eq $round 0 }}rounded-b{{ else }}rounded{{ end }} 251 275 px-6 py-4 pr-2 pt-2 252 - {{ if eq $root.ActiveRound $round }} 253 - bg-blue-100 dark:bg-blue-900 border-b border-blue-200 dark:border-blue-700 276 + bg-white dark:bg-gray-800 277 + {{ if eq $round $root.ActiveRound }} 278 + border-t-2 border-blue-200 dark:border-blue-700 254 279 {{ else }} 255 - bg-white dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700 280 + border-b-2 border-gray-200 dark:border-gray-700 256 281 {{ end }} 257 282 flex gap-2 sticky top-0 z-20"> 258 283 <!-- left column: just profile picture --> 259 284 <div class="flex-shrink-0 pt-2"> 260 - <img 261 - src="{{ tinyAvatar $root.Pull.OwnerDid }}" 262 - alt="" 263 - class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900" 264 - /> 285 + {{ template "user/fragments/picLink" (list $root.Pull.OwnerDid "size-8") }} 265 286 </div> 266 287 <!-- right column --> 267 288 <div class="flex-1 min-w-0 flex flex-col gap-1"> ··· 282 303 {{ $round := $item.RoundNumber }} 283 304 <div class="flex gap-2 items-center justify-between mb-1"> 284 305 <span class="inline-flex items-center gap-2 text-sm text-gray-500 dark:text-gray-400 pt-2"> 285 - {{ resolve $root.Pull.OwnerDid }} submitted 286 - <span class="px-2 py-0.5 {{ if eq $root.ActiveRound $round }}text-white bg-blue-600 dark:bg-blue-500 border-blue-700 dark:border-blue-600{{ else }}text-black dark:text-white bg-gray-100 dark:bg-gray-700 border-gray-300 dark:border-gray-600{{ end }} rounded font-mono text-xs border"> 306 + {{ $handle := resolve $root.Pull.OwnerDid }} 307 + <a class="text-gray-500 dark:text-gray-400 hover:text-gray-500 dark:hover:text-gray-300" href="/{{ $handle }}">{{ $handle }}</a> 308 + submitted 309 + <span class="px-2 py-0.5 text-black dark:text-white bg-gray-100 
dark:bg-gray-700 border-gray-300 dark:border-gray-600 rounded font-mono text-xs border"> 287 310 #{{ $round }} 288 311 </span> 289 312 <span class="select-none before:content-['\00B7']"></span> ··· 505 528 506 529 {{ define "submissionComments" }} 507 530 {{ $item := index . 0 }} 508 - <div class="relative ml-10 border-l-2 border-gray-200 dark:border-gray-700"> 509 - {{ range $item.Comments }} 510 - {{ template "submissionComment" . }} 511 - {{ end }} 512 - </div> 531 + {{ $idx := index . 1 }} 532 + {{ $lastIdx := index . 2 }} 533 + {{ $root := index . 3 }} 534 + {{ $round := $item.RoundNumber }} 535 + {{ $c := len $item.Comments }} 536 + <details class="relative ml-10 group/comments" {{ if or (eq $c 0) (eq $root.ActiveRound $round) }}open{{ end }}> 537 + <summary class="cursor-pointer list-none"> 538 + <div class="hidden group-open/comments:block absolute -left-8 top-0 bottom-0 w-16 transition-colors flex items-center justify-center group/border z-4"> 539 + <div class="absolute left-1/2 -translate-x-1/2 top-0 bottom-0 w-0.5 group-open/comments:bg-gray-200 dark:group-open/comments:bg-gray-700 group-hover/border:bg-gray-400 dark:group-hover/border:bg-gray-500 transition-colors"> </div> 540 + </div> 541 + <div class="group-open/comments:hidden block relative group/summary py-4"> 542 + <div class="absolute -left-8 top-0 bottom-0 w-16 transition-colors flex items-center justify-center z-4"> 543 + <div class="absolute left-1/2 -translate-x-1/2 h-1/3 top-0 bottom-0 w-0.5 bg-gray-200 dark:bg-gray-700 group-hover/summary:bg-gray-400 dark:group-hover/summary:bg-gray-500 transition-colors"></div> 544 + </div> 545 + <span class="text-gray-500 dark:text-gray-400 text-sm group-hover/summary:text-gray-600 dark:group-hover/summary:text-gray-300 transition-colors flex items-center gap-2 -ml-2 relative"> 546 + {{ i "circle-plus" "size-4 z-5" }} 547 + expand {{ $c }} comment{{ if ne $c 1 }}s{{ end }} 548 + </span> 549 + </div> 550 + </summary> 551 + <div> 552 + {{ range $item.Comments }} 553 + {{ template "submissionComment" . 
}} 554 + {{ end }} 555 + </div> 556 + 557 + <div class="relative -ml-10"> 558 + {{ if eq $lastIdx $item.RoundNumber }} 559 + {{ block "mergeStatus" $root }} {{ end }} 560 + {{ block "resubmitStatus" $root }} {{ end }} 561 + {{ end }} 562 + </div> 563 + <div class="relative -ml-10 bg-gray-50 dark:bg-gray-900"> 564 + {{ if $root.LoggedInUser }} 565 + {{ template "repo/pulls/fragments/pullActions" 566 + (dict 567 + "LoggedInUser" $root.LoggedInUser 568 + "Pull" $root.Pull 569 + "RepoInfo" $root.RepoInfo 570 + "RoundNumber" $item.RoundNumber 571 + "MergeCheck" $root.MergeCheck 572 + "ResubmitCheck" $root.ResubmitCheck 573 + "BranchDeleteStatus" $root.BranchDeleteStatus 574 + "Stack" $root.Stack) }} 575 + {{ end }} 576 + </div> 577 + </details> 513 578 {{ end }} 514 579 515 580 {{ define "submissionComment" }} 516 581 <div id="comment-{{.ID}}" class="flex gap-2 -ml-4 py-4 w-full mx-auto"> 517 582 <!-- left column: profile picture --> 518 - <div class="flex-shrink-0"> 519 - <img 520 - src="{{ tinyAvatar .OwnerDid }}" 521 - alt="" 522 - class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900" 523 - /> 583 + <div class="flex-shrink-0 h-fit relative"> 584 + {{ template "user/fragments/picLink" (list .OwnerDid "size-8") }} 524 585 </div> 525 586 <!-- right column: name and body in two rows --> 526 587 <div class="flex-1 min-w-0"> 527 588 <!-- Row 1: Author and timestamp --> 528 589 <div class="text-sm text-gray-500 dark:text-gray-400 flex items-center gap-1"> 529 - <span>{{ resolve .OwnerDid }}</span> 590 + {{ $handle := resolve .OwnerDid }} 591 + <a class="text-gray-500 dark:text-gray-400 hover:text-gray-500 dark:hover:text-gray-300" href="/{{ $handle }}">{{ $handle }}</a> 530 592 <span class="before:content-['ยท']"></span> 531 593 <a class="text-gray-500 dark:text-gray-400 hover:text-gray-500 dark:hover:text-gray-300" href="#comment-{{.ID}}"> 532 - {{ template "repo/fragments/time" .Created }} 594 + {{ template "repo/fragments/shortTime" .Created }} 533 595 </a> 534 596 </div> 535 597 <!-- Row 2: Body text -->
+4 -4
appview/pages/templates/repo/pulls/pulls.html
··· 39 39 <div class="flex-1 flex relative"> 40 40 <input 41 41 id="search-q" 42 - class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none focus:border-0 focus:outline-none focus:ring focus:ring-blue-400 ring-inset peer" 42 + class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none peer" 43 43 type="text" 44 44 name="q" 45 45 value="{{ .FilterQuery }}" 46 - placeholder=" " 46 + placeholder="search pulls..." 47 47 > 48 48 <a 49 49 href="?state={{ .FilteringBy.String }}" ··· 54 54 </div> 55 55 <button 56 56 type="submit" 57 - class="p-2 text-gray-400 border rounded-r border-gray-400 dark:border-gray-600" 57 + class="p-2 text-gray-400 border rounded-r border-gray-300 dark:border-gray-600" 58 58 > 59 59 {{ i "search" "w-4 h-4" }} 60 60 </button> ··· 166 166 "Page" .Page 167 167 "TotalCount" .PullCount 168 168 "BasePath" (printf "/%s/pulls" .RepoInfo.FullName) 169 - "QueryParams" (printf "state=%s&q=%s" .FilteringBy.String .FilterQuery) 169 + "QueryParams" (queryParams "state" .FilteringBy.String "q" .FilterQuery) 170 170 ) }} 171 171 {{ end }} 172 172 {{ end }}
+1 -4
appview/pages/templates/repo/settings/access.html
··· 32 32 {{ $handle := resolve .Did }} 33 33 <div class="border border-gray-200 dark:border-gray-700 rounded p-4"> 34 34 <div class="flex items-center gap-3"> 35 - <img 36 - src="{{ fullAvatar $handle }}" 37 - alt="{{ $handle }}" 38 - class="rounded-full h-10 w-10 border border-gray-300 dark:border-gray-600 flex-shrink-0"/> 35 + {{ template "user/fragments/picLink" (list .Did "size-10") }} 39 36 40 37 <div class="flex-1 min-w-0"> 41 38 <a href="/{{ $handle }}" class="block truncate">
+1 -2
appview/pages/templates/repo/settings/fragments/sidebar.html
··· 1 1 {{ define "repo/settings/fragments/sidebar" }} 2 2 {{ $active := .Tab }} 3 - {{ $tabs := .Tabs }} 4 3 <div class="sticky top-2 grid grid-cols-1 rounded border border-gray-200 dark:border-gray-700 divide-y divide-gray-200 dark:divide-gray-700 shadow-inner"> 5 4 {{ $activeTab := "bg-white dark:bg-gray-700 drop-shadow-sm" }} 6 5 {{ $inactiveTab := "bg-gray-100 dark:bg-gray-800" }} 7 - {{ range $tabs }} 6 + {{ range const.RepoSettingsTabs }} 8 7 <a href="/{{ $.RepoInfo.FullName }}/settings?tab={{.Name}}" class="no-underline hover:no-underline hover:bg-gray-100/25 hover:dark:bg-gray-700/25"> 9 8 <div class="flex gap-3 items-center p-2 {{ if eq .Name $active }} {{ $activeTab }} {{ else }} {{ $inactiveTab }} {{ end }}"> 10 9 {{ i .Icon "size-4" }}
+16
appview/pages/templates/repo/tag.html
··· 1 + {{ define "title" }} 2 + tags ยท {{ .RepoInfo.FullName }} 3 + {{ end }} 4 + 5 + {{ define "extrameta" }} 6 + {{ $title := printf "tags &middot; %s" .RepoInfo.FullName }} 7 + {{ $url := printf "https://tangled.org/%s/tag/%s" .RepoInfo.FullName .Tag.Name }} 8 + 9 + {{ template "repo/fragments/og" (dict "RepoInfo" .RepoInfo "Title" $title "Url" $url) }} 10 + {{ end }} 11 + 12 + {{ define "repoContent" }} 13 + <section class="flex flex-col py-2 gap-12 md:gap-0"> 14 + {{ template "repo/fragments/singleTag" (list $ .Tag ) }} 15 + </section> 16 + {{ end }}
+1 -129
appview/pages/templates/repo/tags.html
··· 14 14 <h2 class="mb-4 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold">tags</h2> 15 15 <div class="flex flex-col py-2 gap-12 md:gap-0"> 16 16 {{ range .Tags }} 17 - <div class="md:grid md:grid-cols-12 md:items-start flex flex-col"> 18 - <!-- Header column (top on mobile, left on md+) --> 19 - <div class="md:col-span-2 md:border-r border-b md:border-b-0 border-gray-200 dark:border-gray-700 w-full md:h-full"> 20 - <!-- Mobile layout: horizontal --> 21 - <div class="flex md:hidden flex-col py-2 px-2 text-xl"> 22 - <a href="/{{ $.RepoInfo.FullName }}/tree/{{ .Name | urlquery }}" class="no-underline hover:underline flex items-center gap-2 font-bold"> 23 - {{ i "tag" "w-4 h-4" }} 24 - {{ .Name }} 25 - </a> 26 - 27 - <div class="flex items-center gap-3 text-gray-500 dark:text-gray-400 text-sm"> 28 - {{ if .Tag }} 29 - <a href="/{{ $.RepoInfo.FullName }}/commit/{{ .Tag.Target.String }}" 30 - class="no-underline hover:underline text-gray-500 dark:text-gray-400"> 31 - {{ slice .Tag.Target.String 0 8 }} 32 - </a> 33 - 34 - <span class="px-1 text-gray-500 dark:text-gray-400 select-none after:content-['·']"></span> 35 - <span>{{ .Tag.Tagger.Name }}</span> 36 - 37 - <span class="px-1 text-gray-500 dark:text-gray-400 select-none after:content-['·']"></span> 38 - {{ template "repo/fragments/shortTime" .Tag.Tagger.When }} 39 - {{ end }} 40 - </div> 41 - </div> 42 - 43 - <!-- Desktop layout: vertical and left-aligned --> 44 - <div class="hidden md:block text-left px-2 pb-6"> 45 - <a href="/{{ $.RepoInfo.FullName }}/tree/{{ .Name | urlquery }}" class="no-underline hover:underline flex items-center gap-2 font-bold"> 46 - {{ i "tag" "w-4 h-4" }} 47 - {{ .Name }} 48 - </a> 49 - <div class="flex flex-grow flex-col text-gray-500 dark:text-gray-400 text-sm"> 50 - {{ if .Tag }} 51 - <a href="/{{ $.RepoInfo.FullName }}/commit/{{ .Tag.Target.String }}" 52 - class="no-underline hover:underline text-gray-500 dark:text-gray-400 flex items-center gap-2"> 53 - {{ i "git-commit-horizontal" "w-4 h-4" }} 54 - {{ slice .Tag.Target.String 0 8 }} 55 - </a> 56 - <span>{{ .Tag.Tagger.Name }}</span> 57 - {{ template "repo/fragments/time" .Tag.Tagger.When }} 58 - {{ end }} 59 - </div> 60 - </div> 61 - </div> 62 - 63 - <!-- Content column (bottom on mobile, right on md+) --> 64 - <div class="md:col-span-10 px-2 py-3 md:py-0 md:pb-6"> 65 - {{ if .Tag }} 66 - {{ $messageParts := splitN .Tag.Message "\n\n" 2 }} 67 - <p class="font-bold text-lg">{{ index $messageParts 0 }}</p> 68 - {{ if gt (len $messageParts) 1 }} 69 - <p class="cursor-text py-2">{{ nl2br (index $messageParts 1) }}</p> 70 - {{ end }} 71 - {{ block "artifacts" (list $ .) }} {{ end }} 72 - {{ else }} 73 - <p class="italic text-gray-500 dark:text-gray-400">no message</p> 74 - {{ end }} 75 - </div> 76 - </div> 17 + {{ template "repo/fragments/singleTag" (list $ . ) }} 77 18 {{ else }} 78 19 <p class="text-center text-gray-400 dark:text-gray-500 p-4"> 79 20 This repository does not contain any tags. ··· 89 30 {{ block "dangling" . }} {{ end }} 90 31 </section> 91 32 {{ end }} 92 - {{ end }} 93 - 94 - {{ define "artifacts" }} 95 - {{ $root := index . 0 }} 96 - {{ $tag := index . 
1 }} 97 - {{ $isPushAllowed := $root.RepoInfo.Roles.IsPushAllowed }} 98 - {{ $artifacts := index $root.ArtifactMap $tag.Tag.Hash }} 99 - 100 - <h2 class="my-4 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold">artifacts</h2> 101 - <div class="flex flex-col rounded border border-gray-200 dark:border-gray-700"> 102 - {{ range $artifact := $artifacts }} 103 - {{ $args := dict "LoggedInUser" $root.LoggedInUser "RepoInfo" $root.RepoInfo "Artifact" $artifact }} 104 - {{ template "repo/fragments/artifact" $args }} 105 - {{ end }} 106 - <div id="artifact-git-source" class="flex items-center justify-between p-2 border-b border-gray-200 dark:border-gray-700"> 107 - <div id="left-side" class="flex items-center gap-2 min-w-0 max-w-[60%]"> 108 - {{ i "archive" "w-4 h-4" }} 109 - <a href="/{{ $root.RepoInfo.FullName }}/archive/{{ pathEscape (print "refs/tags/" $tag.Name) }}" class="no-underline hover:no-underline"> 110 - Source code (.tar.gz) 111 - </a> 112 - </div> 113 - </div> 114 - {{ if $isPushAllowed }} 115 - {{ block "uploadArtifact" (list $root $tag) }} {{ end }} 116 - {{ end }} 117 - </div> 118 - {{ end }} 119 - 120 - {{ define "uploadArtifact" }} 121 - {{ $root := index . 0 }} 122 - {{ $tag := index . 1 }} 123 - {{ $unique := $tag.Tag.Target.String }} 124 - <form 125 - id="upload-{{$unique}}" 126 - method="post" 127 - enctype="multipart/form-data" 128 - hx-post="/{{ $root.RepoInfo.FullName }}/tags/{{ $tag.Name | urlquery }}/upload" 129 - hx-on::after-request="if(event.detail.successful) this.reset()" 130 - hx-disabled-elt="#upload-btn-{{$unique}}" 131 - hx-swap="beforebegin" 132 - hx-target="this" 133 - class="flex items-center gap-2 px-2"> 134 - <div class="flex-grow"> 135 - <input type="file" 136 - name="artifact" 137 - required 138 - class="block py-2 px-0 w-full border-none 139 - text-black dark:text-white 140 - bg-white dark:bg-gray-800 141 - file:mr-4 file:px-2 file:py-2 142 - file:rounded file:border-0 143 - file:text-sm file:font-medium 144 - file:text-gray-700 file:dark:text-gray-300 145 - file:bg-gray-200 file:dark:bg-gray-700 146 - file:hover:bg-gray-100 file:hover:dark:bg-gray-600 147 - "> 148 - </input> 149 - </div> 150 - <div class="flex justify-end"> 151 - <button 152 - type="submit" 153 - class="btn gap-2" 154 - id="upload-btn-{{$unique}}" 155 - title="Upload artifact"> 156 - {{ i "upload" "w-4 h-4" }} 157 - <span class="hidden md:inline">upload</span> 158 - </button> 159 - </div> 160 - </form> 161 33 {{ end }} 162 34 163 35 {{ define "dangling" }}
+4
appview/pages/templates/repo/tree.html
··· 52 52 </div> 53 53 </div> 54 54 55 + {{ if .LastCommitInfo }} 56 + {{ template "repo/fragments/lastCommitPanel" $ }} 57 + {{ end }} 58 + 55 59 {{ range .Files }} 56 60 <div class="grid grid-cols-12 gap-4 items-center py-1"> 57 61 <div class="col-span-8 md:col-span-4">
+1 -1
appview/pages/templates/strings/fragments/form.html
··· 31 31 name="content" 32 32 id="content-textarea" 33 33 wrap="off" 34 - class="w-full dark:bg-gray-700 dark:text-white dark:border-gray-600 dark:placeholder-gray-400" 34 + class="w-full dark:bg-gray-700 dark:text-white dark:border-gray-600 dark:placeholder-gray-400 font-mono" 35 35 rows="20" 36 36 spellcheck="false" 37 37 placeholder="Paste your string here!"
+62 -99
appview/pages/templates/user/completeSignup.html
··· 1 - {{ define "user/completeSignup" }} 2 - <!doctype html> 3 - <html lang="en" class="dark:bg-gray-900"> 4 - <head> 5 - <meta charset="UTF-8" /> 6 - <meta 7 - name="viewport" 8 - content="width=device-width, initial-scale=1.0" 9 - /> 10 - <meta 11 - property="og:title" 12 - content="complete signup · tangled" 13 - /> 14 - <meta 15 - property="og:url" 16 - content="https://tangled.org/complete-signup" 17 - /> 18 - <meta 19 - property="og:description" 20 - content="complete your signup for tangled" 21 - /> 22 - <script src="/static/htmx.min.js"></script> 23 - <link rel="manifest" href="/pwa-manifest.json" /> 24 - <link 25 - rel="stylesheet" 26 - href="/static/tw.css?{{ cssContentHash }}" 27 - type="text/css" 28 - /> 29 - <title>complete signup &middot; tangled</title> 30 - </head> 31 - <body class="flex items-center justify-center min-h-screen"> 32 - <main class="max-w-md px-6 -mt-4"> 33 - <h1 class="flex place-content-center text-2xl font-semibold italic dark:text-white" > 34 - {{ template "fragments/logotype" }} 35 - </h1> 36 - <h2 class="text-center text-xl italic dark:text-white"> 37 - tightly-knit social coding. 38 - </h2> 39 - <form 40 - class="mt-4 max-w-sm mx-auto flex flex-col gap-4" 41 - hx-post="/signup/complete" 42 - hx-swap="none" 43 - hx-disabled-elt="#complete-signup-button" 44 - > 45 - <div class="flex flex-col"> 46 - <label for="code">verification code</label> 47 - <input 48 - type="text" 49 - id="code" 50 - name="code" 51 - tabindex="1" 52 - required 53 - placeholder="tngl-sh-foo-bar" 54 - /> 55 - <span class="text-sm text-gray-500 mt-1"> 56 - Enter the code sent to your email. 57 - </span> 58 - </div> 1 + {{ define "title" }} complete signup {{ end }} 2 + 3 + {{ define "content" }} 4 + <form 5 + class="mt-4 max-w-sm mx-auto flex flex-col gap-4 group" 6 + hx-post="/signup/complete" 7 + hx-swap="none" 8 + hx-disabled-elt="#complete-signup-button" 9 + > 10 + <div class="flex flex-col"> 11 + <label for="code">verification code</label> 12 + <input 13 + type="text" 14 + id="code" 15 + name="code" 16 + tabindex="1" 17 + required 18 + placeholder="tngl-sh-foo-bar" 19 + /> 20 + <span class="text-sm text-gray-500 mt-1"> 21 + Enter the code sent to your email. 22 + </span> 23 + </div> 24 59 - 60 - <div class="flex flex-col"> 61 - <label for="username">username</label> 62 - <input 63 - type="text" 64 - id="username" 65 - name="username" 66 - tabindex="2" 67 - required 68 - placeholder="jason" 69 - /> 70 - <span class="text-sm text-gray-500 mt-1"> 71 - Your complete handle will be of the form <code>user.tngl.sh</code>. 72 - </span> 73 - </div> 25 + <div class="flex flex-col"> 26 + <label for="username">username</label> 27 + <input 28 + type="text" 29 + id="username" 30 + name="username" 31 + tabindex="2" 32 + required 33 + placeholder="jason" 34 + /> 35 + <span class="text-sm text-gray-500 mt-1"> 36 + Your complete handle will be of the form <code>user.tngl.sh</code>. 37 + </span> 38 + </div> 74 39 75 - <div class="flex flex-col"> 76 - <label for="password">password</label> 77 - <input 78 - type="password" 79 - id="password" 80 - name="password" 81 - tabindex="3" 82 - required 83 - /> 84 - <span class="text-sm text-gray-500 mt-1"> 85 - Choose a strong password for your account. 86 - </span> 87 - </div> 40 + <div class="flex flex-col"> 41 + <label for="password">password</label> 42 + <input 43 + type="password" 44 + id="password" 45 + name="password" 46 + tabindex="3" 47 + required 48 + /> 49 + <span class="text-sm text-gray-500 mt-1"> 50 + Choose a strong password for your account. 
51 + </span> 52 + </div> 88 53 89 - <button 90 - class="btn-create w-full my-2 mt-6 text-base" 91 - type="submit" 92 - id="complete-signup-button" 93 - tabindex="4" 94 - > 95 - <span>complete signup</span> 96 - </button> 97 - </form> 98 - <p id="signup-error" class="error w-full"></p> 99 - <p id="signup-msg" class="dark:text-white w-full"></p> 100 - </main> 101 - </body> 102 - </html> 54 + <button 55 + class="btn-create w-full my-2 mt-6 text-base" 56 + type="submit" 57 + id="complete-signup-button" 58 + tabindex="4" 59 + > 60 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 61 + <span class="inline group-[.htmx-request]:hidden">complete signup</span> 62 + </button> 63 + </form> 64 + <p id="signup-error" class="error w-full"></p> 65 + <p id="signup-msg" class="dark:text-white w-full"></p> 103 66 {{ end }}
+54
appview/pages/templates/user/fragments/editAvatar.html
··· 1 + {{ define "user/fragments/editAvatar" }} 2 + <form 3 + hx-post="/profile/avatar" 4 + hx-encoding="multipart/form-data" 5 + hx-swap="none" 6 + class="flex flex-col gap-2 group/form"> 7 + <label for="avatar-file" class="uppercase p-0"> 8 + Upload or Remove Avatar 9 + </label> 10 + <p class="text-sm text-gray-500 dark:text-gray-400">Upload a new image (PNG or JPEG, max 1MB) or remove your current avatar.</p> 11 + <input 12 + type="file" 13 + id="avatar-file" 14 + name="avatar" 15 + accept="image/png,image/jpeg" 16 + required 17 + class="block w-full text-sm text-gray-500 dark:text-gray-400 18 + file:mr-4 file:py-2 file:px-4 19 + file:rounded file:border-0 20 + file:text-sm file:font-semibold 21 + file:bg-gray-100 file:text-gray-700 22 + dark:file:bg-gray-700 dark:file:text-gray-300 23 + hover:file:bg-gray-200 dark:hover:file:bg-gray-600" /> 24 + <div class="flex flex-col gap-2 pt-2"> 25 + <button type="submit" class="btn w-full flex items-center justify-center gap-2"> 26 + {{ i "upload" "size-4 inline group-[.htmx-request]/form:hidden" }} 27 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]/form:inline" }} 28 + upload 29 + </button> 30 + {{ if .Profile.Avatar }} 31 + <button 32 + type="button" 33 + hx-delete="/profile/avatar" 34 + hx-confirm="Are you sure you want to remove your profile picture?" 35 + hx-swap="none" 36 + class="btn w-full flex items-center justify-center gap-2 group"> 37 + {{ i "trash-2" "size-4 inline group-[.htmx-request]:hidden" }} 38 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 39 + remove avatar 40 + </button> 41 + {{ end }} 42 + <button 43 + id="cancel-avatar-btn" 44 + type="button" 45 + popovertarget="avatar-upload-modal" 46 + popovertargetaction="hide" 47 + class="btn text-red-500 dark:text-red-400 w-full flex items-center justify-center gap-2"> 48 + {{ i "x" "size-4" }} 49 + cancel 50 + </button> 51 + </div> 52 + <div id="avatar-error" class="text-red-500 dark:text-red-400 text-sm"></div> 53 + </form> 54 + {{ end }}
+2 -1
appview/pages/templates/user/fragments/editBio.html
··· 110 110 {{ $id := index . 0 }} 111 111 {{ $stat := index . 1 }} 112 112 <select class="stat-group w-full p-1 border border-gray-200 bg-white dark:bg-gray-800 dark:text-white dark:border-gray-700 text-sm" id="stat{{$id}}" name="stat{{$id}}"> 113 - <option value="">choose stat</option> 113 + <option value="">Choose Stat</option> 114 114 {{ $stats := assoc 115 115 "merged-pull-request-count" "Merged PR Count" 116 116 "closed-pull-request-count" "Closed PR Count" ··· 118 118 "open-issue-count" "Open Issue Count" 119 119 "closed-issue-count" "Closed Issue Count" 120 120 "repository-count" "Repository Count" 121 + "star-count" "Star Count" 121 122 }} 122 123 {{ range $s := $stats }} 123 124 {{ $value := index $s 0 }}
+6 -3
appview/pages/templates/user/fragments/follow.html
··· 13 13 hx-swap="outerHTML" 14 14 > 15 15 {{ if eq .FollowStatus.String "IsNotFollowing" }} 16 - {{ i "user-round-plus" "w-4 h-4" }} follow 16 + {{ i "user-round-plus" "size-4 inline group-[.htmx-request]:hidden" }} 17 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 18 + follow 17 19 {{ else }} 18 - {{ i "user-round-minus" "w-4 h-4" }} unfollow 20 + {{ i "user-round-minus" "size-4 inline group-[.htmx-request]:hidden" }} 21 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 22 + unfollow 19 23 {{ end }} 20 - {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }} 21 24 </button> 22 25 {{ end }}
+11
appview/pages/templates/user/fragments/pic.html
··· 1 + {{ define "user/fragments/pic" }} 2 + {{ $did := index . 0 }} 3 + {{ $classes := index . 1 }} 4 + <img 5 + src="{{ tinyAvatar $did }}" 6 + alt="" 7 + class="rounded-full border border-gray-300 dark:border-gray-700 {{ $classes }}" 8 + /> 9 + {{ end }} 10 + 11 +
+15
appview/pages/templates/user/fragments/picLink.html
··· 1 + {{ define "user/fragments/picLink" }} 2 + {{ $did := index . 0 }} 3 + {{ $classes := index . 1 }} 4 + {{ $handle := resolve $did }} 5 + <a href="/{{ $handle }}" title="{{ $handle }}"> 6 + <img 7 + src="{{ tinyAvatar $did }}" 8 + alt="" 9 + class="rounded-full border border-gray-300 dark:border-gray-700 {{ $classes }}" 10 + /> 11 + </a> 12 + {{ end }} 13 + 14 + 15 +
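These two new fragments centralize the avatar <img> markup that several templates previously inlined. The call sites elsewhere in this change pass the DID first, followed by extra sizing classes, e.g.:

{{ template "user/fragments/pic" (list .LoggedInUser.Did "size-8 mr-1") }}
{{ template "user/fragments/picLink" (list $root.Pull.OwnerDid "size-8") }}

picLink additionally resolves the DID to a handle and wraps the image in a link to that profile.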
+39 -22
appview/pages/templates/user/fragments/profileCard.html
··· 3 3 <div class="grid grid-cols-3 md:grid-cols-1 gap-1 items-center"> 4 4 <div id="avatar" class="col-span-1 flex justify-center items-center"> 5 5 <div class="w-3/4 aspect-square relative"> 6 - <img class="absolute inset-0 w-full h-full object-cover rounded-full p-2" src="{{ fullAvatar .UserDid }}" /> 6 + <img class="absolute inset-0 w-full h-full object-cover rounded-full p-2" src="{{ profileAvatarUrl .Profile "" }}" /> 7 + {{ if eq .FollowStatus.String "IsSelf" }} 8 + <button 9 + class="absolute bottom-2 right-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-full p-2 hover:bg-gray-100 dark:hover:bg-gray-700 transition-colors" 10 + popovertarget="avatar-upload-modal" 11 + popovertargetaction="toggle" 12 + title="Upload avatar"> 13 + {{ i "camera" "w-4 h-4" }} 14 + </button> 15 + {{ end }} 7 16 </div> 8 17 </div> 18 + <div 19 + id="avatar-upload-modal" 20 + popover 21 + class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50"> 22 + {{ template "user/fragments/editAvatar" . }} 23 + </div> 9 24 <div class="col-span-2"> 10 25 <div class="flex items-center flex-row flex-nowrap gap-2"> 11 26 <p title="{{ $userIdent }}" ··· 13 28 {{ $userIdent }} 14 29 </p> 15 30 {{ with .Profile }} 16 - {{ if .Pronouns }} 17 - <p class="text-gray-500 dark:text-gray-400">{{ .Pronouns }}</p> 18 - {{ end }} 31 + {{ if .Pronouns }} 32 + <p class="text-gray-500 dark:text-gray-400">{{ .Pronouns }}</p> 33 + {{ end }} 19 34 {{ end }} 20 35 </div> 21 36 ··· 29 44 {{ with .Profile }} 30 45 31 46 {{ if .Description }} 32 - <p class="text-base pb-4 md:pb-2">{{ .Description }}</p> 47 + <p class="text-base pb-4 md:pb-2">{{ .Description }}</p> 33 48 {{ end }} 34 49 35 50 <div class="hidden md:block"> ··· 45 60 {{ end }} 46 61 {{ if .IncludeBluesky }} 47 62 <div class="flex items-center gap-2"> 48 - <span class="flex-shrink-0">{{ template "user/fragments/bluesky" "w-4 h-4 text-black dark:text-white" 49 - }}</span> 63 + <span class="flex-shrink-0">{{ template "user/fragments/bluesky" "w-4 h-4 text-black dark:text-white" }}</span> 50 64 <a id="bluesky-link" href="https://bsky.app/profile/{{ $.UserDid }}">{{ $userIdent }}</a> 51 65 </div> 52 66 {{ end }} 53 67 {{ range $link := .Links }} 54 - {{ if $link }} 55 - <div class="flex items-center gap-2"> 56 - <span class="flex-shrink-0">{{ i "link" "size-4" }}</span> 57 - <a href="{{ $link }}">{{ $link }}</a> 58 - </div> 59 - {{ end }} 68 + {{ if $link }} 69 + <div class="flex items-center gap-2"> 70 + <span class="flex-shrink-0">{{ i "link" "size-4" }}</span> 71 + <a href="{{ $link }}">{{ $link }}</a> 72 + </div> 73 + {{ end }} 60 74 {{ end }} 61 75 {{ if not $profile.IsStatsEmpty }} 62 76 <div class="flex items-center justify-evenly gap-2 py-2"> 63 77 {{ range $stat := .Stats }} 64 - {{ if $stat.Kind }} 65 - <div class="flex flex-col items-center gap-2"> 66 - <span class="text-xl font-bold">{{ $stat.Value }}</span> 67 - <span>{{ $stat.Kind.String }}</span> 68 - </div> 69 - {{ end }} 78 + {{ if $stat.Kind }} 79 + <div class="flex flex-col items-center gap-2"> 80 + <span class="text-xl font-bold">{{ $stat.Value }}</span> 81 + <span>{{ $stat.Kind.String }}</span> 82 + </div> 83 + {{ end }} 70 84 {{ end }} 71 85 </div> 72 86 {{ end }} ··· 75 89 76 90 <div class="flex mt-2 items-center gap-2"> 77 91 {{ if ne .FollowStatus.String "IsSelf" }} 78 - {{ template "user/fragments/follow" . 
}} 92 + {{ template "user/fragments/follow" . }} 79 93 {{ else }} 80 - <button id="editBtn" class="btn w-full flex items-center gap-2 group" hx-target="#profile-bio" 81 - hx-get="/profile/edit-bio" hx-swap="innerHTML"> 94 + <button id="editBtn" 95 + class="btn w-full flex items-center gap-2 group" 96 + hx-target="#profile-bio" 97 + hx-get="/profile/edit-bio" 98 + hx-swap="innerHTML"> 82 99 {{ i "pencil" "w-4 h-4" }} 83 100 edit 84 101 {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+111 -132
appview/pages/templates/user/login.html
··· 1 - {{ define "user/login" }} 2 - <!doctype html> 3 - <html lang="en" class="dark:bg-gray-900"> 4 - <head> 5 - <meta charset="UTF-8" /> 6 - <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 7 - <meta property="og:title" content="login · tangled" /> 8 - <meta property="og:url" content="https://tangled.org/login" /> 9 - <meta property="og:description" content="login to for tangled" /> 10 - <script src="/static/htmx.min.js"></script> 11 - <link rel="manifest" href="/pwa-manifest.json" /> 12 - <link rel="stylesheet" href="/static/tw.css?{{ cssContentHash }}" type="text/css" /> 13 - <title>login &middot; tangled</title> 14 - </head> 15 - <body class="flex items-center justify-center min-h-screen"> 16 - <main class="max-w-md px-7 mt-4"> 17 - <h1 class="flex place-content-center text-3xl font-semibold italic dark:text-white" > 18 - {{ template "fragments/logotype" }} 19 - </h1> 20 - <h2 class="text-center text-xl italic dark:text-white"> 21 - tightly-knit social coding. 22 - </h2> 1 + {{ define "title" }} login {{ end }} 23 2 24 - {{ if .AddAccount }} 25 - <div class="flex gap-2 my-4 bg-blue-50 dark:bg-blue-900/30 border border-blue-300 dark:border-sky-800 rounded px-3 py-2 text-blue-600 dark:text-blue-300"> 26 - <span class="py-1">{{ i "user-plus" "w-4 h-4" }}</span> 27 - <div> 28 - <h5 class="font-medium">Add another account</h5> 29 - <p class="text-sm">Sign in with a different account to add it to your account list.</p> 30 - </div> 31 - </div> 32 - {{ end }} 3 + {{ define "content" }} 4 + {{ if .AddAccount }} 5 + <div class="flex gap-2 my-4 bg-blue-50 dark:bg-blue-900/30 border border-blue-300 dark:border-sky-800 rounded px-3 py-2 text-blue-600 dark:text-blue-300"> 6 + <span class="py-1">{{ i "user-plus" "w-4 h-4" }}</span> 7 + <div> 8 + <h5 class="font-medium">Add another account</h5> 9 + <p class="text-sm">Sign in with a different account to add it to your account list.</p> 10 + </div> 11 + </div> 12 + {{ end }} 33 13 34 - {{ if and .LoggedInUser .LoggedInUser.Accounts }} 35 - {{ $accounts := .LoggedInUser.Accounts }} 36 - {{ if $accounts }} 37 - <div class="my-4 border border-gray-200 dark:border-gray-700 rounded overflow-hidden"> 38 - <div class="px-3 py-2 bg-gray-50 dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700"> 39 - <span class="text-xs text-gray-500 dark:text-gray-400 uppercase tracking-wide font-medium">Saved accounts</span> 40 - </div> 41 - <div class="divide-y divide-gray-200 dark:divide-gray-700"> 42 - {{ range $accounts }} 43 - <div class="flex items-center justify-between px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"> 44 - <button 45 - type="button" 46 - hx-post="/account/switch" 47 - hx-vals='{"did": "{{ .Did }}"}' 48 - hx-swap="none" 49 - class="flex items-center gap-2 flex-1 text-left min-w-0" 50 - > 51 - <img src="{{ tinyAvatar .Did }}" alt="" class="rounded-full h-8 w-8 flex-shrink-0 border border-gray-300 dark:border-gray-700" /> 52 - <div class="flex flex-col min-w-0"> 53 - <span class="text-sm font-medium dark:text-white truncate">{{ .Did | resolve | truncateAt30 }}</span> 54 - <span class="text-xs text-gray-500 dark:text-gray-400">Click to switch</span> 55 - </div> 56 - </button> 57 - <button 58 - type="button" 59 - hx-delete="/account/{{ .Did }}" 60 - hx-swap="none" 61 - class="p-1 text-gray-400 hover:text-red-500 dark:hover:text-red-400 flex-shrink-0" 62 - title="Remove account" 63 - > 64 - {{ i "x" "w-4 h-4" }} 65 - </button> 66 - </div> 67 - {{ end }} 68 - </div> 69 - </div> 70 - {{ end }} 71 - {{ end }} 14 + {{ if 
and .LoggedInUser .LoggedInUser.Accounts }} 15 + {{ $accounts := .LoggedInUser.Accounts }} 16 + {{ if $accounts }} 17 + <div class="my-4 border border-gray-200 dark:border-gray-700 rounded overflow-hidden"> 18 + <div class="px-3 py-2 bg-gray-50 dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700"> 19 + <span class="text-xs text-gray-500 dark:text-gray-400 uppercase tracking-wide font-medium">Saved accounts</span> 20 + </div> 21 + <div class="divide-y divide-gray-200 dark:divide-gray-700"> 22 + {{ range $accounts }} 23 + <div class="flex items-center justify-between px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"> 24 + <button 25 + type="button" 26 + hx-post="/account/switch" 27 + hx-vals='{"did": "{{ .Did }}"}' 28 + hx-swap="none" 29 + class="flex items-center gap-2 flex-1 text-left min-w-0" 30 + > 31 + {{ template "user/fragments/pic" (list .Did "size-8") }} 32 + <div class="flex flex-col min-w-0"> 33 + <span class="text-sm font-medium dark:text-white truncate">{{ .Did | resolve | truncateAt30 }}</span> 34 + <span class="text-xs text-gray-500 dark:text-gray-400">Click to switch</span> 35 + </div> 36 + </button> 37 + <button 38 + type="button" 39 + hx-delete="/account/{{ .Did }}" 40 + hx-swap="none" 41 + class="p-1 text-gray-400 hover:text-red-500 dark:hover:text-red-400 flex-shrink-0" 42 + title="Remove account" 43 + > 44 + {{ i "x" "w-4 h-4" }} 45 + </button> 46 + </div> 47 + {{ end }} 48 + </div> 49 + </div> 50 + {{ end }} 51 + {{ end }} 72 52 73 - <form 74 - class="mt-4" 75 - hx-post="/login" 76 - hx-swap="none" 77 - hx-disabled-elt="#login-button" 78 - > 79 - <div class="flex flex-col"> 80 - <label for="handle">handle</label> 81 - <input 82 - autocapitalize="none" 83 - autocorrect="off" 84 - autocomplete="username" 85 - type="text" 86 - id="handle" 87 - name="handle" 88 - tabindex="1" 89 - required 90 - placeholder="akshay.tngl.sh" 91 - /> 92 - <span class="text-sm text-gray-500 mt-1"> 93 - Use your <a href="https://atproto.com">AT Protocol</a> 94 - handle to log in. If you're unsure, this is likely 95 - your Tangled (<code>.tngl.sh</code>) or <a href="https://bsky.app">Bluesky</a> (<code>.bsky.social</code>) account. 96 - </span> 97 - </div> 98 - <input type="hidden" name="return_url" value="{{ .ReturnUrl }}"> 99 - <input type="hidden" name="add_account" value="{{ if .AddAccount }}true{{ end }}"> 53 + <form 54 + class="mt-4 group" 55 + hx-post="/login" 56 + hx-swap="none" 57 + hx-disabled-elt="#login-button" 58 + > 59 + <div class="flex flex-col"> 60 + <label for="handle">handle</label> 61 + <input 62 + autocapitalize="none" 63 + autocorrect="off" 64 + autocomplete="username" 65 + type="text" 66 + id="handle" 67 + name="handle" 68 + tabindex="1" 69 + required 70 + placeholder="akshay.tngl.sh" 71 + /> 72 + <span class="text-sm text-gray-500 mt-1"> 73 + Use your <a href="https://atproto.com">AT Protocol</a> 74 + handle to log in. If you're unsure, this is likely 75 + your Tangled (<code>.tngl.sh</code>) or <a href="https://bsky.app">Bluesky</a> (<code>.bsky.social</code>) account. 
76 + </span> 77 + </div> 78 + <input type="hidden" name="return_url" value="{{ .ReturnUrl }}"> 79 + <input type="hidden" name="add_account" value="{{ if .AddAccount }}true{{ end }}"> 100 80 101 - <button 102 - class="btn w-full my-2 mt-6 text-base " 103 - type="submit" 104 - id="login-button" 105 - tabindex="3" 106 - > 107 - <span>login</span> 108 - </button> 109 - </form> 110 - {{ if .ErrorCode }} 111 - <div class="flex gap-2 my-2 bg-red-50 dark:bg-red-900 border border-red-500 rounded drop-shadow-sm px-3 py-2 text-red-500 dark:text-red-300"> 112 - <span class="py-1">{{ i "circle-alert" "w-4 h-4" }}</span> 113 - <div> 114 - <h5 class="font-medium">Login error</h5> 115 - <p class="text-sm"> 116 - {{ if eq .ErrorCode "access_denied" }} 117 - You have not authorized the app. 118 - {{ else if eq .ErrorCode "session" }} 119 - Server failed to create user session. 120 - {{ else if eq .ErrorCode "max_accounts" }} 121 - You have reached the maximum of 20 linked accounts. Please remove an account before adding a new one. 122 - {{ else }} 123 - Internal Server error. 124 - {{ end }} 125 - Please try again. 126 - </p> 127 - </div> 128 - </div> 129 - {{ end }} 130 - <p class="text-sm text-gray-500"> 131 - Don't have an account? <a href="/signup" class="underline">Create an account</a> on Tangled now! 132 - </p> 81 + <button 82 + class="btn w-full my-2 mt-6 text-base" 83 + type="submit" 84 + id="login-button" 85 + tabindex="3" 86 + > 87 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 88 + <span class="inline group-[.htmx-request]:hidden">login</span> 89 + </button> 90 + </form> 91 + {{ if .ErrorCode }} 92 + <div class="flex gap-2 my-2 bg-red-50 dark:bg-red-900 border border-red-500 rounded drop-shadow-sm px-3 py-2 text-red-500 dark:text-red-300"> 93 + <span class="py-1">{{ i "circle-alert" "w-4 h-4" }}</span> 94 + <div> 95 + <h5 class="font-medium">Login error</h5> 96 + <p class="text-sm"> 97 + {{ if eq .ErrorCode "access_denied" }} 98 + You have not authorized the app. 99 + {{ else if eq .ErrorCode "session" }} 100 + Server failed to create user session. 101 + {{ else if eq .ErrorCode "max_accounts" }} 102 + You have reached the maximum of 20 linked accounts. Please remove an account before adding a new one. 103 + {{ else }} 104 + Internal Server error. 105 + {{ end }} 106 + Please try again. 107 + </p> 108 + </div> 109 + </div> 110 + {{ end }} 111 + <p class="text-sm text-gray-500"> 112 + Don't have an account? <a href="/signup" class="underline">Create an account</a> on Tangled now! 113 + </p> 133 114 134 - <p id="login-msg" class="error w-full"></p> 135 - </main> 136 - </body> 137 - </html> 115 + <p id="login-msg" class="error w-full"></p> 138 116 {{ end }} 117 +
+4 -2
appview/pages/templates/user/settings/emails.html
··· 62 62 hx-swap="none" 63 63 class="flex flex-col gap-2" 64 64 > 65 - <p class="uppercase p-0">ADD EMAIL</p> 65 + <label for="email-address" class="uppercase p-0"> 66 + add email 67 + </label> 66 68 <p class="text-sm text-gray-500 dark:text-gray-400">Commits using this email will be associated with your profile.</p> 67 69 <input 68 70 type="email" ··· 91 93 <div id="settings-emails-error" class="text-red-500 dark:text-red-400"></div> 92 94 <div id="settings-emails-success" class="text-green-500 dark:text-green-400"></div> 93 95 </form> 94 - {{ end }} 96 + {{ end }}
+1 -1
appview/pages/templates/user/settings/fragments/keyListing.html
··· 19 19 <button 20 20 class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group" 21 21 title="Delete key" 22 - hx-delete="/settings/keys?name={{urlquery $key.Name}}&rkey={{urlquery $key.Rkey}}" 22 + hx-delete="/settings/keys?name={{urlquery $key.Name}}&rkey={{urlquery $key.Rkey}}&key={{urlquery $key.Key}}" 23 23 hx-swap="none" 24 24 hx-confirm="Are you sure you want to delete the key {{ $key.Name }}?" 25 25 >
+2 -3
appview/pages/templates/user/settings/fragments/sidebar.html
··· 1 1 {{ define "user/settings/fragments/sidebar" }} 2 2 {{ $active := .Tab }} 3 - {{ $tabs := .Tabs }} 4 3 <div class="sticky top-2 grid grid-cols-1 rounded border border-gray-200 dark:border-gray-700 divide-y divide-gray-200 dark:divide-gray-700 shadow-inner"> 5 4 {{ $activeTab := "bg-white dark:bg-gray-700 drop-shadow-sm" }} 6 5 {{ $inactiveTab := "bg-gray-100 dark:bg-gray-800" }} 7 - {{ range $tabs }} 6 + {{ range const.UserSettingsTabs }} 8 7 <a href="/settings/{{.Name}}" class="no-underline hover:no-underline hover:bg-gray-100/25 hover:dark:bg-gray-700/25"> 9 8 <div class="flex gap-3 items-center p-2 {{ if eq .Name $active }} {{ $activeTab }} {{ else }} {{ $inactiveTab }} {{ end }}"> 10 9 {{ i .Icon "size-4" }} ··· 13 12 </a> 14 13 {{ end }} 15 14 </div> 16 - {{ end }} 15 + {{ end }}
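Both settings sidebars (repo and user) now read their tab lists from a shared const namespace rather than a per-handler .Tabs value. One way such a namespace could be exposed to the templates, purely as a sketch with assumed names and placeholder entries; the actual registration is not shown in this diff:

package pages

import "html/template"

// Tab mirrors the fields the sidebar templates read (.Name, .Icon).
type Tab struct {
	Name string
	Icon string
}

// A niladic "const" template func would let templates write
// const.RepoSettingsTabs or const.UserSettingsTabs; entries here are placeholders.
var funcMap = template.FuncMap{
	"const": func() map[string][]Tab {
		return map[string][]Tab{
			"RepoSettingsTabs": {{Name: "general", Icon: "settings"}, {Name: "access", Icon: "users"}},
			"UserSettingsTabs": {{Name: "keys", Icon: "key"}, {Name: "emails", Icon: "mail"}},
		}
	},
}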
+4 -2
appview/pages/templates/user/settings/keys.html
··· 21 21 <div class="col-span-1 md:col-span-2"> 22 22 <h2 class="text-sm pb-2 uppercase font-bold">SSH Keys</h2> 23 23 <p class="text-gray-500 dark:text-gray-400"> 24 - SSH public keys added here will be broadcasted to knots that you are a member of, 24 + SSH public keys added here will be broadcasted to knots that you are a member of, 25 25 allowing you to push to repositories there. 26 26 </p> 27 27 </div> ··· 63 63 hx-swap="none" 64 64 class="flex flex-col gap-2" 65 65 > 66 - <p class="uppercase p-0">ADD SSH KEY</p> 66 + <label for="key-name" class="uppercase p-0"> 67 + add ssh key 68 + </label> 67 69 <p class="text-sm text-gray-500 dark:text-gray-400">SSH keys allow you to push to repositories in knots you're a member of.</p> 68 70 <input 69 71 type="text"
+43 -60
appview/pages/templates/user/signup.html
··· 1 - {{ define "user/signup" }} 2 - <!doctype html> 3 - <html lang="en" class="dark:bg-gray-900"> 4 - <head> 5 - <meta charset="UTF-8" /> 6 - <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 7 - <meta property="og:title" content="signup · tangled" /> 8 - <meta property="og:url" content="https://tangled.org/signup" /> 9 - <meta property="og:description" content="sign up for tangled" /> 10 - <script src="/static/htmx.min.js"></script> 11 - <link rel="manifest" href="/pwa-manifest.json" /> 12 - <link rel="stylesheet" href="/static/tw.css?{{ cssContentHash }}" type="text/css" /> 13 - <title>sign up &middot; tangled</title> 1 + {{ define "title" }} signup {{ end }} 14 2 15 - <script src="https://challenges.cloudflare.com/turnstile/v0/api.js" async defer></script> 16 - </head> 17 - <body class="flex items-center justify-center min-h-screen"> 18 - <main class="max-w-md px-6 -mt-4"> 19 - <h1 class="flex place-content-center text-2xl font-semibold italic dark:text-white" > 20 - {{ template "fragments/logotype" }} 21 - </h1> 22 - <h2 class="text-center text-xl italic dark:text-white">tightly-knit social coding.</h2> 23 - <form 24 - class="mt-4 max-w-sm mx-auto" 25 - hx-post="/signup" 26 - hx-swap="none" 27 - hx-disabled-elt="#signup-button" 28 - > 29 - <div class="flex flex-col mt-2"> 30 - <label for="email">email</label> 31 - <input 32 - type="email" 33 - id="email" 34 - name="email" 35 - tabindex="4" 36 - required 37 - placeholder="jason@bourne.co" 38 - /> 39 - </div> 40 - <span class="text-sm text-gray-500 mt-1"> 41 - You will receive an email with an invite code. Enter your 42 - invite code, desired username, and password in the next 43 - page to complete your registration. 44 - </span> 45 - <div class="w-full mt-4 text-center"> 46 - <div class="cf-turnstile" data-sitekey="{{ .CloudflareSiteKey }}" data-size="flexible"></div> 47 - </div> 48 - <button class="btn text-base w-full my-2 mt-6" type="submit" id="signup-button" tabindex="7" > 49 - <span>join now</span> 50 - </button> 51 - <p class="text-sm text-gray-500"> 52 - Already have an AT Protocol account? <a href="/login" class="underline">Login to Tangled</a>. 3 + {{ define "extrameta" }} 4 + <script src="https://challenges.cloudflare.com/turnstile/v0/api.js" async defer></script> 5 + {{ end }} 6 + 7 + {{ define "content" }} 8 + <form 9 + class="mt-4 max-w-sm mx-auto group" 10 + hx-post="/signup" 11 + hx-swap="none" 12 + hx-disabled-elt="#signup-button" 13 + > 14 + <div class="flex flex-col mt-2"> 15 + <label for="email">email</label> 16 + <input 17 + type="email" 18 + id="email" 19 + name="email" 20 + tabindex="4" 21 + required 22 + placeholder="jason@bourne.co" 23 + /> 24 + </div> 25 + <span class="text-sm text-gray-500 mt-1"> 26 + You will receive an email with an invite code. Enter your 27 + invite code, desired username, and password in the next 28 + page to complete your registration. 29 + </span> 30 + <div class="w-full mt-4 text-center"> 31 + <div class="cf-turnstile" data-sitekey="{{ .CloudflareSiteKey }}" data-size="flexible"></div> 32 + </div> 33 + <button class="btn text-base w-full my-2 mt-6" type="submit" id="signup-button" tabindex="7" > 34 + {{ i "loader-circle" "size-4 animate-spin hidden group-[.htmx-request]:inline" }} 35 + <span class="inline group-[.htmx-request]:hidden">join now</span> 36 + </button> 37 + <p class="text-sm text-gray-500"> 38 + Already have an AT Protocol account? <a href="/login" class="underline">Login to Tangled</a>. 
39 + </p> 54 40 55 - <p id="signup-msg" class="error w-full"></p> 56 - <p class="text-sm text-gray-500 pt-4"> 57 - By signing up, you agree to our <a href="/terms" class="underline">Terms of Service</a> and <a href="/privacy" class="underline">Privacy Policy</a>. 58 - </p> 59 - </form> 60 - </main> 61 - </body> 62 - </html> 41 + <p id="signup-msg" class="error w-full"></p> 42 + <p class="text-sm text-gray-500 pt-4"> 43 + By signing up, you agree to our <a href="/terms" class="underline">Terms of Service</a> and <a href="/privacy" class="underline">Privacy Policy</a>. 44 + </p> 45 + </form> 63 46 {{ end }}
+4 -4
appview/pipelines/pipelines.go
··· 219 219 } 220 220 221 221 scheme := "wss" 222 - // if p.config.Core.Dev { 223 - // scheme = "ws" 224 - // } 222 + if p.config.Core.Dev { 223 + scheme = "ws" 224 + } 225 225 226 226 url := scheme + "://" + strings.Join([]string{spindle, "logs", knot, rkey, workflow}, "/") 227 227 l = l.With("url", url) ··· 378 378 r, 379 379 oauth.WithService(f.Spindle), 380 380 oauth.WithLxm(tangled.PipelineCancelPipelineNSID), 381 - oauth.WithDev(false), 381 + oauth.WithDev(p.config.Core.Dev), 382 382 oauth.WithTimeout(time.Second*30), // workflow cleanup usually takes time 383 383 ) 384 384
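The restored dev-mode branch above picks the scheme for the spindle log stream, and the joined URL a few lines below then takes the plain-ws form in development. A small self-contained sketch with hypothetical spindle, knot, rkey, and workflow values (not output from the real service):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Mirrors the URL construction shown in the diff; all values are made up.
	scheme := "ws" // p.config.Core.Dev == true; "wss" otherwise
	url := scheme + "://" + strings.Join([]string{"spindle.example.com", "logs", "knot.example.com", "3kabc123xyz", "build.yml"}, "/")
	fmt.Println(url) // ws://spindle.example.com/logs/knot.example.com/3kabc123xyz/build.yml
}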
+27 -19
appview/pulls/opengraph.go
··· 128 128 } 129 129 130 130 // Split stats area: left side for status/stats (80%), right side for dolly (20%) 131 - statusStatsArea, dollyArea := statsArea.Split(true, 80) 131 + statusArea, dollyArea := statsArea.Split(true, 80) 132 132 133 133 // Draw status and stats 134 - statsBounds := statusStatsArea.Img.Bounds() 134 + statsBounds := statusArea.Img.Bounds() 135 135 statsX := statsBounds.Min.X + 60 // left padding 136 136 statsY := statsBounds.Min.Y 137 137 ··· 157 157 } else { 158 158 statusIcon = "git-pull-request-closed" 159 159 statusText = "closed" 160 - statusColor = color.RGBA{128, 128, 128, 255} // gray 160 + statusColor = color.RGBA{52, 58, 64, 255} // dark gray 161 161 } 162 162 163 - statusIconSize := 36 163 + statusTextWidth := statusArea.TextWidth(statusText, textSize) 164 + badgePadding := 12 165 + badgeHeight := int(textSize) + (badgePadding * 2) 166 + badgeWidth := iconSize + badgePadding + statusTextWidth + (badgePadding * 2) 167 + cornerRadius := 8 168 + badgeX := 60 169 + badgeY := 0 170 + 171 + statusArea.DrawRoundedRect(badgeX, badgeY, badgeWidth, badgeHeight, cornerRadius, statusColor) 164 172 165 - // Draw icon with status color 166 - err = statusStatsArea.DrawLucideIcon(statusIcon, statsX, statsY+iconBaselineOffset-statusIconSize/2+5, statusIconSize, statusColor) 173 + whiteColor := color.RGBA{255, 255, 255, 255} 174 + iconX := statsX + badgePadding 175 + iconY := statsY + (badgeHeight-iconSize)/2 176 + err = statusArea.DrawLucideIcon(statusIcon, iconX, iconY, iconSize, whiteColor) 167 177 if err != nil { 168 178 log.Printf("failed to draw status icon: %v", err) 169 179 } 170 180 171 - // Draw text with status color 172 - textX := statsX + statusIconSize + 12 173 - statusTextSize := 32.0 174 - err = statusStatsArea.DrawTextAt(statusText, textX, statsY+iconBaselineOffset, statusColor, statusTextSize, ogcard.Middle, ogcard.Left) 181 + textX := statsX + badgePadding + iconSize + badgePadding 182 + textY := statsY + (badgeHeight-int(textSize))/2 - 5 183 + err = statusArea.DrawTextAt(statusText, textX, textY, whiteColor, textSize, ogcard.Top, ogcard.Left) 175 184 if err != nil { 176 185 log.Printf("failed to draw status text: %v", err) 177 186 } 178 187 179 - statusTextWidth := len(statusText) * 20 180 - currentX := statsX + statusIconSize + 12 + statusTextWidth + 40 188 + currentX := statsX + badgeWidth + 50 181 189 182 190 // Draw comment count 183 - err = statusStatsArea.DrawLucideIcon("message-square", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor) 191 + err = statusArea.DrawLucideIcon("message-square", currentX, iconY, iconSize, iconColor) 184 192 if err != nil { 185 193 log.Printf("failed to draw comment icon: %v", err) 186 194 } ··· 190 198 if commentCount == 1 { 191 199 commentText = "1 comment" 192 200 } 193 - err = statusStatsArea.DrawTextAt(commentText, currentX, statsY+iconBaselineOffset, iconColor, textSize, ogcard.Middle, ogcard.Left) 201 + err = statusArea.DrawTextAt(commentText, currentX, textY, iconColor, textSize, ogcard.Top, ogcard.Left) 194 202 if err != nil { 195 203 log.Printf("failed to draw comment text: %v", err) 196 204 } ··· 199 207 currentX += commentTextWidth + 40 200 208 201 209 // Draw files changed 202 - err = statusStatsArea.DrawLucideIcon("static/icons/file-diff", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor) 210 + err = statusArea.DrawLucideIcon("file-diff", currentX, iconY, iconSize, iconColor) 203 211 if err != nil { 204 212 log.Printf("failed to draw file diff icon: %v", err) 205 
213 } ··· 209 217 if filesChanged == 1 { 210 218 filesText = "1 file" 211 219 } 212 - err = statusStatsArea.DrawTextAt(filesText, currentX, statsY+iconBaselineOffset, iconColor, textSize, ogcard.Middle, ogcard.Left) 220 + err = statusArea.DrawTextAt(filesText, currentX, textY, iconColor, textSize, ogcard.Top, ogcard.Left) 213 221 if err != nil { 214 222 log.Printf("failed to draw files text: %v", err) 215 223 } ··· 220 228 // Draw additions (green +) 221 229 greenColor := color.RGBA{34, 139, 34, 255} 222 230 additionsText := fmt.Sprintf("+%d", diffStats.Insertions) 223 - err = statusStatsArea.DrawTextAt(additionsText, currentX, statsY+iconBaselineOffset, greenColor, textSize, ogcard.Middle, ogcard.Left) 231 + err = statusArea.DrawTextAt(additionsText, currentX, textY, greenColor, textSize, ogcard.Top, ogcard.Left) 224 232 if err != nil { 225 233 log.Printf("failed to draw additions text: %v", err) 226 234 } ··· 231 239 // Draw deletions (red -) right next to additions 232 240 redColor := color.RGBA{220, 20, 60, 255} 233 241 deletionsText := fmt.Sprintf("-%d", diffStats.Deletions) 234 - err = statusStatsArea.DrawTextAt(deletionsText, currentX, statsY+iconBaselineOffset, redColor, textSize, ogcard.Middle, ogcard.Left) 242 + err = statusArea.DrawTextAt(deletionsText, currentX, textY, redColor, textSize, ogcard.Top, ogcard.Left) 235 243 if err != nil { 236 244 log.Printf("failed to draw deletions text: %v", err) 237 245 } ··· 254 262 openedDate := pull.Created.Format("Jan 2, 2006") 255 263 metaText := fmt.Sprintf("opened by %s ยท %s", authorHandle, openedDate) 256 264 257 - err = statusStatsArea.DrawTextAt(metaText, statsX, labelY, iconColor, labelSize, ogcard.Top, ogcard.Left) 265 + err = statusArea.DrawTextAt(metaText, statsX, labelY, iconColor, labelSize, ogcard.Top, ogcard.Left) 258 266 if err != nil { 259 267 log.Printf("failed to draw metadata: %v", err) 260 268 }
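A quick worked example of the badge geometry introduced above may help when tweaking the layout; the numbers below are illustrative assumptions (textSize, iconSize and the measured text width come from the rendering context in the real handler), not values taken from it.

```go
package main

import "fmt"

func main() {
	// Assumed values for illustration only.
	textSize := 32.0
	iconSize := 36
	statusTextWidth := 96 // e.g. measured width of "merged" at this text size
	badgePadding := 12

	// Same formulas as the handler above.
	badgeHeight := int(textSize) + badgePadding*2                            // 56
	badgeWidth := iconSize + badgePadding + statusTextWidth + badgePadding*2 // 168

	// Icon and text are vertically centred within the badge
	// (the handler additionally offsets these by the stats area origin).
	iconY := (badgeHeight - iconSize) / 2      // 10
	textY := (badgeHeight-int(textSize))/2 - 5 // 7

	fmt.Println(badgeWidth, badgeHeight, iconY, textY)
}
```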
+32 -38
appview/pulls/pulls.go
··· 27 27 "tangled.org/core/appview/notify" 28 28 "tangled.org/core/appview/oauth" 29 29 "tangled.org/core/appview/pages" 30 - "tangled.org/core/appview/pages/markup/sanitizer" 30 + "tangled.org/core/appview/pages/markup" 31 31 "tangled.org/core/appview/pages/repoinfo" 32 32 "tangled.org/core/appview/pagination" 33 33 "tangled.org/core/appview/reporesolver" 34 + "tangled.org/core/appview/validator" 34 35 "tangled.org/core/appview/xrpcclient" 35 36 "tangled.org/core/idresolver" 36 37 "tangled.org/core/orm" ··· 38 39 "tangled.org/core/rbac" 39 40 "tangled.org/core/tid" 40 41 "tangled.org/core/types" 42 + "tangled.org/core/xrpc" 41 43 42 44 comatproto "github.com/bluesky-social/indigo/api/atproto" 43 45 "github.com/bluesky-social/indigo/atproto/syntax" ··· 47 49 "github.com/google/uuid" 48 50 ) 49 51 52 + const ApplicationGzip = "application/gzip" 53 + 50 54 type Pulls struct { 51 55 oauth *oauth.OAuth 52 56 repoResolver *reporesolver.RepoResolver ··· 58 62 notifier notify.Notifier 59 63 enforcer *rbac.Enforcer 60 64 logger *slog.Logger 65 + validator *validator.Validator 61 66 indexer *pulls_indexer.Indexer 62 67 } 63 68 ··· 71 76 config *config.Config, 72 77 notifier notify.Notifier, 73 78 enforcer *rbac.Enforcer, 79 + validator *validator.Validator, 74 80 indexer *pulls_indexer.Indexer, 75 81 logger *slog.Logger, 76 82 ) *Pulls { ··· 85 91 notifier: notifier, 86 92 enforcer: enforcer, 87 93 logger: logger, 94 + validator: validator, 88 95 indexer: indexer, 89 96 } 90 97 } ··· 223 230 224 231 reactionMap, err := db.GetReactionMap(s.db, 20, pull.AtUri()) 225 232 if err != nil { 226 - s.logger.Error("failed to get pull reaction status", "err", err) 227 - s.pages.Notice(w, "pulls", "Failed to load pull. Try again later.") 233 + log.Println("failed to get pull reactions") 228 234 } 229 235 230 236 userReactions := map[models.ReactionKind]bool{} 231 237 if user != nil { 232 - userReactions, err = db.GetReactionStatusMap(s.db, user.Active.Did, pull.AtUri()) 233 - if err != nil { 234 - s.logger.Error("failed to get pull reaction status", "err", err) 235 - } 238 + userReactions = db.GetReactionStatusMap(s.db, user.Active.Did, pull.AtUri()) 236 239 } 237 240 238 241 labelDefs, err := db.GetLabelDefinitions( ··· 289 292 ActiveRound: roundIdInt, 290 293 IsInterdiff: interdiff, 291 294 292 - OrderedReactionKinds: models.OrderedReactionKinds, 293 - Reactions: reactionMap, 294 - UserReacted: userReactions, 295 + Reactions: reactionMap, 296 + UserReacted: userReactions, 295 297 296 298 LabelDefs: defs, 297 299 }) 298 300 } 299 301 300 302 func (s *Pulls) RepoSinglePull(w http.ResponseWriter, r *http.Request) { 301 - s.repoPullHelper(w, r, false) 303 + pull, ok := r.Context().Value("pull").(*models.Pull) 304 + if !ok { 305 + log.Println("failed to get pull") 306 + s.pages.Notice(w, "pull-error", "Failed to edit patch. 
Try again later.") 307 + return 308 + } 309 + 310 + http.Redirect(w, r, r.URL.String()+fmt.Sprintf("/round/%d", pull.LastRoundNumber()), http.StatusFound) 302 311 } 303 312 304 313 func (s *Pulls) mergeCheck(r *http.Request, f *models.Repo, pull *models.Pull, stack models.Stack) types.MergeCheckResponse { ··· 862 871 s.pages.Notice(w, "pull", "Title is required for git-diff patches.") 863 872 return 864 873 } 874 + sanitizer := markup.NewSanitizer() 865 875 if st := strings.TrimSpace(sanitizer.SanitizeDescription(title)); (st) == "" { 866 876 s.pages.Notice(w, "pull", "Title is empty after HTML sanitization") 867 877 return ··· 989 999 patch := comparison.FormatPatchRaw 990 1000 combined := comparison.CombinedPatchRaw 991 1001 992 - if err := validatePatch(&patch); err != nil { 1002 + if err := s.validator.ValidatePatch(&patch); err != nil { 993 1003 s.logger.Error("failed to validate patch", "err", err) 994 1004 s.pages.Notice(w, "pull", "Invalid patch format. Please provide a valid diff.") 995 1005 return ··· 1007 1017 } 1008 1018 1009 1019 func (s *Pulls) handlePatchBasedPull(w http.ResponseWriter, r *http.Request, repo *models.Repo, user *oauth.MultiAccountUser, title, body, targetBranch, patch string, isStacked bool) { 1010 - if err := validatePatch(&patch); err != nil { 1020 + if err := s.validator.ValidatePatch(&patch); err != nil { 1011 1021 s.logger.Error("patch validation failed", "err", err) 1012 1022 s.pages.Notice(w, "pull", "Invalid patch format. Please provide a valid diff.") 1013 1023 return ··· 1099 1109 patch := comparison.FormatPatchRaw 1100 1110 combined := comparison.CombinedPatchRaw 1101 1111 1102 - if err := validatePatch(&patch); err != nil { 1112 + if err := s.validator.ValidatePatch(&patch); err != nil { 1103 1113 s.logger.Error("failed to validate patch", "err", err) 1104 1114 s.pages.Notice(w, "pull", "Invalid patch format. Please provide a valid diff.") 1105 1115 return ··· 1220 1230 return 1221 1231 } 1222 1232 1223 - blob, err := comatproto.RepoUploadBlob(r.Context(), client, gz(patch)) 1233 + blob, err := xrpc.RepoUploadBlob(r.Context(), client, gz(patch), ApplicationGzip) 1224 1234 if err != nil { 1225 1235 log.Println("failed to upload patch", err) 1226 1236 s.pages.Notice(w, "pull", "Failed to create pull request. Try again later.") ··· 1314 1324 // apply all record creations at once 1315 1325 var writes []*comatproto.RepoApplyWrites_Input_Writes_Elem 1316 1326 for _, p := range stack { 1317 - blob, err := comatproto.RepoUploadBlob(r.Context(), client, gz(p.LatestPatch())) 1327 + blob, err := xrpc.RepoUploadBlob(r.Context(), client, gz(p.LatestPatch()), ApplicationGzip) 1318 1328 if err != nil { 1319 1329 log.Println("failed to upload patch blob", err) 1320 1330 s.pages.Notice(w, "pull", "Failed to create pull request. Try again later.") ··· 1392 1402 return 1393 1403 } 1394 1404 1395 - if err := validatePatch(&patch); err != nil { 1405 + if err := s.validator.ValidatePatch(&patch); err != nil { 1396 1406 s.logger.Error("faield to validate patch", "err", err) 1397 1407 s.pages.Notice(w, "patch-error", "Invalid patch format. 
Please provide a valid git diff or format-patch.") 1398 1408 return ··· 1813 1823 return 1814 1824 } 1815 1825 1816 - if err := validatePatch(&patch); err != nil { 1826 + if err := s.validator.ValidatePatch(&patch); err != nil { 1817 1827 s.pages.Notice(w, "resubmit-error", err.Error()) 1818 1828 return 1819 1829 } ··· 1864 1874 return 1865 1875 } 1866 1876 1867 - blob, err := comatproto.RepoUploadBlob(r.Context(), client, gz(patch)) 1877 + blob, err := xrpc.RepoUploadBlob(r.Context(), client, gz(patch), ApplicationGzip) 1868 1878 if err != nil { 1869 1879 log.Println("failed to upload patch blob", err) 1870 1880 s.pages.Notice(w, "resubmit-error", "Failed to update pull request on the PDS. Try again later.") ··· 1873 1883 record := pull.AsRecord() 1874 1884 record.PatchBlob = blob.Blob 1875 1885 record.CreatedAt = time.Now().Format(time.RFC3339) 1886 + record.Source.Sha = newSourceRev 1876 1887 1877 1888 _, err = comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 1878 1889 Collection: tangled.RepoPullNSID, ··· 2006 2017 return 2007 2018 } 2008 2019 2009 - blob, err := comatproto.RepoUploadBlob(r.Context(), client, gz(patch)) 2020 + blob, err := xrpc.RepoUploadBlob(r.Context(), client, gz(patch), ApplicationGzip) 2010 2021 if err != nil { 2011 2022 log.Println("failed to upload patch blob", err) 2012 2023 s.pages.Notice(w, "resubmit-error", "Failed to update pull request on the PDS. Try again later.") ··· 2048 2059 return 2049 2060 } 2050 2061 2051 - blob, err := comatproto.RepoUploadBlob(r.Context(), client, gz(patch)) 2062 + blob, err := xrpc.RepoUploadBlob(r.Context(), client, gz(patch), ApplicationGzip) 2052 2063 if err != nil { 2053 2064 log.Println("failed to upload patch blob", err) 2054 2065 s.pages.Notice(w, "resubmit-error", "Failed to update pull request on the PDS. Try again later.") ··· 2439 2450 w.Close() 2440 2451 return &b 2441 2452 } 2442 - 2443 - func validatePatch(patch *string) error { 2444 - if patch == nil || *patch == "" { 2445 - return fmt.Errorf("patch is empty") 2446 - } 2447 - 2448 - // add newline if not present to diff style patches 2449 - if !patchutil.IsFormatPatch(*patch) && !strings.HasSuffix(*patch, "\n") { 2450 - *patch = *patch + "\n" 2451 - } 2452 - 2453 - if err := patchutil.IsPatchValid(*patch); err != nil { 2454 - return err 2455 - } 2456 - 2457 - return nil 2458 - }
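The free validatePatch helper removed above is replaced by the injected validator. As a rough sketch of what that method could look like, assuming it wraps the same patchutil checks as the old function (the receiver, package name and import path here are assumptions, not the actual appview/validator code):

```go
package validator

import (
	"fmt"
	"strings"

	"tangled.org/core/patchutil" // assumed import path for the patchutil helpers used above
)

type Validator struct{}

// ValidatePatch mirrors the removed free function: reject empty input,
// normalise the trailing newline on plain diffs, then let patchutil
// validate the patch structure.
func (v *Validator) ValidatePatch(patch *string) error {
	if patch == nil || *patch == "" {
		return fmt.Errorf("patch is empty")
	}

	// add newline if not present to diff style patches
	if !patchutil.IsFormatPatch(*patch) && !strings.HasSuffix(*patch, "\n") {
		*patch = *patch + "\n"
	}

	return patchutil.IsPatchValid(*patch)
}
```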
+1 -1
appview/repo/archive.go
··· 66 66 if link := resp.Header.Get("Link"); link != "" { 67 67 if resolvedRef, err := extractImmutableLink(link); err == nil { 68 68 newLink := fmt.Sprintf("<%s/%s/archive/%s.tar.gz>; rel=\"immutable\"", 69 - rp.config.Core.Url(), f.DidSlashRepo(), resolvedRef) 69 + rp.config.Core.BaseUrl(), f.DidSlashRepo(), resolvedRef) 70 70 w.Header().Set("Link", newLink) 71 71 } 72 72 }
+44 -34
appview/repo/artifact.go
··· 5 5 "encoding/json" 6 6 "fmt" 7 7 "io" 8 - "log" 9 8 "net/http" 10 9 "net/url" 11 10 "time" ··· 18 17 "tangled.org/core/orm" 19 18 "tangled.org/core/tid" 20 19 "tangled.org/core/types" 20 + "tangled.org/core/xrpc" 21 21 22 22 comatproto "github.com/bluesky-social/indigo/api/atproto" 23 23 lexutil "github.com/bluesky-social/indigo/lex/util" ··· 30 30 31 31 // TODO: proper statuses here on early exit 32 32 func (rp *Repo) AttachArtifact(w http.ResponseWriter, r *http.Request) { 33 + l := rp.logger.With("handler", "AttachArtifact") 34 + 33 35 user := rp.oauth.GetMultiAccountUser(r) 34 36 tagParam := chi.URLParam(r, "tag") 35 37 f, err := rp.repoResolver.Resolve(r) 36 38 if err != nil { 37 - log.Println("failed to get repo and knot", err) 39 + l.Error("failed to get repo and knot", "err", err) 38 40 rp.pages.Notice(w, "upload", "failed to upload artifact, error in repo resolution") 39 41 return 40 42 } 41 43 42 44 tag, err := rp.resolveTag(r.Context(), f, tagParam) 43 45 if err != nil { 44 - log.Println("failed to resolve tag", err) 46 + l.Error("failed to resolve tag", "err", err) 45 47 rp.pages.Notice(w, "upload", "failed to upload artifact, error in tag resolution") 46 48 return 47 49 } 48 50 49 - file, handler, err := r.FormFile("artifact") 51 + file, header, err := r.FormFile("artifact") 50 52 if err != nil { 51 - log.Println("failed to upload artifact", err) 53 + l.Error("failed to upload artifact", "err", err) 52 54 rp.pages.Notice(w, "upload", "failed to upload artifact") 53 55 return 54 56 } ··· 56 58 57 59 client, err := rp.oauth.AuthorizedClient(r) 58 60 if err != nil { 59 - log.Println("failed to get authorized client", err) 61 + l.Error("failed to get authorized client", "err", err) 60 62 rp.pages.Notice(w, "upload", "failed to get authorized client") 61 63 return 62 64 } 63 65 64 - uploadBlobResp, err := comatproto.RepoUploadBlob(r.Context(), client, file) 66 + uploadBlobResp, err := xrpc.RepoUploadBlob(r.Context(), client, file, header.Header.Get("Content-Type")) 65 67 if err != nil { 66 - log.Println("failed to upload blob", err) 68 + l.Error("failed to upload blob", "err", err) 67 69 rp.pages.Notice(w, "upload", "Failed to upload blob to your PDS. Try again later.") 68 70 return 69 71 } 70 72 71 - log.Println("uploaded blob", humanize.Bytes(uint64(uploadBlobResp.Blob.Size)), uploadBlobResp.Blob.Ref.String()) 73 + l.Info("uploaded blob", "size", humanize.Bytes(uint64(uploadBlobResp.Blob.Size)), "blobRef", uploadBlobResp.Blob.Ref.String()) 72 74 73 75 rkey := tid.TID() 74 76 createdAt := time.Now() ··· 81 83 Val: &tangled.RepoArtifact{ 82 84 Artifact: uploadBlobResp.Blob, 83 85 CreatedAt: createdAt.Format(time.RFC3339), 84 - Name: handler.Filename, 86 + Name: header.Filename, 85 87 Repo: f.RepoAt().String(), 86 88 Tag: tag.Tag.Hash[:], 87 89 }, 88 90 }, 89 91 }) 90 92 if err != nil { 91 - log.Println("failed to create record", err) 93 + l.Error("failed to create record", "err", err) 92 94 rp.pages.Notice(w, "upload", "Failed to create artifact record. Try again later.") 93 95 return 94 96 } 95 97 96 - log.Println(putRecordResp.Uri) 98 + l.Debug("created record for blob", "aturi", putRecordResp.Uri) 97 99 98 100 tx, err := rp.db.BeginTx(r.Context(), nil) 99 101 if err != nil { 100 - log.Println("failed to start tx") 102 + l.Error("failed to start tx") 101 103 rp.pages.Notice(w, "upload", "Failed to create artifact. 
Try again later.") 102 104 return 103 105 } ··· 110 112 Tag: tag.Tag.Hash, 111 113 CreatedAt: createdAt, 112 114 BlobCid: cid.Cid(uploadBlobResp.Blob.Ref), 113 - Name: handler.Filename, 115 + Name: header.Filename, 114 116 Size: uint64(uploadBlobResp.Blob.Size), 115 117 MimeType: uploadBlobResp.Blob.MimeType, 116 118 } 117 119 118 120 err = db.AddArtifact(tx, artifact) 119 121 if err != nil { 120 - log.Println("failed to add artifact record to db", err) 122 + l.Error("failed to add artifact record to db", "err", err) 121 123 rp.pages.Notice(w, "upload", "Failed to create artifact. Try again later.") 122 124 return 123 125 } 124 126 125 127 err = tx.Commit() 126 128 if err != nil { 127 - log.Println("failed to add artifact record to db") 129 + l.Error("failed to add artifact record to db") 128 130 rp.pages.Notice(w, "upload", "Failed to create artifact. Try again later.") 129 131 return 130 132 } ··· 137 139 } 138 140 139 141 func (rp *Repo) DownloadArtifact(w http.ResponseWriter, r *http.Request) { 142 + l := rp.logger.With("handler", "DownloadArtifact") 143 + 140 144 f, err := rp.repoResolver.Resolve(r) 141 145 if err != nil { 142 - log.Println("failed to get repo and knot", err) 146 + l.Error("failed to get repo and knot", "err", err) 143 147 http.Error(w, "failed to resolve repo", http.StatusInternalServerError) 144 148 return 145 149 } ··· 149 153 150 154 tag, err := rp.resolveTag(r.Context(), f, tagParam) 151 155 if err != nil { 152 - log.Println("failed to resolve tag", err) 156 + l.Error("failed to resolve tag", "err", err) 153 157 rp.pages.Notice(w, "upload", "failed to upload artifact, error in tag resolution") 154 158 return 155 159 } ··· 161 165 orm.FilterEq("name", filename), 162 166 ) 163 167 if err != nil { 164 - log.Println("failed to get artifacts", err) 168 + l.Error("failed to get artifacts", "err", err) 165 169 http.Error(w, "failed to get artifact", http.StatusInternalServerError) 166 170 return 167 171 } 168 172 169 173 if len(artifacts) != 1 { 170 - log.Printf("too many or too few artifacts found") 174 + l.Error("too many or too few artifacts found") 171 175 http.Error(w, "artifact not found", http.StatusNotFound) 172 176 return 173 177 } ··· 176 180 177 181 ownerId, err := rp.idResolver.ResolveIdent(r.Context(), f.Did) 178 182 if err != nil { 179 - log.Println("failed to resolve repo owner did", f.Did, err) 183 + l.Error("failed to resolve repo owner did", "did", f.Did, "err", err) 180 184 http.Error(w, "repository owner not found", http.StatusNotFound) 181 185 return 182 186 } ··· 190 194 191 195 req, err := http.NewRequest(http.MethodGet, url.String(), nil) 192 196 if err != nil { 193 - log.Println("failed to create request", err) 197 + l.Error("failed to create request", "err", err) 194 198 http.Error(w, "failed to create request", http.StatusInternalServerError) 195 199 return 196 200 } ··· 198 202 199 203 resp, err := http.DefaultClient.Do(req) 200 204 if err != nil { 201 - log.Println("failed to make request", err) 205 + l.Error("failed to make request", "err", err) 202 206 http.Error(w, "failed to make request to PDS", http.StatusInternalServerError) 203 207 return 204 208 } ··· 214 218 215 219 // stream the body directly to the client 216 220 if _, err := io.Copy(w, resp.Body); err != nil { 217 - log.Println("error streaming response to client:", err) 221 + l.Error("error streaming response to client:", "err", err) 218 222 } 219 223 } 220 224 221 225 // TODO: proper statuses here on early exit 222 226 func (rp *Repo) DeleteArtifact(w http.ResponseWriter, r 
*http.Request) { 227 + l := rp.logger.With("handler", "DeleteArtifact") 228 + 223 229 user := rp.oauth.GetMultiAccountUser(r) 224 230 tagParam := chi.URLParam(r, "tag") 225 231 filename := chi.URLParam(r, "file") 226 232 f, err := rp.repoResolver.Resolve(r) 227 233 if err != nil { 228 - log.Println("failed to get repo and knot", err) 234 + l.Error("failed to get repo and knot", "err", err) 229 235 return 230 236 } 231 237 ··· 240 246 orm.FilterEq("name", filename), 241 247 ) 242 248 if err != nil { 243 - log.Println("failed to get artifacts", err) 249 + l.Error("failed to get artifacts", "err", err) 244 250 rp.pages.Notice(w, "remove", "Failed to delete artifact. Try again later.") 245 251 return 246 252 } ··· 252 258 artifact := artifacts[0] 253 259 254 260 if user.Active.Did != artifact.Did { 255 - log.Println("user not authorized to delete artifact", err) 261 + l.Error("user not authorized to delete artifact", "err", err) 256 262 rp.pages.Notice(w, "remove", "Unauthorized deletion of artifact.") 257 263 return 258 264 } ··· 263 269 Rkey: artifact.Rkey, 264 270 }) 265 271 if err != nil { 266 - log.Println("failed to get blob from pds", err) 272 + l.Error("failed to get blob from pds", "err", err) 267 273 rp.pages.Notice(w, "remove", "Failed to remove blob from PDS.") 268 274 return 269 275 } 270 276 271 277 tx, err := rp.db.BeginTx(r.Context(), nil) 272 278 if err != nil { 273 - log.Println("failed to start tx") 279 + l.Error("failed to start tx") 274 280 rp.pages.Notice(w, "remove", "Failed to delete artifact. Try again later.") 275 281 return 276 282 } ··· 282 288 orm.FilterEq("name", filename), 283 289 ) 284 290 if err != nil { 285 - log.Println("failed to remove artifact record from db", err) 291 + l.Error("failed to remove artifact record from db", "err", err) 286 292 rp.pages.Notice(w, "remove", "Failed to delete artifact. Try again later.") 287 293 return 288 294 } 289 295 290 296 err = tx.Commit() 291 297 if err != nil { 292 - log.Println("failed to remove artifact record from db") 298 + l.Error("failed to remove artifact record from db") 293 299 rp.pages.Notice(w, "remove", "Failed to delete artifact. Try again later.") 294 300 return 295 301 } 296 302 303 + l.Info("successfully deleted artifact", "tag", tagParam, "file", filename) 304 + 297 305 w.Write([]byte{}) 298 306 } 299 307 300 308 func (rp *Repo) resolveTag(ctx context.Context, f *models.Repo, tagParam string) (*types.TagReference, error) { 309 + l := rp.logger.With("handler", "resolveTag") 310 + 301 311 tagParam, err := url.QueryUnescape(tagParam) 302 312 if err != nil { 303 313 return nil, err ··· 316 326 xrpcBytes, err := tangled.RepoTags(ctx, xrpcc, "", 0, repo) 317 327 if err != nil { 318 328 if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil { 319 - log.Println("failed to call XRPC repo.tags", xrpcerr) 329 + l.Error("failed to call XRPC repo.tags", "err", xrpcerr) 320 330 return nil, xrpcerr 321 331 } 322 - log.Println("failed to reach knotserver", err) 332 + l.Error("failed to reach knotserver", "err", err) 323 333 return nil, err 324 334 } 325 335 326 336 var result types.RepoTagsResponse 327 337 if err := json.Unmarshal(xrpcBytes, &result); err != nil { 328 - log.Println("failed to decode XRPC tags response", err) 338 + l.Error("failed to decode XRPC tags response", "err", err) 329 339 return nil, err 330 340 } 331 341
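Blob uploads in this file (and in pulls.go above) now go through a local xrpc.RepoUploadBlob that takes an explicit content type instead of indigo's generated comatproto.RepoUploadBlob. A minimal sketch of such a wrapper, assuming it forwards the content type to com.atproto.repo.uploadBlob through indigo's XRPC client; the parameter types and body of the real tangled.org/core/xrpc helper may differ:

```go
package xrpc

import (
	"context"
	"io"

	comatproto "github.com/bluesky-social/indigo/api/atproto"
	indigoxrpc "github.com/bluesky-social/indigo/xrpc"
)

// RepoUploadBlob behaves like comatproto.RepoUploadBlob but lets the caller
// choose the Content-Type sent with the upload (the generated helper does
// not expose that parameter).
func RepoUploadBlob(ctx context.Context, c *indigoxrpc.Client, input io.Reader, contentType string) (*comatproto.RepoUploadBlob_Output, error) {
	var out comatproto.RepoUploadBlob_Output
	if err := c.Do(ctx, indigoxrpc.Procedure, contentType, "com.atproto.repo.uploadBlob", nil, input, &out); err != nil {
		return nil, err
	}
	return &out, nil
}
```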
+49 -2
appview/repo/blob.go
··· 9 9 "path/filepath" 10 10 "slices" 11 11 "strings" 12 + "time" 12 13 13 14 "tangled.org/core/api/tangled" 14 15 "tangled.org/core/appview/config" 16 + "tangled.org/core/appview/db" 15 17 "tangled.org/core/appview/models" 16 18 "tangled.org/core/appview/pages" 17 19 "tangled.org/core/appview/pages/markup" 18 20 "tangled.org/core/appview/reporesolver" 19 21 xrpcclient "tangled.org/core/appview/xrpcclient" 22 + "tangled.org/core/types" 20 23 21 24 indigoxrpc "github.com/bluesky-social/indigo/xrpc" 22 25 "github.com/go-chi/chi/v5" 26 + "github.com/go-git/go-git/v5/plumbing" 23 27 ) 24 28 25 29 // the content can be one of the following: ··· 78 82 79 83 user := rp.oauth.GetMultiAccountUser(r) 80 84 85 + // Get email to DID mapping for commit author 86 + var emails []string 87 + if resp.LastCommit != nil && resp.LastCommit.Author != nil { 88 + emails = append(emails, resp.LastCommit.Author.Email) 89 + } 90 + emailToDidMap, err := db.GetEmailToDid(rp.db, emails, true) 91 + if err != nil { 92 + l.Error("failed to get email to did mapping", "err", err) 93 + emailToDidMap = make(map[string]string) 94 + } 95 + 96 + var lastCommitInfo *types.LastCommitInfo 97 + if resp.LastCommit != nil { 98 + when, _ := time.Parse(time.RFC3339, resp.LastCommit.When) 99 + lastCommitInfo = &types.LastCommitInfo{ 100 + Hash: plumbing.NewHash(resp.LastCommit.Hash), 101 + Message: resp.LastCommit.Message, 102 + When: when, 103 + } 104 + if resp.LastCommit.Author != nil { 105 + lastCommitInfo.Author.Name = resp.LastCommit.Author.Name 106 + lastCommitInfo.Author.Email = resp.LastCommit.Author.Email 107 + lastCommitInfo.Author.When, _ = time.Parse(time.RFC3339, resp.LastCommit.Author.When) 108 + } 109 + } 110 + 81 111 rp.pages.RepoBlob(w, pages.RepoBlobParams{ 82 112 LoggedInUser: user, 83 113 RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 84 114 BreadCrumbs: breadcrumbs, 85 115 BlobView: blobView, 116 + EmailToDid: emailToDidMap, 117 + LastCommitInfo: lastCommitInfo, 86 118 RepoBlob_Output: resp, 87 119 }) 88 120 } ··· 219 251 if resp.Content != nil { 220 252 bytes, _ := base64.StdEncoding.DecodeString(*resp.Content) 221 253 view.Contents = string(bytes) 222 - view.Lines = strings.Count(view.Contents, "\n") + 1 254 + view.Lines = countLines(view.Contents) 223 255 } 224 256 225 257 case ".mp4", ".webm", ".ogg", ".mov", ".avi": ··· 238 270 239 271 if resp.Content != nil { 240 272 view.Contents = *resp.Content 241 - view.Lines = strings.Count(view.Contents, "\n") + 1 273 + view.Lines = countLines(view.Contents) 242 274 } 243 275 244 276 // with text, we may be dealing with markdown ··· 291 323 } 292 324 return slices.Contains(textualTypes, mimeType) 293 325 } 326 + 327 + // TODO: dedup with strings 328 + func countLines(content string) int { 329 + if content == "" { 330 + return 0 331 + } 332 + 333 + count := strings.Count(content, "\n") 334 + 335 + if !strings.HasSuffix(content, "\n") { 336 + count++ 337 + } 338 + 339 + return count 340 + }
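The new countLines helper deliberately differs from the old strings.Count(...)+1 logic: empty content yields 0 lines and a trailing newline no longer adds a phantom line. A small self-contained illustration of that behaviour:

```go
package main

import (
	"fmt"
	"strings"
)

// countLines is copied from the helper added above.
func countLines(content string) int {
	if content == "" {
		return 0
	}
	count := strings.Count(content, "\n")
	if !strings.HasSuffix(content, "\n") {
		count++
	}
	return count
}

func main() {
	fmt.Println(countLines(""))       // 0
	fmt.Println(countLines("single")) // 1
	fmt.Println(countLines("a\nb"))   // 2
	fmt.Println(countLines("a\nb\n")) // 2: trailing newline is not an extra line
}
```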
+4 -4
appview/repo/feed.go
··· 37 37 38 38 feed := &feeds.Feed{ 39 39 Title: fmt.Sprintf("activity feed for @%s", ownerSlashRepo), 40 - Link: &feeds.Link{Href: fmt.Sprintf("%s/%s", rp.config.Core.Url(), ownerSlashRepo), Type: "text/html", Rel: "alternate"}, 40 + Link: &feeds.Link{Href: fmt.Sprintf("%s/%s", rp.config.Core.BaseUrl(), ownerSlashRepo), Type: "text/html", Rel: "alternate"}, 41 41 Items: make([]*feeds.Item, 0), 42 42 Updated: time.UnixMilli(0), 43 43 } ··· 86 86 mainItem := &feeds.Item{ 87 87 Title: fmt.Sprintf("[PR #%d] %s", pull.PullId, pull.Title), 88 88 Description: description, 89 - Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d", rp.config.Core.Url(), ownerSlashRepo, pull.PullId)}, 89 + Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d", rp.config.Core.BaseUrl(), ownerSlashRepo, pull.PullId)}, 90 90 Created: pull.Created, 91 91 Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)}, 92 92 } ··· 100 100 roundItem := &feeds.Item{ 101 101 Title: fmt.Sprintf("[PR #%d] %s (round #%d)", pull.PullId, pull.Title, round.RoundNumber), 102 102 Description: fmt.Sprintf("@%s submitted changes (at round #%d) on PR #%d in @%s", owner.Handle, round.RoundNumber, pull.PullId, ownerSlashRepo), 103 - Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d/round/%d/", rp.config.Core.Url(), ownerSlashRepo, pull.PullId, round.RoundNumber)}, 103 + Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d/round/%d/", rp.config.Core.BaseUrl(), ownerSlashRepo, pull.PullId, round.RoundNumber)}, 104 104 Created: round.Created, 105 105 Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)}, 106 106 } ··· 124 124 return &feeds.Item{ 125 125 Title: fmt.Sprintf("[Issue #%d] %s", issue.IssueId, issue.Title), 126 126 Description: fmt.Sprintf("@%s %s issue #%d in @%s", owner.Handle, state, issue.IssueId, ownerSlashRepo), 127 - Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/issues/%d", rp.config.Core.Url(), ownerSlashRepo, issue.IssueId)}, 127 + Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/issues/%d", rp.config.Core.BaseUrl(), ownerSlashRepo, issue.IssueId)}, 128 128 Created: issue.Created, 129 129 Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)}, 130 130 }, nil
+5 -1
appview/repo/repo.go
··· 20 20 "tangled.org/core/appview/oauth" 21 21 "tangled.org/core/appview/pages" 22 22 "tangled.org/core/appview/reporesolver" 23 + "tangled.org/core/appview/validator" 23 24 xrpcclient "tangled.org/core/appview/xrpcclient" 24 25 "tangled.org/core/eventconsumer" 25 26 "tangled.org/core/idresolver" ··· 48 49 notifier notify.Notifier 49 50 logger *slog.Logger 50 51 serviceAuth *serviceauth.ServiceAuth 52 + validator *validator.Validator 51 53 } 52 54 53 55 func New( ··· 61 63 notifier notify.Notifier, 62 64 enforcer *rbac.Enforcer, 63 65 logger *slog.Logger, 66 + validator *validator.Validator, 64 67 ) *Repo { 65 68 return &Repo{oauth: oauth, 66 69 repoResolver: repoResolver, ··· 72 75 notifier: notifier, 73 76 enforcer: enforcer, 74 77 logger: logger, 78 + validator: validator, 75 79 } 76 80 } 77 81 ··· 221 225 Multiple: multiple, 222 226 Created: time.Now(), 223 227 } 224 - if err := label.Validate(); err != nil { 228 + if err := rp.validator.ValidateLabelDefinition(&label); err != nil { 225 229 fail(err.Error(), err) 226 230 return 227 231 }
+1
appview/repo/router.go
··· 23 23 r.Route("/tags", func(r chi.Router) { 24 24 r.Get("/", rp.Tags) 25 25 r.Route("/{tag}", func(r chi.Router) { 26 + r.Get("/", rp.Tag) 26 27 r.Get("/download/{file}", rp.DownloadArtifact) 27 28 28 29 // require repo:push to upload or delete artifacts
+6 -83
appview/repo/settings.go
··· 4 4 "encoding/json" 5 5 "fmt" 6 6 "net/http" 7 - "net/url" 8 - "regexp" 9 7 "slices" 10 8 "strings" 11 9 "time" ··· 17 15 "tangled.org/core/appview/pages" 18 16 xrpcclient "tangled.org/core/appview/xrpcclient" 19 17 "tangled.org/core/orm" 20 - "tangled.org/core/sets" 21 18 "tangled.org/core/types" 22 19 23 20 comatproto "github.com/bluesky-social/indigo/api/atproto" 24 21 lexutil "github.com/bluesky-social/indigo/lex/util" 25 22 indigoxrpc "github.com/bluesky-social/indigo/xrpc" 26 - ) 27 - 28 - type tab = map[string]any 29 - 30 - var ( 31 - // would be great to have ordered maps right about now 32 - settingsTabs []tab = []tab{ 33 - {"Name": "general", "Icon": "sliders-horizontal"}, 34 - {"Name": "access", "Icon": "users"}, 35 - {"Name": "pipelines", "Icon": "layers-2"}, 36 - } 37 23 ) 38 24 39 25 func (rp *Repo) SetDefaultBranch(w http.ResponseWriter, r *http.Request) { ··· 265 251 DefaultLabels: defaultLabels, 266 252 SubscribedLabels: subscribedLabels, 267 253 ShouldSubscribeAll: shouldSubscribeAll, 268 - Tabs: settingsTabs, 269 - Tab: "general", 270 254 }) 271 255 } 272 256 ··· 311 295 rp.pages.RepoAccessSettings(w, pages.RepoAccessSettingsParams{ 312 296 LoggedInUser: user, 313 297 RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 314 - Tabs: settingsTabs, 315 - Tab: "access", 316 298 Collaborators: collaborators, 317 299 }) 318 300 } ··· 372 354 rp.pages.RepoPipelineSettings(w, pages.RepoPipelineSettingsParams{ 373 355 LoggedInUser: user, 374 356 RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 375 - Tabs: settingsTabs, 376 - Tab: "pipelines", 377 357 Spindles: spindles, 378 358 CurrentSpindle: f.Spindle, 379 359 Secrets: niceSecret, ··· 405 385 topicStr = r.FormValue("topics") 406 386 ) 407 387 408 - if website != "" { 409 - if err := validateURI(website); err != nil { 410 - l.Error("invalid uri", "err", err) 411 - rp.pages.Notice(w, noticeId, err.Error()) 412 - return 413 - } 388 + err = rp.validator.ValidateURI(website) 389 + if website != "" && err != nil { 390 + l.Error("invalid uri", "err", err) 391 + rp.pages.Notice(w, noticeId, err.Error()) 392 + return 414 393 } 415 394 416 - topics, err := parseRepoTopicStr(topicStr) 395 + topics, err := rp.validator.ValidateRepoTopicStr(topicStr) 417 396 if err != nil { 418 397 l.Error("invalid topics", "err", err) 419 398 rp.pages.Notice(w, noticeId, err.Error()) ··· 473 452 474 453 rp.pages.HxRefresh(w) 475 454 } 476 - 477 - const ( 478 - maxTopicLen = 50 479 - maxTopics = 20 480 - ) 481 - 482 - var ( 483 - topicRE = regexp.MustCompile(`\A[a-z0-9-]+\z`) 484 - ) 485 - 486 - // parseRepoTopicStr parses and validates whitespace-separated topic string. 
487 - // 488 - // Rules: 489 - // - topics are separated by whitespace 490 - // - each topic may contain lowercase letters, digits, and hyphens only 491 - // - each topic must be <= 50 characters long 492 - // - no more than 20 topics allowed 493 - // - duplicates are removed 494 - func parseRepoTopicStr(topicStr string) ([]string, error) { 495 - topicStr = strings.TrimSpace(topicStr) 496 - if topicStr == "" { 497 - return nil, nil 498 - } 499 - parts := strings.Fields(topicStr) 500 - if len(parts) > maxTopics { 501 - return nil, fmt.Errorf("too many topics: %d (maximum %d)", len(parts), maxTopics) 502 - } 503 - 504 - topicSet := sets.New[string]() 505 - 506 - for _, t := range parts { 507 - if topicSet.Contains(t) { 508 - continue 509 - } 510 - if len(t) > maxTopicLen { 511 - return nil, fmt.Errorf("topic '%s' is too long (maximum %d characters)", t, maxTopics) 512 - } 513 - if !topicRE.MatchString(t) { 514 - return nil, fmt.Errorf("topic '%s' contains invalid characters (allowed: lowercase letters, digits, hyphens)", t) 515 - } 516 - topicSet.Insert(t) 517 - } 518 - return slices.Collect(topicSet.All()), nil 519 - } 520 - 521 - // TODO(boltless): move this to models.Repo instead 522 - func validateURI(uri string) error { 523 - parsed, err := url.Parse(uri) 524 - if err != nil { 525 - return fmt.Errorf("invalid uri format") 526 - } 527 - if parsed.Scheme == "" { 528 - return fmt.Errorf("uri scheme missing") 529 - } 530 - return nil 531 - }
+58
appview/repo/tags.go
··· 14 14 "tangled.org/core/types" 15 15 16 16 indigoxrpc "github.com/bluesky-social/indigo/xrpc" 17 + "github.com/go-chi/chi/v5" 17 18 "github.com/go-git/go-git/v5/plumbing" 18 19 ) 19 20 ··· 70 71 } 71 72 } 72 73 user := rp.oauth.GetMultiAccountUser(r) 74 + 73 75 rp.pages.RepoTags(w, pages.RepoTagsParams{ 74 76 LoggedInUser: user, 75 77 RepoInfo: rp.repoResolver.GetRepoInfo(r, user), ··· 78 80 DanglingArtifacts: danglingArtifacts, 79 81 }) 80 82 } 83 + 84 + func (rp *Repo) Tag(w http.ResponseWriter, r *http.Request) { 85 + l := rp.logger.With("handler", "RepoTag") 86 + f, err := rp.repoResolver.Resolve(r) 87 + if err != nil { 88 + l.Error("failed to get repo and knot", "err", err) 89 + return 90 + } 91 + scheme := "http" 92 + if !rp.config.Core.Dev { 93 + scheme = "https" 94 + } 95 + host := fmt.Sprintf("%s://%s", scheme, f.Knot) 96 + xrpcc := &indigoxrpc.Client{ 97 + Host: host, 98 + } 99 + repo := fmt.Sprintf("%s/%s", f.Did, f.Name) 100 + tag := chi.URLParam(r, "tag") 101 + 102 + xrpcBytes, err := tangled.RepoTag(r.Context(), xrpcc, repo, tag) 103 + if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil { 104 + l.Error("failed to call XRPC repo.tags", "err", xrpcerr) 105 + rp.pages.Error503(w) 106 + return 107 + } 108 + var result types.RepoTagResponse 109 + if err := json.Unmarshal(xrpcBytes, &result); err != nil { 110 + l.Error("failed to decode XRPC response", "err", err) 111 + rp.pages.Error503(w) 112 + return 113 + } 114 + 115 + filters := []orm.Filter{orm.FilterEq("repo_at", f.RepoAt())} 116 + if result.Tag.Tag != nil { 117 + filters = append(filters, orm.FilterEq("tag", result.Tag.Tag.Hash[:])) 118 + } 119 + 120 + artifacts, err := db.GetArtifact(rp.db, filters...) 121 + if err != nil { 122 + l.Error("failed grab artifacts", "err", err) 123 + return 124 + } 125 + // convert artifacts to map for easy UI building 126 + artifactMap := make(map[plumbing.Hash][]models.Artifact) 127 + for _, a := range artifacts { 128 + artifactMap[a.Tag] = append(artifactMap[a.Tag], a) 129 + } 130 + 131 + user := rp.oauth.GetMultiAccountUser(r) 132 + rp.pages.RepoTag(w, pages.RepoTagParams{ 133 + LoggedInUser: user, 134 + RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 135 + RepoTagResponse: result, 136 + ArtifactMap: artifactMap, 137 + }) 138 + }
+29
appview/repo/tree.go
··· 8 8 "time" 9 9 10 10 "tangled.org/core/api/tangled" 11 + "tangled.org/core/appview/db" 11 12 "tangled.org/core/appview/pages" 12 13 "tangled.org/core/appview/reporesolver" 13 14 xrpcclient "tangled.org/core/appview/xrpcclient" ··· 98 99 } 99 100 sortFiles(result.Files) 100 101 102 + // Get email to DID mapping for commit author 103 + var emails []string 104 + if xrpcResp.LastCommit != nil && xrpcResp.LastCommit.Author != nil { 105 + emails = append(emails, xrpcResp.LastCommit.Author.Email) 106 + } 107 + emailToDidMap, err := db.GetEmailToDid(rp.db, emails, true) 108 + if err != nil { 109 + l.Error("failed to get email to did mapping", "err", err) 110 + emailToDidMap = make(map[string]string) 111 + } 112 + 113 + var lastCommitInfo *types.LastCommitInfo 114 + if xrpcResp.LastCommit != nil { 115 + when, _ := time.Parse(time.RFC3339, xrpcResp.LastCommit.When) 116 + lastCommitInfo = &types.LastCommitInfo{ 117 + Hash: plumbing.NewHash(xrpcResp.LastCommit.Hash), 118 + Message: xrpcResp.LastCommit.Message, 119 + When: when, 120 + } 121 + if xrpcResp.LastCommit.Author != nil { 122 + lastCommitInfo.Author.Name = xrpcResp.LastCommit.Author.Name 123 + lastCommitInfo.Author.Email = xrpcResp.LastCommit.Author.Email 124 + lastCommitInfo.Author.When, _ = time.Parse(time.RFC3339, xrpcResp.LastCommit.Author.When) 125 + } 126 + } 127 + 101 128 rp.pages.RepoTree(w, pages.RepoTreeParams{ 102 129 LoggedInUser: user, 103 130 BreadCrumbs: breadcrumbs, 104 131 TreePath: treePath, 105 132 RepoInfo: rp.repoResolver.GetRepoInfo(r, user), 133 + EmailToDid: emailToDidMap, 134 + LastCommitInfo: lastCommitInfo, 106 135 RepoTreeResponse: result, 107 136 }) 108 137 }
+37 -61
appview/settings/settings.go
··· 24 24 comatproto "github.com/bluesky-social/indigo/api/atproto" 25 25 "github.com/bluesky-social/indigo/atproto/syntax" 26 26 lexutil "github.com/bluesky-social/indigo/lex/util" 27 + "github.com/gliderlabs/ssh" 27 28 "github.com/google/uuid" 28 29 ) 29 30 ··· 34 35 Config *config.Config 35 36 } 36 37 37 - type tab = map[string]any 38 - 39 - var ( 40 - settingsTabs []tab = []tab{ 41 - {"Name": "profile", "Icon": "user"}, 42 - {"Name": "keys", "Icon": "key"}, 43 - {"Name": "emails", "Icon": "mail"}, 44 - {"Name": "notifications", "Icon": "bell"}, 45 - {"Name": "knots", "Icon": "volleyball"}, 46 - {"Name": "spindles", "Icon": "spool"}, 47 - } 48 - ) 49 - 50 38 func (s *Settings) Router() http.Handler { 51 39 r := chi.NewRouter() 52 40 ··· 84 72 85 73 s.Pages.UserProfileSettings(w, pages.UserProfileSettingsParams{ 86 74 LoggedInUser: user, 87 - Tabs: settingsTabs, 88 - Tab: "profile", 89 75 }) 90 76 } 91 77 ··· 103 89 s.Pages.UserNotificationSettings(w, pages.UserNotificationSettingsParams{ 104 90 LoggedInUser: user, 105 91 Preferences: prefs, 106 - Tabs: settingsTabs, 107 - Tab: "notifications", 108 92 }) 109 93 } 110 94 ··· 145 129 s.Pages.UserKeysSettings(w, pages.UserKeysSettingsParams{ 146 130 LoggedInUser: user, 147 131 PubKeys: pubKeys, 148 - Tabs: settingsTabs, 149 - Tab: "keys", 150 132 }) 151 133 } 152 134 ··· 160 142 s.Pages.UserEmailsSettings(w, pages.UserEmailsSettingsParams{ 161 143 LoggedInUser: user, 162 144 Emails: emails, 163 - Tabs: settingsTabs, 164 - Tab: "emails", 165 145 }) 166 146 } 167 147 ··· 299 279 func (s *Settings) verifyUrl(did string, email string, code string) string { 300 280 return fmt.Sprintf( 301 281 "%s/settings/emails/verify?did=%s&email=%s&code=%s", 302 - s.Config.Core.Url(), 282 + s.Config.Core.BaseUrl(), 303 283 url.QueryEscape(did), 304 284 url.QueryEscape(email), 305 285 url.QueryEscape(code), ··· 438 418 log.Println("unimplemented") 439 419 return 440 420 case http.MethodPut: 441 - created := time.Now() 442 - pubKey := models.PublicKey{ 443 - Did: s.OAuth.GetDid(r), 444 - Rkey: tid.TID(), 445 - Name: r.FormValue("name"), 446 - Key: strings.TrimSpace(r.FormValue("key")), 447 - Created: &created, 421 + did := s.OAuth.GetDid(r) 422 + key := r.FormValue("key") 423 + key = strings.TrimSpace(key) 424 + name := r.FormValue("name") 425 + client, err := s.OAuth.AuthorizedClient(r) 426 + if err != nil { 427 + s.Pages.Notice(w, "settings-keys", "Failed to authorize. Try again later.") 428 + return 448 429 } 449 430 450 - if err := pubKey.Validate(); err != nil { 431 + _, _, _, _, err = ssh.ParseAuthorizedKey([]byte(key)) 432 + if err != nil { 451 433 log.Printf("parsing public key: %s", err) 452 434 s.Pages.Notice(w, "settings-keys", "That doesn't look like a valid public key. Make sure it's a <strong>public</strong> key.") 453 435 return 454 436 } 455 437 438 + rkey := tid.TID() 439 + 456 440 tx, err := s.Db.Begin() 457 441 if err != nil { 458 442 log.Printf("failed to start tx; adding public key: %s", err) ··· 461 445 } 462 446 defer tx.Rollback() 463 447 464 - if err = db.UpsertPublicKey(s.Db, pubKey); err != nil { 448 + if err := db.AddPublicKey(tx, did, name, key, rkey); err != nil { 465 449 log.Printf("adding public key: %s", err) 466 450 s.Pages.Notice(w, "settings-keys", "Failed to add public key.") 467 451 return 468 452 } 469 453 470 - client, err := s.OAuth.AuthorizedClient(r) 471 - if err != nil { 472 - s.Pages.Notice(w, "settings-keys", "Failed to authorize. 
Try again later.") 473 - return 474 - } 475 - 476 454 // store in pds too 477 - record := pubKey.AsRecord() 478 455 resp, err := comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 479 456 Collection: tangled.PublicKeyNSID, 480 - Repo: pubKey.Did, 481 - Rkey: pubKey.Rkey, 457 + Repo: did, 458 + Rkey: rkey, 482 459 Record: &lexutil.LexiconTypeDecoder{ 483 - Val: &record, 484 - }, 460 + Val: &tangled.PublicKey{ 461 + CreatedAt: time.Now().Format(time.RFC3339), 462 + Key: key, 463 + Name: name, 464 + }}, 485 465 }) 486 466 // invalid record 487 467 if err != nil { ··· 508 488 509 489 name := q.Get("name") 510 490 rkey := q.Get("rkey") 491 + key := q.Get("key") 511 492 512 493 log.Println(name) 513 494 log.Println(rkey) 495 + log.Println(key) 514 496 515 - if rkey == "" { 516 - if err := db.DeletePublicKeyLegacy(s.Db, did, name); err != nil { 517 - log.Printf("removing public key: %s", err) 518 - s.Pages.Notice(w, "settings-keys", "Failed to remove public key.") 519 - return 520 - } 521 - } else { 522 - if err := db.DeletePublicKeyByRkey(s.Db, did, rkey); err != nil { 523 - log.Printf("removing public key: %s", err) 524 - s.Pages.Notice(w, "settings-keys", "Failed to remove public key.") 525 - return 526 - } 497 + client, err := s.OAuth.AuthorizedClient(r) 498 + if err != nil { 499 + log.Printf("failed to authorize client: %s", err) 500 + s.Pages.Notice(w, "settings-keys", "Failed to authorize client.") 501 + return 502 + } 527 503 528 - client, err := s.OAuth.AuthorizedClient(r) 529 - if err != nil { 530 - log.Printf("failed to authorize client: %s", err) 531 - s.Pages.Notice(w, "settings-keys", "Failed to authorize client.") 532 - return 533 - } 504 + if err := db.DeletePublicKey(s.Db, did, name, key); err != nil { 505 + log.Printf("removing public key: %s", err) 506 + s.Pages.Notice(w, "settings-keys", "Failed to remove public key.") 507 + return 508 + } 534 509 510 + if rkey != "" { 535 511 // remove from pds too 536 - _, err = comatproto.RepoDeleteRecord(r.Context(), client, &comatproto.RepoDeleteRecord_Input{ 512 + _, err := comatproto.RepoDeleteRecord(r.Context(), client, &comatproto.RepoDeleteRecord_Input{ 537 513 Collection: tangled.PublicKeyNSID, 538 514 Repo: did, 539 515 Rkey: rkey,
-15
appview/spindles/spindles.go
··· 39 39 Logger *slog.Logger 40 40 } 41 41 42 - type tab = map[string]any 43 - 44 - var ( 45 - spindlesTabs []tab = []tab{ 46 - {"Name": "profile", "Icon": "user"}, 47 - {"Name": "keys", "Icon": "key"}, 48 - {"Name": "emails", "Icon": "mail"}, 49 - {"Name": "notifications", "Icon": "bell"}, 50 - {"Name": "knots", "Icon": "volleyball"}, 51 - {"Name": "spindles", "Icon": "spool"}, 52 - } 53 - ) 54 - 55 42 func (s *Spindles) Router() http.Handler { 56 43 r := chi.NewRouter() 57 44 ··· 83 70 s.Pages.Spindles(w, pages.SpindlesParams{ 84 71 LoggedInUser: user, 85 72 Spindles: all, 86 - Tabs: spindlesTabs, 87 73 Tab: "spindles", 88 74 }) 89 75 } ··· 143 129 Spindle: spindle, 144 130 Members: members, 145 131 Repos: repoMap, 146 - Tabs: spindlesTabs, 147 132 Tab: "spindles", 148 133 }) 149 134 }
+34 -59
appview/state/follow.go
··· 6 6 "time" 7 7 8 8 comatproto "github.com/bluesky-social/indigo/api/atproto" 9 - "github.com/bluesky-social/indigo/atproto/syntax" 10 9 lexutil "github.com/bluesky-social/indigo/lex/util" 11 10 "tangled.org/core/api/tangled" 12 11 "tangled.org/core/appview/db" ··· 43 42 44 43 switch r.Method { 45 44 case http.MethodPost: 46 - follow := models.Follow{ 47 - UserDid: currentUser.Active.Did, 48 - SubjectDid: subjectIdent.DID.String(), 49 - Rkey: tid.TID(), 50 - FollowedAt: time.Now(), 51 - } 52 - 53 - tx, err := s.db.BeginTx(r.Context(), nil) 54 - if err != nil { 55 - s.logger.Error("failed to start transaction", "err", err) 56 - return 57 - } 58 - defer tx.Rollback() 59 - 60 - if err := db.UpsertFollow(tx, follow); err != nil { 61 - s.logger.Error("failed to follow", "err", err) 62 - return 63 - } 64 - 65 - record := follow.AsRecord() 45 + createdAt := time.Now().Format(time.RFC3339) 46 + rkey := tid.TID() 66 47 resp, err := comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 67 48 Collection: tangled.GraphFollowNSID, 68 49 Repo: currentUser.Active.Did, 69 - Rkey: follow.Rkey, 50 + Rkey: rkey, 70 51 Record: &lexutil.LexiconTypeDecoder{ 71 - Val: &record, 72 - }, 52 + Val: &tangled.GraphFollow{ 53 + Subject: subjectIdent.DID.String(), 54 + CreatedAt: createdAt, 55 + }}, 73 56 }) 74 57 if err != nil { 75 58 log.Println("failed to create atproto record", err) 76 59 return 77 60 } 61 + 78 62 log.Println("created atproto record: ", resp.Uri) 79 63 80 - if err := tx.Commit(); err != nil { 81 - s.logger.Error("failed to commit transaction", "err", err) 82 - // DB op failed but record is created in PDS. Ingester will backfill the missed operation 64 + follow := &models.Follow{ 65 + UserDid: currentUser.Active.Did, 66 + SubjectDid: subjectIdent.DID.String(), 67 + Rkey: rkey, 68 + } 69 + 70 + err = db.AddFollow(s.db, follow) 71 + if err != nil { 72 + log.Println("failed to follow", err) 73 + return 83 74 } 84 75 85 - s.notifier.NewFollow(r.Context(), &follow) 76 + s.notifier.NewFollow(r.Context(), follow) 86 77 87 78 followStats, err := db.GetFollowerFollowingCount(s.db, subjectIdent.DID.String()) 88 79 if err != nil { ··· 97 88 98 89 return 99 90 case http.MethodDelete: 100 - tx, err := s.db.BeginTx(r.Context(), nil) 101 - if err != nil { 102 - s.logger.Error("failed to start transaction", "err", err) 103 - } 104 - defer tx.Rollback() 105 - 106 - follows, err := db.DeleteFollow(tx, syntax.DID(currentUser.Active.Did), subjectIdent.DID) 91 + // find the record in the db 92 + follow, err := db.GetFollow(s.db, currentUser.Active.Did, subjectIdent.DID.String()) 107 93 if err != nil { 108 - s.logger.Error("failed to delete follows from db", "err", err) 94 + log.Println("failed to get follow relationship") 109 95 return 110 96 } 111 97 112 - var writes []*comatproto.RepoApplyWrites_Input_Writes_Elem 113 - for _, followAt := range follows { 114 - writes = append(writes, &comatproto.RepoApplyWrites_Input_Writes_Elem{ 115 - RepoApplyWrites_Delete: &comatproto.RepoApplyWrites_Delete{ 116 - Collection: tangled.GraphFollowNSID, 117 - Rkey: followAt.RecordKey().String(), 118 - }, 119 - }) 120 - } 121 - _, err = comatproto.RepoApplyWrites(r.Context(), client, &comatproto.RepoApplyWrites_Input{ 122 - Repo: currentUser.Active.Did, 123 - Writes: writes, 98 + _, err = comatproto.RepoDeleteRecord(r.Context(), client, &comatproto.RepoDeleteRecord_Input{ 99 + Collection: tangled.GraphFollowNSID, 100 + Repo: currentUser.Active.Did, 101 + Rkey: follow.Rkey, 124 102 }) 103 + 125 104 if err != 
nil { 126 - s.logger.Error("failed to delete follows from PDS", "err", err) 105 + log.Println("failed to unfollow") 127 106 return 128 107 } 129 108 130 - if err := tx.Commit(); err != nil { 131 - s.logger.Error("failed to commit transaction", "err", err) 132 - // DB op failed but record is created in PDS. Ingester will backfill the missed operation 109 + err = db.DeleteFollowByRkey(s.db, currentUser.Active.Did, follow.Rkey) 110 + if err != nil { 111 + log.Println("failed to delete follow from DB") 112 + // this is not an issue, the firehose event might have already done this 133 113 } 134 - 135 - s.notifier.DeleteFollow(r.Context(), &models.Follow{ 136 - UserDid: currentUser.Active.Did, 137 - SubjectDid: subjectIdent.DID.String(), 138 - // Rkey 139 - // FollowedAt 140 - }) 141 114 142 115 followStats, err := db.GetFollowerFollowingCount(s.db, subjectIdent.DID.String()) 143 116 if err != nil { ··· 149 122 FollowStatus: models.IsNotFollowing, 150 123 FollowersCount: followStats.Followers, 151 124 }) 125 + 126 + s.notifier.DeleteFollow(r.Context(), follow) 152 127 153 128 return 154 129 }
+88 -2
appview/state/knotstream.go
··· 18 18 "tangled.org/core/log" 19 19 "tangled.org/core/orm" 20 20 "tangled.org/core/rbac" 21 + "tangled.org/core/workflow" 21 22 23 + "github.com/bluesky-social/indigo/atproto/syntax" 22 24 "github.com/go-git/go-git/v5/plumbing" 23 25 "github.com/posthog/posthog-go" 24 26 ) ··· 53 55 WorkerCount: c.Knotstream.WorkerCount, 54 56 QueueSize: c.Knotstream.QueueSize, 55 57 Logger: logger, 56 - Dev: false, 58 + Dev: c.Core.Dev, 57 59 CursorStore: &cursorStore, 58 60 } 59 61 ··· 65 67 switch msg.Nsid { 66 68 case tangled.GitRefUpdateNSID: 67 69 return ingestRefUpdate(d, enforcer, posthog, dev, source, msg) 70 + case tangled.PipelineNSID: 71 + return ingestPipeline(d, source, msg) 68 72 } 69 73 70 74 return nil ··· 118 122 if ce == nil { 119 123 continue 120 124 } 121 - if ce.Email == ke.Address { 125 + if ce.Email == ke.Address || ce.Email == record.CommitterDid { 122 126 count += int(ce.Count) 123 127 } 124 128 } ··· 186 190 187 191 return tx.Commit() 188 192 } 193 + 194 + func ingestPipeline(d *db.DB, source ec.Source, msg ec.Message) error { 195 + var record tangled.Pipeline 196 + err := json.Unmarshal(msg.EventJson, &record) 197 + if err != nil { 198 + return err 199 + } 200 + 201 + if record.TriggerMetadata == nil { 202 + return fmt.Errorf("empty trigger metadata: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 203 + } 204 + 205 + if record.TriggerMetadata.Repo == nil { 206 + return fmt.Errorf("empty repo: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 207 + } 208 + 209 + // does this repo have a spindle configured? 210 + repos, err := db.GetRepos( 211 + d, 212 + 0, 213 + orm.FilterEq("did", record.TriggerMetadata.Repo.Did), 214 + orm.FilterEq("name", record.TriggerMetadata.Repo.Repo), 215 + ) 216 + if err != nil { 217 + return fmt.Errorf("failed to look for repo in DB: nsid %s, rkey %s, %w", msg.Nsid, msg.Rkey, err) 218 + } 219 + if len(repos) != 1 { 220 + return fmt.Errorf("incorrect number of repos returned: %d (expected 1)", len(repos)) 221 + } 222 + if repos[0].Spindle == "" { 223 + return fmt.Errorf("repo does not have a spindle configured yet: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 224 + } 225 + 226 + // trigger info 227 + var trigger models.Trigger 228 + var sha string 229 + trigger.Kind = workflow.TriggerKind(record.TriggerMetadata.Kind) 230 + switch trigger.Kind { 231 + case workflow.TriggerKindPush: 232 + trigger.PushRef = &record.TriggerMetadata.Push.Ref 233 + trigger.PushNewSha = &record.TriggerMetadata.Push.NewSha 234 + trigger.PushOldSha = &record.TriggerMetadata.Push.OldSha 235 + sha = *trigger.PushNewSha 236 + case workflow.TriggerKindPullRequest: 237 + trigger.PRSourceBranch = &record.TriggerMetadata.PullRequest.SourceBranch 238 + trigger.PRTargetBranch = &record.TriggerMetadata.PullRequest.TargetBranch 239 + trigger.PRSourceSha = &record.TriggerMetadata.PullRequest.SourceSha 240 + trigger.PRAction = &record.TriggerMetadata.PullRequest.Action 241 + sha = *trigger.PRSourceSha 242 + } 243 + 244 + tx, err := d.Begin() 245 + if err != nil { 246 + return fmt.Errorf("failed to start txn: %w", err) 247 + } 248 + 249 + triggerId, err := db.AddTrigger(tx, trigger) 250 + if err != nil { 251 + return fmt.Errorf("failed to add trigger entry: %w", err) 252 + } 253 + 254 + pipeline := models.Pipeline{ 255 + Rkey: msg.Rkey, 256 + Knot: source.Key(), 257 + RepoOwner: syntax.DID(record.TriggerMetadata.Repo.Did), 258 + RepoName: record.TriggerMetadata.Repo.Repo, 259 + TriggerId: int(triggerId), 260 + Sha: sha, 261 + } 262 + 263 + err = db.AddPipeline(tx, pipeline) 264 + if err != nil { 265 + return 
fmt.Errorf("failed to add pipeline: %w", err) 266 + } 267 + 268 + err = tx.Commit() 269 + if err != nil { 270 + return fmt.Errorf("failed to commit txn: %w", err) 271 + } 272 + 273 + return nil 274 + }
+202 -23
appview/state/profile.go
··· 20 20 "tangled.org/core/appview/models" 21 21 "tangled.org/core/appview/pages" 22 22 "tangled.org/core/orm" 23 + "tangled.org/core/xrpc" 23 24 ) 24 25 25 26 func (s *State) Profile(w http.ResponseWriter, r *http.Request) { ··· 415 416 416 417 feed := feeds.Feed{ 417 418 Title: fmt.Sprintf("%s's timeline", author.Name), 418 - Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s", s.config.Core.Url(), id.Handle), Type: "text/html", Rel: "alternate"}, 419 + Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s", s.config.Core.BaseUrl(), id.Handle), Type: "text/html", Rel: "alternate"}, 419 420 Items: make([]*feeds.Item, 0), 420 421 Updated: time.UnixMilli(0), 421 422 Author: author, ··· 483 484 func (s *State) createPullRequestItem(pull *models.Pull, owner *identity.Identity, author *feeds.Author) *feeds.Item { 484 485 return &feeds.Item{ 485 486 Title: fmt.Sprintf("%s created pull request '%s' in @%s/%s", author.Name, pull.Title, owner.Handle, pull.Repo.Name), 486 - Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s/pulls/%d", s.config.Core.Url(), owner.Handle, pull.Repo.Name, pull.PullId), Type: "text/html", Rel: "alternate"}, 487 + Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s/pulls/%d", s.config.Core.BaseUrl(), owner.Handle, pull.Repo.Name, pull.PullId), Type: "text/html", Rel: "alternate"}, 487 488 Created: pull.Created, 488 489 Author: author, 489 490 } ··· 492 493 func (s *State) createIssueItem(issue *models.Issue, owner *identity.Identity, author *feeds.Author) *feeds.Item { 493 494 return &feeds.Item{ 494 495 Title: fmt.Sprintf("%s created issue '%s' in @%s/%s", author.Name, issue.Title, owner.Handle, issue.Repo.Name), 495 - Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s/issues/%d", s.config.Core.Url(), owner.Handle, issue.Repo.Name, issue.IssueId), Type: "text/html", Rel: "alternate"}, 496 + Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s/issues/%d", s.config.Core.BaseUrl(), owner.Handle, issue.Repo.Name, issue.IssueId), Type: "text/html", Rel: "alternate"}, 496 497 Created: issue.Created, 497 498 Author: author, 498 499 } ··· 512 513 513 514 return &feeds.Item{ 514 515 Title: title, 515 - Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s", s.config.Core.Url(), author.Name[1:], repo.Repo.Name), Type: "text/html", Rel: "alternate"}, // Remove @ prefix 516 + Link: &feeds.Link{Href: fmt.Sprintf("%s/@%s/%s", s.config.Core.BaseUrl(), author.Name[1:], repo.Repo.Name), Type: "text/html", Rel: "alternate"}, // Remove @ prefix 516 517 Created: repo.Repo.Created, 517 518 Author: author, 518 519 }, nil ··· 549 550 stat0 := r.FormValue("stat0") 550 551 stat1 := r.FormValue("stat1") 551 552 552 - if stat0 != "" { 553 - profile.Stats[0].Kind = models.VanityStatKind(stat0) 554 - } 555 - 556 - if stat1 != "" { 557 - profile.Stats[1].Kind = models.VanityStatKind(stat1) 558 - } 553 + profile.Stats[0].Kind = models.ParseVanityStatKind(stat0) 554 + profile.Stats[1].Kind = models.ParseVanityStatKind(stat1) 559 555 560 556 if err := db.ValidateProfile(s.db, profile); err != nil { 561 557 log.Println("invalid profile", err) ··· 613 609 s.pages.Notice(w, "update-profile", "Failed to update profile, try again later.") 614 610 return 615 611 } 616 - defer tx.Rollback() 617 - 618 - err = db.UpsertProfile(tx, profile) 619 - if err != nil { 620 - log.Println("failed to update profile", err) 621 - s.pages.Notice(w, "update-profile", "Failed to update profile, try again later.") 622 - return 623 - } 624 612 625 613 client, err := s.oauth.AuthorizedClient(r) 626 614 if err != nil { ··· 669 657 return 670 658 } 671 659 672 - if err := 
tx.Commit(); err != nil { 673 - s.logger.Error("failed to commit transaction", "err", err) 674 - // db failed, but PDS operation succeed. 675 - // log error and continue 660 + err = db.UpsertProfile(tx, profile) 661 + if err != nil { 662 + log.Println("failed to update profile", err) 663 + s.pages.Notice(w, "update-profile", "Failed to update profile, try again later.") 664 + return 676 665 } 677 666 678 667 s.notifier.UpdateProfile(r.Context(), profile) ··· 735 724 AllRepos: allRepos, 736 725 }) 737 726 } 727 + 728 + func (s *State) UploadProfileAvatar(w http.ResponseWriter, r *http.Request) { 729 + l := s.logger.With("handler", "UploadProfileAvatar") 730 + user := s.oauth.GetUser(r) 731 + l = l.With("did", user.Did) 732 + 733 + // Parse multipart form (10MB max) 734 + if err := r.ParseMultipartForm(10 << 20); err != nil { 735 + l.Error("failed to parse form", "err", err) 736 + s.pages.Notice(w, "avatar-error", "Failed to parse form") 737 + return 738 + } 739 + 740 + file, header, err := r.FormFile("avatar") 741 + if err != nil { 742 + l.Error("failed to read avatar file", "err", err) 743 + s.pages.Notice(w, "avatar-error", "Failed to read avatar file") 744 + return 745 + } 746 + defer file.Close() 747 + 748 + if header.Size > 1000000 { 749 + l.Warn("avatar file too large", "size", header.Size) 750 + s.pages.Notice(w, "avatar-error", "Avatar file too large (max 1MB)") 751 + return 752 + } 753 + 754 + contentType := header.Header.Get("Content-Type") 755 + if contentType != "image/png" && contentType != "image/jpeg" { 756 + l.Warn("invalid image type", "contentType", contentType) 757 + s.pages.Notice(w, "avatar-error", "Invalid image type (only PNG and JPEG allowed)") 758 + return 759 + } 760 + 761 + client, err := s.oauth.AuthorizedClient(r) 762 + if err != nil { 763 + l.Error("failed to get PDS client", "err", err) 764 + s.pages.Notice(w, "avatar-error", "Failed to connect to your PDS") 765 + return 766 + } 767 + 768 + uploadBlobResp, err := xrpc.RepoUploadBlob(r.Context(), client, file, header.Header.Get("Content-Type")) 769 + if err != nil { 770 + l.Error("failed to upload avatar blob", "err", err) 771 + s.pages.Notice(w, "avatar-error", "Failed to upload avatar to your PDS") 772 + return 773 + } 774 + 775 + l.Info("uploaded avatar blob", "cid", uploadBlobResp.Blob.Ref.String()) 776 + 777 + // get current profile record from PDS to get its CID for swap 778 + getRecordResp, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.ActorProfileNSID, user.Did, "self") 779 + if err != nil { 780 + l.Error("failed to get current profile record", "err", err) 781 + s.pages.Notice(w, "avatar-error", "Failed to get current profile from your PDS") 782 + return 783 + } 784 + 785 + var profileRecord *tangled.ActorProfile 786 + if getRecordResp.Value != nil { 787 + if val, ok := getRecordResp.Value.Val.(*tangled.ActorProfile); ok { 788 + profileRecord = val 789 + } else { 790 + l.Warn("profile record type assertion failed, creating new record") 791 + profileRecord = &tangled.ActorProfile{} 792 + } 793 + } else { 794 + l.Warn("no existing profile record, creating new record") 795 + profileRecord = &tangled.ActorProfile{} 796 + } 797 + 798 + profileRecord.Avatar = uploadBlobResp.Blob 799 + 800 + _, err = comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 801 + Collection: tangled.ActorProfileNSID, 802 + Repo: user.Did, 803 + Rkey: "self", 804 + Record: &lexutil.LexiconTypeDecoder{Val: profileRecord}, 805 + SwapRecord: getRecordResp.Cid, 806 + }) 807 + 808 + if err != 
nil { 809 + l.Error("failed to update profile record", "err", err) 810 + s.pages.Notice(w, "avatar-error", "Failed to update profile on your PDS") 811 + return 812 + } 813 + 814 + l.Info("successfully updated profile with avatar") 815 + 816 + profile, err := db.GetProfile(s.db, user.Did) 817 + if err != nil { 818 + l.Warn("getting profile data from DB", "err", err) 819 + profile = &models.Profile{Did: user.Did} 820 + } 821 + profile.Avatar = uploadBlobResp.Blob.Ref.String() 822 + 823 + tx, err := s.db.BeginTx(r.Context(), nil) 824 + if err != nil { 825 + l.Error("failed to start transaction", "err", err) 826 + s.pages.HxRefresh(w) 827 + w.WriteHeader(http.StatusOK) 828 + return 829 + } 830 + 831 + err = db.UpsertProfile(tx, profile) 832 + if err != nil { 833 + l.Error("failed to update profile in DB", "err", err) 834 + s.pages.HxRefresh(w) 835 + w.WriteHeader(http.StatusOK) 836 + return 837 + } 838 + 839 + s.pages.HxRedirect(w, r.Header.Get("Referer")) 840 + } 841 + 842 + func (s *State) RemoveProfileAvatar(w http.ResponseWriter, r *http.Request) { 843 + l := s.logger.With("handler", "RemoveProfileAvatar") 844 + user := s.oauth.GetUser(r) 845 + l = l.With("did", user.Did) 846 + 847 + client, err := s.oauth.AuthorizedClient(r) 848 + if err != nil { 849 + l.Error("failed to get PDS client", "err", err) 850 + s.pages.Notice(w, "avatar-error", "Failed to connect to your PDS") 851 + return 852 + } 853 + 854 + getRecordResp, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.ActorProfileNSID, user.Did, "self") 855 + if err != nil { 856 + l.Error("failed to get current profile record", "err", err) 857 + s.pages.Notice(w, "avatar-error", "Failed to get current profile from your PDS") 858 + return 859 + } 860 + 861 + var profileRecord *tangled.ActorProfile 862 + if getRecordResp.Value != nil { 863 + if val, ok := getRecordResp.Value.Val.(*tangled.ActorProfile); ok { 864 + profileRecord = val 865 + } else { 866 + l.Warn("profile record type assertion failed") 867 + profileRecord = &tangled.ActorProfile{} 868 + } 869 + } else { 870 + l.Warn("no existing profile record") 871 + profileRecord = &tangled.ActorProfile{} 872 + } 873 + 874 + profileRecord.Avatar = nil 875 + 876 + _, err = comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 877 + Collection: tangled.ActorProfileNSID, 878 + Repo: user.Did, 879 + Rkey: "self", 880 + Record: &lexutil.LexiconTypeDecoder{Val: profileRecord}, 881 + SwapRecord: getRecordResp.Cid, 882 + }) 883 + 884 + if err != nil { 885 + l.Error("failed to update profile record", "err", err) 886 + s.pages.Notice(w, "avatar-error", "Failed to remove avatar from your PDS") 887 + return 888 + } 889 + 890 + l.Info("successfully removed avatar from PDS") 891 + 892 + profile, err := db.GetProfile(s.db, user.Did) 893 + if err != nil { 894 + l.Warn("getting profile data from DB", "err", err) 895 + profile = &models.Profile{Did: user.Did} 896 + } 897 + profile.Avatar = "" 898 + 899 + tx, err := s.db.BeginTx(r.Context(), nil) 900 + if err != nil { 901 + l.Error("failed to start transaction", "err", err) 902 + s.pages.HxRefresh(w) 903 + w.WriteHeader(http.StatusOK) 904 + return 905 + } 906 + 907 + err = db.UpsertProfile(tx, profile) 908 + if err != nil { 909 + l.Error("failed to update profile in DB", "err", err) 910 + s.pages.HxRefresh(w) 911 + w.WriteHeader(http.StatusOK) 912 + return 913 + } 914 + 915 + s.pages.HxRedirect(w, r.Header.Get("Referer")) 916 + }
+26 -51
appview/state/reaction.go
··· 45 45 46 46 switch r.Method { 47 47 case http.MethodPost: 48 - reaction := models.Reaction{ 49 - ReactedByDid: currentUser.Active.Did, 50 - Rkey: tid.TID(), 51 - Kind: reactionKind, 52 - ThreadAt: subjectUri, 53 - Created: time.Now(), 54 - } 55 - 56 - tx, err := s.db.BeginTx(r.Context(), nil) 57 - if err != nil { 58 - s.logger.Error("failed to start transaction", "err", err) 59 - return 60 - } 61 - defer tx.Rollback() 62 - 63 - if err := db.UpsertReaction(tx, reaction); err != nil { 64 - log.Println("failed to react", err) 65 - return 66 - } 67 - 68 - record := reaction.AsRecord() 48 + createdAt := time.Now().Format(time.RFC3339) 49 + rkey := tid.TID() 69 50 resp, err := comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 70 51 Collection: tangled.FeedReactionNSID, 71 52 Repo: currentUser.Active.Did, 72 - Rkey: reaction.Rkey, 53 + Rkey: rkey, 73 54 Record: &lexutil.LexiconTypeDecoder{ 74 - Val: &record, 55 + Val: &tangled.FeedReaction{ 56 + Subject: subjectUri.String(), 57 + Reaction: reactionKind.String(), 58 + CreatedAt: createdAt, 59 + }, 75 60 }, 76 61 }) 77 62 if err != nil { 78 63 log.Println("failed to create atproto record", err) 79 64 return 80 65 } 81 - log.Println("created atproto record: ", resp.Uri) 82 66 83 - if err := tx.Commit(); err != nil { 84 - s.logger.Error("failed to commit transaction", "err", err) 85 - // DB op failed but record is created in PDS. Ingester will backfill the missed operation 67 + err = db.AddReaction(s.db, currentUser.Active.Did, subjectUri, reactionKind, rkey) 68 + if err != nil { 69 + log.Println("failed to react", err) 70 + return 86 71 } 87 72 88 73 reactionMap, err := db.GetReactionMap(s.db, 20, subjectUri) ··· 90 75 log.Println("failed to get reactions for ", subjectUri) 91 76 } 92 77 78 + log.Println("created atproto record: ", resp.Uri) 79 + 93 80 s.pages.ThreadReactionFragment(w, pages.ThreadReactionFragmentParams{ 94 81 ThreadAt: subjectUri, 95 82 Kind: reactionKind, ··· 100 87 101 88 return 102 89 case http.MethodDelete: 103 - tx, err := s.db.BeginTx(r.Context(), nil) 90 + reaction, err := db.GetReaction(s.db, currentUser.Active.Did, subjectUri, reactionKind) 104 91 if err != nil { 105 - s.logger.Error("failed to start transaction", "err", err) 106 - } 107 - defer tx.Rollback() 108 - 109 - reactions, err := db.DeleteReaction(tx, syntax.DID(currentUser.Active.Did), subjectUri, reactionKind) 110 - if err != nil { 111 - s.logger.Error("failed to delete reactions from db", "err", err) 92 + log.Println("failed to get reaction relationship for", currentUser.Active.Did, subjectUri) 112 93 return 113 94 } 114 95 115 - var writes []*comatproto.RepoApplyWrites_Input_Writes_Elem 116 - for _, reactionAt := range reactions { 117 - writes = append(writes, &comatproto.RepoApplyWrites_Input_Writes_Elem{ 118 - RepoApplyWrites_Delete: &comatproto.RepoApplyWrites_Delete{ 119 - Collection: tangled.FeedReactionNSID, 120 - Rkey: reactionAt.RecordKey().String(), 121 - }, 122 - }) 123 - } 124 - _, err = comatproto.RepoApplyWrites(r.Context(), client, &comatproto.RepoApplyWrites_Input{ 125 - Repo: currentUser.Active.Did, 126 - Writes: writes, 96 + _, err = comatproto.RepoDeleteRecord(r.Context(), client, &comatproto.RepoDeleteRecord_Input{ 97 + Collection: tangled.FeedReactionNSID, 98 + Repo: currentUser.Active.Did, 99 + Rkey: reaction.Rkey, 127 100 }) 101 + 128 102 if err != nil { 129 - s.logger.Error("failed to delete reactions from PDS", "err", err) 103 + log.Println("failed to remove reaction") 130 104 return 131 105 } 132 106 
133 - if err := tx.Commit(); err != nil { 134 - s.logger.Error("failed to commit transaction", "err", err) 135 - // DB op failed but record is created in PDS. Ingester will backfill the missed operation 107 + err = db.DeleteReactionByRkey(s.db, currentUser.Active.Did, reaction.Rkey) 108 + if err != nil { 109 + log.Println("failed to delete reaction from DB") 110 + // this is not an issue, the firehose event might have already done this 136 111 } 137 112 138 113 reactionMap, err := db.GetReactionMap(s.db, 20, subjectUri)
+6
appview/state/router.go
··· 165 165 r.Get("/edit-pins", s.EditPinsFragment) 166 166 r.Post("/bio", s.UpdateProfileBio) 167 167 r.Post("/pins", s.UpdateProfilePins) 168 + r.Post("/avatar", s.UploadProfileAvatar) 169 + r.Delete("/avatar", s.RemoveProfileAvatar) 168 170 }) 169 171 170 172 r.Mount("/settings", s.SettingsRouter()) ··· 274 276 s.db, 275 277 s.config, 276 278 s.notifier, 279 + s.validator, 277 280 s.indexer.Issues, 278 281 log.SubLogger(s.logger, "issues"), 279 282 ) ··· 291 294 s.config, 292 295 s.notifier, 293 296 s.enforcer, 297 + s.validator, 294 298 s.indexer.Pulls, 295 299 log.SubLogger(s.logger, "pulls"), 296 300 ) ··· 309 313 s.notifier, 310 314 s.enforcer, 311 315 log.SubLogger(s.logger, "repo"), 316 + s.validator, 312 317 ) 313 318 return repo.Router(mw) 314 319 } ··· 333 338 s.oauth, 334 339 s.pages, 335 340 s.db, 341 + s.validator, 336 342 s.enforcer, 337 343 log.SubLogger(s.logger, "labels"), 338 344 )
+1 -90
appview/state/spindlestream.go
··· 20 20 "tangled.org/core/orm" 21 21 "tangled.org/core/rbac" 22 22 spindle "tangled.org/core/spindle/models" 23 - "tangled.org/core/workflow" 24 23 ) 25 24 26 25 func Spindlestream(ctx context.Context, c *config.Config, d *db.DB, enforcer *rbac.Enforcer) (*ec.Consumer, error) { ··· 53 52 WorkerCount: c.Spindlestream.WorkerCount, 54 53 QueueSize: c.Spindlestream.QueueSize, 55 54 Logger: logger, 56 - Dev: false, 55 + Dev: c.Core.Dev, 57 56 CursorStore: &cursorStore, 58 57 } 59 58 ··· 63 62 func spindleIngester(ctx context.Context, logger *slog.Logger, d *db.DB) ec.ProcessFunc { 64 63 return func(ctx context.Context, source ec.Source, msg ec.Message) error { 65 64 switch msg.Nsid { 66 - case tangled.PipelineNSID: 67 - return ingestPipeline(logger, d, source, msg) 68 65 case tangled.PipelineStatusNSID: 69 66 return ingestPipelineStatus(ctx, logger, d, source, msg) 70 67 } 71 68 72 69 return nil 73 70 } 74 - } 75 - 76 - func ingestPipeline(l *slog.Logger, d *db.DB, source ec.Source, msg ec.Message) error { 77 - var record tangled.Pipeline 78 - err := json.Unmarshal(msg.EventJson, &record) 79 - if err != nil { 80 - return err 81 - } 82 - 83 - if record.TriggerMetadata == nil { 84 - return fmt.Errorf("empty trigger metadata: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 85 - } 86 - 87 - if record.TriggerMetadata.Repo == nil { 88 - return fmt.Errorf("empty repo: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 89 - } 90 - 91 - // does this repo have a spindle configured? 92 - repos, err := db.GetRepos( 93 - d, 94 - 0, 95 - orm.FilterEq("did", record.TriggerMetadata.Repo.Did), 96 - orm.FilterEq("name", record.TriggerMetadata.Repo.Repo), 97 - ) 98 - if err != nil { 99 - return fmt.Errorf("failed to look for repo in DB: nsid %s, rkey %s, %w", msg.Nsid, msg.Rkey, err) 100 - } 101 - if len(repos) != 1 { 102 - return fmt.Errorf("incorrect number of repos returned: %d (expected 1)", len(repos)) 103 - } 104 - if repos[0].Spindle == "" { 105 - return fmt.Errorf("repo does not have a spindle configured yet: nsid %s, rkey %s", msg.Nsid, msg.Rkey) 106 - } 107 - 108 - // trigger info 109 - var trigger models.Trigger 110 - var sha string 111 - trigger.Kind = workflow.TriggerKind(record.TriggerMetadata.Kind) 112 - switch trigger.Kind { 113 - case workflow.TriggerKindPush: 114 - trigger.PushRef = &record.TriggerMetadata.Push.Ref 115 - trigger.PushNewSha = &record.TriggerMetadata.Push.NewSha 116 - trigger.PushOldSha = &record.TriggerMetadata.Push.OldSha 117 - sha = *trigger.PushNewSha 118 - case workflow.TriggerKindPullRequest: 119 - trigger.PRSourceBranch = &record.TriggerMetadata.PullRequest.SourceBranch 120 - trigger.PRTargetBranch = &record.TriggerMetadata.PullRequest.TargetBranch 121 - trigger.PRSourceSha = &record.TriggerMetadata.PullRequest.SourceSha 122 - trigger.PRAction = &record.TriggerMetadata.PullRequest.Action 123 - sha = *trigger.PRSourceSha 124 - } 125 - 126 - tx, err := d.Begin() 127 - if err != nil { 128 - return fmt.Errorf("failed to start txn: %w", err) 129 - } 130 - 131 - triggerId, err := db.AddTrigger(tx, trigger) 132 - if err != nil { 133 - return fmt.Errorf("failed to add trigger entry: %w", err) 134 - } 135 - 136 - // TODO: we shouldn't even use knot to identify pipelines 137 - knot := record.TriggerMetadata.Repo.Knot 138 - pipeline := models.Pipeline{ 139 - Rkey: msg.Rkey, 140 - Knot: knot, 141 - RepoOwner: syntax.DID(record.TriggerMetadata.Repo.Did), 142 - RepoName: record.TriggerMetadata.Repo.Repo, 143 - TriggerId: int(triggerId), 144 - Sha: sha, 145 - } 146 - 147 - err = db.AddPipeline(tx, pipeline) 
148 - if err != nil { 149 - return fmt.Errorf("failed to add pipeline: %w", err) 150 - } 151 - 152 - err = tx.Commit() 153 - if err != nil { 154 - return fmt.Errorf("failed to commit txn: %w", err) 155 - } 156 - 157 - l.Info("added pipeline", "pipeline", pipeline) 158 - 159 - return nil 160 71 } 161 72 162 73 func ingestPipelineStatus(ctx context.Context, logger *slog.Logger, d *db.DB, source ec.Source, msg ec.Message) error {
+33 -58
appview/state/star.go
··· 38 38 39 39 switch r.Method { 40 40 case http.MethodPost: 41 - star := models.Star{ 42 - Did: currentUser.Active.Did, 43 - Rkey: tid.TID(), 44 - RepoAt: subjectUri, 45 - Created: time.Now(), 46 - } 47 - 48 - tx, err := s.db.BeginTx(r.Context(), nil) 49 - if err != nil { 50 - s.logger.Error("failed to start transaction", "err", err) 51 - return 52 - } 53 - defer tx.Rollback() 54 - 55 - if err := db.UpsertStar(tx, star); err != nil { 56 - s.logger.Error("failed to star", "err", err) 57 - return 58 - } 59 - 60 - record := star.AsRecord() 41 + createdAt := time.Now().Format(time.RFC3339) 42 + rkey := tid.TID() 61 43 resp, err := comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{ 62 44 Collection: tangled.FeedStarNSID, 63 45 Repo: currentUser.Active.Did, 64 - Rkey: star.Rkey, 46 + Rkey: rkey, 65 47 Record: &lexutil.LexiconTypeDecoder{ 66 - Val: &record, 67 - }, 48 + Val: &tangled.FeedStar{ 49 + Subject: subjectUri.String(), 50 + CreatedAt: createdAt, 51 + }}, 68 52 }) 69 53 if err != nil { 70 54 log.Println("failed to create atproto record", err) ··· 72 56 } 73 57 log.Println("created atproto record: ", resp.Uri) 74 58 75 - if err := tx.Commit(); err != nil { 76 - s.logger.Error("failed to commit transaction", "err", err) 77 - // DB op failed but record is created in PDS. Ingester will backfill the missed operation 59 + star := &models.Star{ 60 + Did: currentUser.Active.Did, 61 + RepoAt: subjectUri, 62 + Rkey: rkey, 78 63 } 79 64 80 - s.notifier.NewStar(r.Context(), &star) 65 + err = db.AddStar(s.db, star) 66 + if err != nil { 67 + log.Println("failed to star", err) 68 + return 69 + } 81 70 82 71 starCount, err := db.GetStarCount(s.db, subjectUri) 83 72 if err != nil { 84 73 log.Println("failed to get star count for ", subjectUri) 85 74 } 75 + 76 + s.notifier.NewStar(r.Context(), star) 86 77 87 78 s.pages.StarBtnFragment(w, pages.StarBtnFragmentParams{ 88 79 IsStarred: true, ··· 92 83 93 84 return 94 85 case http.MethodDelete: 95 - tx, err := s.db.BeginTx(r.Context(), nil) 96 - if err != nil { 97 - s.logger.Error("failed to start transaction", "err", err) 98 - } 99 - defer tx.Rollback() 100 - 101 - stars, err := db.DeleteStar(tx, syntax.DID(currentUser.Active.Did), subjectUri) 86 + // find the record in the db 87 + star, err := db.GetStar(s.db, currentUser.Active.Did, subjectUri) 102 88 if err != nil { 103 - s.logger.Error("failed to delete stars from db", "err", err) 89 + log.Println("failed to get star relationship") 104 90 return 105 91 } 106 92 107 - var writes []*comatproto.RepoApplyWrites_Input_Writes_Elem 108 - for _, starAt := range stars { 109 - writes = append(writes, &comatproto.RepoApplyWrites_Input_Writes_Elem{ 110 - RepoApplyWrites_Delete: &comatproto.RepoApplyWrites_Delete{ 111 - Collection: tangled.FeedStarNSID, 112 - Rkey: starAt.RecordKey().String(), 113 - }, 114 - }) 115 - } 116 - _, err = comatproto.RepoApplyWrites(r.Context(), client, &comatproto.RepoApplyWrites_Input{ 117 - Repo: currentUser.Active.Did, 118 - Writes: writes, 93 + _, err = comatproto.RepoDeleteRecord(r.Context(), client, &comatproto.RepoDeleteRecord_Input{ 94 + Collection: tangled.FeedStarNSID, 95 + Repo: currentUser.Active.Did, 96 + Rkey: star.Rkey, 119 97 }) 98 + 120 99 if err != nil { 121 - s.logger.Error("failed to delete stars from PDS", "err", err) 100 + log.Println("failed to unstar") 122 101 return 123 102 } 124 103 125 - if err := tx.Commit(); err != nil { 126 - s.logger.Error("failed to commit transaction", "err", err) 127 - // DB op failed but record is created in 
PDS. Ingester will backfill the missed operation 104 + err = db.DeleteStarByRkey(s.db, currentUser.Active.Did, star.Rkey) 105 + if err != nil { 106 + log.Println("failed to delete star from DB") 107 + // this is not an issue, the firehose event might have already done this 128 108 } 129 109 130 - s.notifier.DeleteStar(r.Context(), &models.Star{ 131 - Did: currentUser.Active.Did, 132 - RepoAt: subjectUri, 133 - // Rkey 134 - // Created 135 - }) 136 - 137 110 starCount, err := db.GetStarCount(s.db, subjectUri) 138 111 if err != nil { 139 112 log.Println("failed to get star count for ", subjectUri) 140 113 return 141 114 } 115 + 116 + s.notifier.DeleteStar(r.Context(), star) 142 117 143 118 s.pages.StarBtnFragment(w, pages.StarBtnFragmentParams{ 144 119 IsStarred: false,
+8 -2
appview/state/state.go
··· 23 23 "tangled.org/core/appview/oauth" 24 24 "tangled.org/core/appview/pages" 25 25 "tangled.org/core/appview/reporesolver" 26 + "tangled.org/core/appview/validator" 26 27 xrpcclient "tangled.org/core/appview/xrpcclient" 27 28 "tangled.org/core/eventconsumer" 28 29 "tangled.org/core/idresolver" ··· 58 59 knotstream *eventconsumer.Consumer 59 60 spindlestream *eventconsumer.Consumer 60 61 logger *slog.Logger 62 + validator *validator.Validator 61 63 } 62 64 63 65 func Make(ctx context.Context, config *config.Config) (*State, error) { ··· 90 92 return nil, fmt.Errorf("failed to create posthog client: %w", err) 91 93 } 92 94 93 - pages := pages.NewPages(config, res, log.SubLogger(logger, "pages")) 95 + pages := pages.NewPages(config, res, d, log.SubLogger(logger, "pages")) 94 96 oauth, err := oauth.New(config, posthog, d, enforcer, res, log.SubLogger(logger, "oauth")) 95 97 if err != nil { 96 98 return nil, fmt.Errorf("failed to start oauth handler: %w", err) 97 99 } 100 + validator := validator.New(d, res, enforcer) 98 101 99 102 repoResolver := reporesolver.New(config, enforcer, d) 100 103 ··· 141 144 IdResolver: res, 142 145 Config: config, 143 146 Logger: log.SubLogger(logger, "ingester"), 147 + Validator: validator, 144 148 } 145 149 err = jc.StartJetstream(ctx, ingester.Ingest()) 146 150 if err != nil { ··· 169 173 notifiers = append(notifiers, phnotify.NewPosthogNotifier(posthog)) 170 174 } 171 175 notifiers = append(notifiers, indexer) 172 - notifier := notify.NewMergedNotifier(notifiers, tlog.SubLogger(logger, "notify")) 176 + notifier := notify.NewMergedNotifier(notifiers) 177 + notifier = notify.NewLoggingNotifier(notifier, tlog.SubLogger(logger, "notify")) 173 178 174 179 state := &State{ 175 180 d, ··· 187 192 knotstream, 188 193 spindlestream, 189 194 logger, 195 + validator, 190 196 } 191 197 192 198 return state, nil
+55
appview/validator/issue.go
··· 1 + package validator 2 + 3 + import ( 4 + "fmt" 5 + "strings" 6 + 7 + "tangled.org/core/appview/db" 8 + "tangled.org/core/appview/models" 9 + "tangled.org/core/orm" 10 + ) 11 + 12 + func (v *Validator) ValidateIssueComment(comment *models.IssueComment) error { 13 + // if comments have parents, only ingest ones that are 1 level deep 14 + if comment.ReplyTo != nil { 15 + parents, err := db.GetIssueComments(v.db, orm.FilterEq("at_uri", *comment.ReplyTo)) 16 + if err != nil { 17 + return fmt.Errorf("failed to fetch parent comment: %w", err) 18 + } 19 + if len(parents) != 1 { 20 + return fmt.Errorf("incorrect number of parent comments returned: %d", len(parents)) 21 + } 22 + 23 + // depth check 24 + parent := parents[0] 25 + if parent.ReplyTo != nil { 26 + return fmt.Errorf("incorrect depth, this comment is replying at depth >1") 27 + } 28 + } 29 + 30 + if sb := strings.TrimSpace(v.sanitizer.SanitizeDefault(comment.Body)); sb == "" { 31 + return fmt.Errorf("body is empty after HTML sanitization") 32 + } 33 + 34 + return nil 35 + } 36 + 37 + func (v *Validator) ValidateIssue(issue *models.Issue) error { 38 + if issue.Title == "" { 39 + return fmt.Errorf("issue title is empty") 40 + } 41 + 42 + if issue.Body == "" { 43 + return fmt.Errorf("issue body is empty") 44 + } 45 + 46 + if st := strings.TrimSpace(v.sanitizer.SanitizeDescription(issue.Title)); st == "" { 47 + return fmt.Errorf("title is empty after HTML sanitization") 48 + } 49 + 50 + if sb := strings.TrimSpace(v.sanitizer.SanitizeDefault(issue.Body)); sb == "" { 51 + return fmt.Errorf("body is empty after HTML sanitization") 52 + } 53 + 54 + return nil 55 + }
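The validator above only helps if handlers consult it before writing anything; a minimal handler-side sketch, assuming the validator is reachable as s.validator the way state.go above wires it (the form-field names and the "issue" notice id are illustrative, not part of this change):

    issue := &models.Issue{
    	Title: strings.TrimSpace(r.FormValue("title")),
    	Body:  r.FormValue("body"),
    }
    if err := s.validator.ValidateIssue(issue); err != nil {
    	s.pages.Notice(w, "issue", err.Error())
    	return
    }
    // only a validated issue gets written to the PDS and indexed locally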
+217
appview/validator/label.go
··· 1 + package validator
2 +
3 + import (
4 + 	"context"
5 + 	"fmt"
6 + 	"regexp"
7 + 	"slices"
8 + 	"strings"
9 +
10 + 	"github.com/bluesky-social/indigo/atproto/syntax"
11 + 	"tangled.org/core/api/tangled"
12 + 	"tangled.org/core/appview/models"
13 + )
14 +
15 + var (
16 + 	// Label name should be alphanumeric with hyphens/underscores, but not start/end with them
17 + 	labelNameRegex = regexp.MustCompile(`^[a-zA-Z0-9]([a-zA-Z0-9_-]*[a-zA-Z0-9])?$`)
18 + 	// Color should be a valid 3- or 6-digit hex color
19 + 	colorRegex = regexp.MustCompile(`^#([a-fA-F0-9]{3}|[a-fA-F0-9]{6})$`)
20 + 	// You can only label issues and pulls presently
21 + 	validScopes = []string{tangled.RepoIssueNSID, tangled.RepoPullNSID}
22 + )
23 +
24 + func (v *Validator) ValidateLabelDefinition(label *models.LabelDefinition) error {
25 + 	if label.Name == "" {
26 + 		return fmt.Errorf("label name is empty")
27 + 	}
28 + 	if len(label.Name) > 40 {
29 + 		return fmt.Errorf("label name too long (max 40 graphemes)")
30 + 	}
31 + 	if len(label.Name) < 1 {
32 + 		return fmt.Errorf("label name too short (min 1 grapheme)")
33 + 	}
34 + 	if !labelNameRegex.MatchString(label.Name) {
35 + 		return fmt.Errorf("label name contains invalid characters (use only letters, numbers, hyphens, and underscores)")
36 + 	}
37 +
38 + 	if !label.ValueType.IsConcreteType() {
39 + 		return fmt.Errorf("invalid value type: %q (must be one of: null, boolean, integer, string)", label.ValueType.Type)
40 + 	}
41 +
42 + 	// null type checks: cannot be enums, multiple or explicit format
43 + 	if label.ValueType.IsNull() && label.ValueType.IsEnum() {
44 + 		return fmt.Errorf("null type cannot be used in conjunction with enum type")
45 + 	}
46 + 	if label.ValueType.IsNull() && label.Multiple {
47 + 		return fmt.Errorf("null type labels cannot be multiple")
48 + 	}
49 + 	if label.ValueType.IsNull() && !label.ValueType.IsAnyFormat() {
50 + 		return fmt.Errorf("format cannot be used in conjunction with null type")
51 + 	}
52 +
53 + 	// format checks: cannot be used with enum, or integers
54 + 	if !label.ValueType.IsAnyFormat() && label.ValueType.IsEnum() {
55 + 		return fmt.Errorf("enum types cannot be used in conjunction with format specification")
56 + 	}
57 +
58 + 	if !label.ValueType.IsAnyFormat() && !label.ValueType.IsString() {
59 + 		return fmt.Errorf("format specifications are only permitted on string types")
60 + 	}
61 +
62 + 	// validate scope (nsid format)
63 + 	if label.Scope == nil {
64 + 		return fmt.Errorf("scope is required")
65 + 	}
66 + 	for _, s := range label.Scope {
67 + 		if _, err := syntax.ParseNSID(s); err != nil {
68 + 			return fmt.Errorf("failed to parse scope: %w", err)
69 + 		}
70 + 		if !slices.Contains(validScopes, s) {
71 + 			return fmt.Errorf("invalid scope: scope must be present in %q", validScopes)
72 + 		}
73 + 	}
74 +
75 + 	// validate color if provided
76 + 	if label.Color != nil {
77 + 		color := strings.TrimSpace(*label.Color)
78 + 		if color == "" {
79 + 			// empty color is fine, set to nil
80 + 			label.Color = nil
81 + 		} else {
82 + 			if !colorRegex.MatchString(color) {
83 + 				return fmt.Errorf("color must be a valid hex color (e.g. #79FFE1 or #000)")
84 + 			}
85 + 			// expand 3-digit hex to 6-digit hex
86 + 			if len(color) == 4 { // #ABC
87 + 				color = fmt.Sprintf("#%c%c%c%c%c%c", color[1], color[1], color[2], color[2], color[3], color[3])
88 + 			}
89 + 			// convert to uppercase for consistency
90 + 			color = strings.ToUpper(color)
91 + 			label.Color = &color
92 + 		}
93 + 	}
94 +
95 + 	return nil
96 + }
97 +
98 + func (v *Validator) ValidateLabelOp(labelDef *models.LabelDefinition, repo *models.Repo, labelOp *models.LabelOp) error {
99 + 	if labelDef == nil {
100 + 		return fmt.Errorf("label definition is required")
101 + 	}
102 + 	if repo == nil {
103 + 		return fmt.Errorf("repo is required")
104 + 	}
105 + 	if labelOp == nil {
106 + 		return fmt.Errorf("label operation is required")
107 + 	}
108 +
109 + 	// validate permissions: only collaborators can apply labels currently
110 + 	//
111 + 	// TODO: introduce a repo:triage permission
112 + 	ok, err := v.enforcer.IsPushAllowed(labelOp.Did, repo.Knot, repo.DidSlashRepo())
113 + 	if err != nil {
114 + 		return fmt.Errorf("failed to enforce permissions: %w", err)
115 + 	}
116 + 	if !ok {
117 + 		return fmt.Errorf("unauthorized label operation")
118 + 	}
119 +
120 + 	expectedKey := labelDef.AtUri().String()
121 + 	if labelOp.OperandKey != expectedKey {
122 + 		return fmt.Errorf("operand key %q does not match label definition URI %q", labelOp.OperandKey, expectedKey)
123 + 	}
124 +
125 + 	if labelOp.Operation != models.LabelOperationAdd && labelOp.Operation != models.LabelOperationDel {
126 + 		return fmt.Errorf("invalid operation: %q (must be 'add' or 'del')", labelOp.Operation)
127 + 	}
128 +
129 + 	if labelOp.Subject == "" {
130 + 		return fmt.Errorf("subject URI is required")
131 + 	}
132 + 	if _, err := syntax.ParseATURI(string(labelOp.Subject)); err != nil {
133 + 		return fmt.Errorf("invalid subject URI: %w", err)
134 + 	}
135 +
136 + 	if err := v.validateOperandValue(labelDef, labelOp); err != nil {
137 + 		return fmt.Errorf("invalid operand value: %w", err)
138 + 	}
139 +
140 + 	// Validate performed time is not zero/invalid
141 + 	if labelOp.PerformedAt.IsZero() {
142 + 		return fmt.Errorf("performed_at timestamp is required")
143 + 	}
144 +
145 + 	return nil
146 + }
147 +
148 + func (v *Validator) validateOperandValue(labelDef *models.LabelDefinition, labelOp *models.LabelOp) error {
149 + 	valueType := labelDef.ValueType
150 +
151 + 	// this is permitted, it "unsets" a label
152 + 	if labelOp.OperandValue == "" {
153 + 		labelOp.Operation = models.LabelOperationDel
154 + 		return nil
155 + 	}
156 +
157 + 	switch valueType.Type {
158 + 	case models.ConcreteTypeNull:
159 + 		// For null type, value should be empty
160 + 		if labelOp.OperandValue != "null" {
161 + 			return fmt.Errorf("null type requires empty value, got %q", labelOp.OperandValue)
162 + 		}
163 +
164 + 	case models.ConcreteTypeString:
165 + 		// For string type, validate enum constraints if present
166 + 		if valueType.IsEnum() {
167 + 			if !slices.Contains(valueType.Enum, labelOp.OperandValue) {
168 + 				return fmt.Errorf("value %q is not in allowed enum values %v", labelOp.OperandValue, valueType.Enum)
169 + 			}
170 + 		}
171 +
172 + 		switch valueType.Format {
173 + 		case models.ValueTypeFormatDid:
174 + 			id, err := v.resolver.ResolveIdent(context.Background(), labelOp.OperandValue)
175 + 			if err != nil {
176 + 				return fmt.Errorf("failed to resolve did/handle: %w", err)
177 + 			}
178 +
179 + 			labelOp.OperandValue = id.DID.String()
180 +
181 + 		case models.ValueTypeFormatAny, "":
182 + 		default:
183 + 			return fmt.Errorf("unsupported format constraint: %q", valueType.Format)
184 + 		}
185 +
186 + 	case models.ConcreteTypeInt:
187 + 		if labelOp.OperandValue == "" {
188 + 			return fmt.Errorf("integer type requires non-empty value")
189 + 		}
190 + 		if _, err := fmt.Sscanf(labelOp.OperandValue, "%d", new(int)); err != nil {
191 + 			return fmt.Errorf("value %q is not a valid integer", labelOp.OperandValue)
192 + 		}
193 +
194 + 		if valueType.IsEnum() {
195 + 			if !slices.Contains(valueType.Enum, labelOp.OperandValue) {
196 + 				return fmt.Errorf("value %q is not in allowed enum values %v", labelOp.OperandValue, valueType.Enum)
197 + 			}
198 + 		}
199 +
200 + 	case models.ConcreteTypeBool:
201 + 		if labelOp.OperandValue != "true" && labelOp.OperandValue != "false" {
202 + 			return fmt.Errorf("boolean type requires value to be 'true' or 'false', got %q", labelOp.OperandValue)
203 + 		}
204 +
205 + 		// validate enum constraints if present (though uncommon for booleans)
206 + 		if valueType.IsEnum() {
207 + 			if !slices.Contains(valueType.Enum, labelOp.OperandValue) {
208 + 				return fmt.Errorf("value %q is not in allowed enum values %v", labelOp.OperandValue, valueType.Enum)
209 + 			}
210 + 		}
211 +
212 + 	default:
213 + 		return fmt.Errorf("unsupported value type: %q", valueType.Type)
214 + 	}
215 +
216 + 	return nil
217 + }
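One side effect of ValidateLabelDefinition worth calling out: on success the colour is normalised in place, trimmed, expanded from 3-digit hex by the branch above, and upper-cased. A hedged sketch (v is a *Validator; only fields already used above are assumed to exist on models.LabelDefinition):

    color := " #79ffe1 "
    def.Color = &color // def: an otherwise valid *models.LabelDefinition
    if err := v.ValidateLabelDefinition(def); err == nil {
    	fmt.Println(*def.Color) // "#79FFE1", trimmed and upper-cased; a 3-digit "#0af" would come back as "#00AAFF"
    }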
+25
appview/validator/patch.go
··· 1 + package validator 2 + 3 + import ( 4 + "fmt" 5 + "strings" 6 + 7 + "tangled.org/core/patchutil" 8 + ) 9 + 10 + func (v *Validator) ValidatePatch(patch *string) error { 11 + if patch == nil || *patch == "" { 12 + return fmt.Errorf("patch is empty") 13 + } 14 + 15 + // add newline if not present to diff style patches 16 + if !patchutil.IsFormatPatch(*patch) && !strings.HasSuffix(*patch, "\n") { 17 + *patch = *patch + "\n" 18 + } 19 + 20 + if err := patchutil.IsPatchValid(*patch); err != nil { 21 + return err 22 + } 23 + 24 + return nil 25 + }
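Note that ValidatePatch mutates its argument: for plain diff input (anything patchutil does not recognise as a git format-patch) it appends a missing trailing newline before calling patchutil.IsPatchValid. A small sketch of that side effect, assuming patchutil accepts this minimal unified diff:

    patch := "--- a/README.md\n+++ b/README.md\n@@ -1 +1 @@\n-old\n+new" // no trailing newline
    if err := v.ValidatePatch(&patch); err == nil {
    	fmt.Println(strings.HasSuffix(patch, "\n")) // true — the newline was appended in place
    }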
+53
appview/validator/repo_topics.go
··· 1 + package validator
2 +
3 + import (
4 + 	"fmt"
5 + 	"maps"
6 + 	"regexp"
7 + 	"slices"
8 + 	"strings"
9 + )
10 +
11 + const (
12 + 	maxTopicLen = 50
13 + 	maxTopics   = 20
14 + )
15 +
16 + var (
17 + 	topicRE = regexp.MustCompile(`\A[a-z0-9-]+\z`)
18 + )
19 +
20 + // ValidateRepoTopicStr parses and validates a whitespace-separated topic string.
21 + //
22 + // Rules:
23 + // - topics are separated by whitespace
24 + // - each topic may contain lowercase letters, digits, and hyphens only
25 + // - each topic must be <= 50 characters long
26 + // - no more than 20 topics allowed
27 + // - duplicates are removed
28 + func (v *Validator) ValidateRepoTopicStr(topicsStr string) ([]string, error) {
29 + 	topicsStr = strings.TrimSpace(topicsStr)
30 + 	if topicsStr == "" {
31 + 		return nil, nil
32 + 	}
33 + 	parts := strings.Fields(topicsStr)
34 + 	if len(parts) > maxTopics {
35 + 		return nil, fmt.Errorf("too many topics: %d (maximum %d)", len(parts), maxTopics)
36 + 	}
37 +
38 + 	topicSet := make(map[string]struct{})
39 +
40 + 	for _, t := range parts {
41 + 		if _, exists := topicSet[t]; exists {
42 + 			continue
43 + 		}
44 + 		if len(t) > maxTopicLen {
45 + 			return nil, fmt.Errorf("topic '%s' is too long (maximum %d characters)", t, maxTopicLen)
46 + 		}
47 + 		if !topicRE.MatchString(t) {
48 + 			return nil, fmt.Errorf("topic '%s' contains invalid characters (allowed: lowercase letters, digits, hyphens)", t)
49 + 		}
50 + 		topicSet[t] = struct{}{}
51 + 	}
52 + 	return slices.Collect(maps.Keys(topicSet)), nil
53 + }
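The rules in the doc comment translate into calls like the following; since the result is collected from a map, the returned order is not stable. A hedged usage sketch, v being a *Validator as constructed in validator.go below:

    topics, err := v.ValidateRepoTopicStr("git atproto self-hosting git")
    // err == nil; topics holds "git", "atproto", "self-hosting" in arbitrary order (the duplicate "git" is dropped)

    _, err = v.ValidateRepoTopicStr("Git")                   // rejected: uppercase letters are not allowed
    _, err = v.ValidateRepoTopicStr(strings.Repeat("a", 51)) // rejected: topic is too long (maximum 50 characters)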
+27
appview/validator/string.go
··· 1 + package validator 2 + 3 + import ( 4 + "errors" 5 + "fmt" 6 + "unicode/utf8" 7 + 8 + "tangled.org/core/appview/models" 9 + ) 10 + 11 + func (v *Validator) ValidateString(s *models.String) error { 12 + var err error 13 + 14 + if utf8.RuneCountInString(s.Filename) > 140 { 15 + err = errors.Join(err, fmt.Errorf("filename too long")) 16 + } 17 + 18 + if utf8.RuneCountInString(s.Description) > 280 { 19 + err = errors.Join(err, fmt.Errorf("description too long")) 20 + } 21 + 22 + if len(s.Contents) == 0 { 23 + err = errors.Join(err, fmt.Errorf("contents is empty")) 24 + } 25 + 26 + return err 27 + }
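Because ValidateString accumulates failures with errors.Join instead of returning on the first one, a single error value can report every violation at once. A hedged sketch (v is a *Validator; Contents is simply left empty):

    s := &models.String{Filename: strings.Repeat("x", 141)} // 141 runes, over the 140 limit
    if err := v.ValidateString(s); err != nil {
    	fmt.Println(err) // "filename too long" and "contents is empty", joined on separate lines
    }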
+17
appview/validator/uri.go
··· 1 + package validator 2 + 3 + import ( 4 + "fmt" 5 + "net/url" 6 + ) 7 + 8 + func (v *Validator) ValidateURI(uri string) error { 9 + parsed, err := url.Parse(uri) 10 + if err != nil { 11 + return fmt.Errorf("invalid uri format") 12 + } 13 + if parsed.Scheme == "" { 14 + return fmt.Errorf("uri scheme missing") 15 + } 16 + return nil 17 + }
+24
appview/validator/validator.go
··· 1 + package validator 2 + 3 + import ( 4 + "tangled.org/core/appview/db" 5 + "tangled.org/core/appview/pages/markup" 6 + "tangled.org/core/idresolver" 7 + "tangled.org/core/rbac" 8 + ) 9 + 10 + type Validator struct { 11 + db *db.DB 12 + sanitizer markup.Sanitizer 13 + resolver *idresolver.Resolver 14 + enforcer *rbac.Enforcer 15 + } 16 + 17 + func New(db *db.DB, res *idresolver.Resolver, enforcer *rbac.Enforcer) *Validator { 18 + return &Validator{ 19 + db: db, 20 + sanitizer: markup.NewSanitizer(), 21 + resolver: res, 22 + enforcer: enforcer, 23 + } 24 + }
+3113 -3022
avatar/package-lock.json
··· 1 1 { 2 - "name": "avatar", 3 - "version": "0.0.0", 4 - "lockfileVersion": 3, 5 - "requires": true, 6 - "packages": { 7 - "": { 8 - "name": "avatar", 9 - "version": "0.0.0", 10 - "devDependencies": { 11 - "@cloudflare/vitest-pool-workers": "^0.8.19", 12 - "vitest": "~3.0.7", 13 - "wrangler": "^4.14.1" 14 - } 15 - }, 16 - "node_modules/@cloudflare/kv-asset-handler": { 17 - "version": "0.4.0", 18 - "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.0.tgz", 19 - "integrity": "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA==", 20 - "dev": true, 21 - "license": "MIT OR Apache-2.0", 22 - "dependencies": { 23 - "mime": "^3.0.0" 24 - }, 25 - "engines": { 26 - "node": ">=18.0.0" 27 - } 28 - }, 29 - "node_modules/@cloudflare/unenv-preset": { 30 - "version": "2.3.1", 31 - "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.3.1.tgz", 32 - "integrity": "sha512-Xq57Qd+ADpt6hibcVBO0uLG9zzRgyRhfCUgBT9s+g3+3Ivg5zDyVgLFy40ES1VdNcu8rPNSivm9A+kGP5IVaPg==", 33 - "dev": true, 34 - "license": "MIT OR Apache-2.0", 35 - "peerDependencies": { 36 - "unenv": "2.0.0-rc.15", 37 - "workerd": "^1.20250320.0" 38 - }, 39 - "peerDependenciesMeta": { 40 - "workerd": { 41 - "optional": true 42 - } 43 - } 44 - }, 45 - "node_modules/@cloudflare/vitest-pool-workers": { 46 - "version": "0.8.24", 47 - "resolved": "https://registry.npmjs.org/@cloudflare/vitest-pool-workers/-/vitest-pool-workers-0.8.24.tgz", 48 - "integrity": "sha512-wT2PABJQ9YLYWrVu4CRZOjvmjHkdbMyLTZPU9n/7JEMM3pgG8dY41F1Rj31UsXRQaXX39A/CTPGlk58dcMUysA==", 49 - "dev": true, 50 - "license": "MIT", 51 - "dependencies": { 52 - "birpc": "0.2.14", 53 - "cjs-module-lexer": "^1.2.3", 54 - "devalue": "^4.3.0", 55 - "miniflare": "4.20250428.1", 56 - "semver": "^7.7.1", 57 - "wrangler": "4.14.1", 58 - "zod": "^3.22.3" 59 - }, 60 - "peerDependencies": { 61 - "@vitest/runner": "2.0.x - 3.1.x", 62 - "@vitest/snapshot": "2.0.x - 3.1.x", 63 - "vitest": "2.0.x - 3.1.x" 64 - } 65 - }, 66 - "node_modules/@cloudflare/workerd-darwin-64": { 67 - "version": "1.20250428.0", 68 - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20250428.0.tgz", 69 - "integrity": "sha512-6nVe9oV4Hdec6ctzMtW80TiDvNTd2oFPi3VsKqSDVaJSJbL+4b6seyJ7G/UEPI+si6JhHBSLV2/9lNXNGLjClA==", 70 - "cpu": [ 71 - "x64" 72 - ], 73 - "dev": true, 74 - "license": "Apache-2.0", 75 - "optional": true, 76 - "os": [ 77 - "darwin" 78 - ], 79 - "engines": { 80 - "node": ">=16" 81 - } 82 - }, 83 - "node_modules/@cloudflare/workerd-darwin-arm64": { 84 - "version": "1.20250428.0", 85 - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20250428.0.tgz", 86 - "integrity": "sha512-/TB7bh7SIJ5f+6r4PHsAz7+9Qal/TK1cJuKFkUno1kqGlZbdrMwH0ATYwlWC/nBFeu2FB3NUolsTntEuy23hnQ==", 87 - "cpu": [ 88 - "arm64" 89 - ], 90 - "dev": true, 91 - "license": "Apache-2.0", 92 - "optional": true, 93 - "os": [ 94 - "darwin" 95 - ], 96 - "engines": { 97 - "node": ">=16" 98 - } 99 - }, 100 - "node_modules/@cloudflare/workerd-linux-64": { 101 - "version": "1.20250428.0", 102 - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20250428.0.tgz", 103 - "integrity": "sha512-9eCbj+R3CKqpiXP6DfAA20DxKge+OTj7Hyw3ZewiEhWH9INIHiJwJQYybu4iq9kJEGjnGvxgguLFjSCWm26hgg==", 104 - "cpu": [ 105 - "x64" 106 - ], 107 - "dev": true, 108 - "license": "Apache-2.0", 109 - "optional": true, 110 - "os": [ 111 - "linux" 112 - ], 113 - "engines": 
{ 114 - "node": ">=16" 115 - } 116 - }, 117 - "node_modules/@cloudflare/workerd-linux-arm64": { 118 - "version": "1.20250428.0", 119 - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20250428.0.tgz", 120 - "integrity": "sha512-D9NRBnW46nl1EQsP13qfkYb5lbt4C6nxl38SBKY/NOcZAUoHzNB5K0GaK8LxvpkM7X/97ySojlMfR5jh5DNXYQ==", 121 - "cpu": [ 122 - "arm64" 123 - ], 124 - "dev": true, 125 - "license": "Apache-2.0", 126 - "optional": true, 127 - "os": [ 128 - "linux" 129 - ], 130 - "engines": { 131 - "node": ">=16" 132 - } 133 - }, 134 - "node_modules/@cloudflare/workerd-windows-64": { 135 - "version": "1.20250428.0", 136 - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20250428.0.tgz", 137 - "integrity": "sha512-RQCRj28eitjKD0tmei6iFOuWqMuHMHdNGEigRmbkmuTlpbWHNAoHikgCzZQ/dkKDdatA76TmcpbyECNf31oaTA==", 138 - "cpu": [ 139 - "x64" 140 - ], 141 - "dev": true, 142 - "license": "Apache-2.0", 143 - "optional": true, 144 - "os": [ 145 - "win32" 146 - ], 147 - "engines": { 148 - "node": ">=16" 149 - } 150 - }, 151 - "node_modules/@cspotcode/source-map-support": { 152 - "version": "0.8.1", 153 - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", 154 - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", 155 - "dev": true, 156 - "license": "MIT", 157 - "dependencies": { 158 - "@jridgewell/trace-mapping": "0.3.9" 159 - }, 160 - "engines": { 161 - "node": ">=12" 162 - } 163 - }, 164 - "node_modules/@emnapi/runtime": { 165 - "version": "1.4.3", 166 - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.3.tgz", 167 - "integrity": "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==", 168 - "dev": true, 169 - "license": "MIT", 170 - "optional": true, 171 - "dependencies": { 172 - "tslib": "^2.4.0" 173 - } 174 - }, 175 - "node_modules/@esbuild/aix-ppc64": { 176 - "version": "0.25.3", 177 - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.3.tgz", 178 - "integrity": "sha512-W8bFfPA8DowP8l//sxjJLSLkD8iEjMc7cBVyP+u4cEv9sM7mdUCkgsj+t0n/BWPFtv7WWCN5Yzj0N6FJNUUqBQ==", 179 - "cpu": [ 180 - "ppc64" 181 - ], 182 - "dev": true, 183 - "license": "MIT", 184 - "optional": true, 185 - "os": [ 186 - "aix" 187 - ], 188 - "engines": { 189 - "node": ">=18" 190 - } 191 - }, 192 - "node_modules/@esbuild/android-arm": { 193 - "version": "0.25.3", 194 - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.3.tgz", 195 - "integrity": "sha512-PuwVXbnP87Tcff5I9ngV0lmiSu40xw1At6i3GsU77U7cjDDB4s0X2cyFuBiDa1SBk9DnvWwnGvVaGBqoFWPb7A==", 196 - "cpu": [ 197 - "arm" 198 - ], 199 - "dev": true, 200 - "license": "MIT", 201 - "optional": true, 202 - "os": [ 203 - "android" 204 - ], 205 - "engines": { 206 - "node": ">=18" 207 - } 208 - }, 209 - "node_modules/@esbuild/android-arm64": { 210 - "version": "0.25.3", 211 - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.3.tgz", 212 - "integrity": "sha512-XelR6MzjlZuBM4f5z2IQHK6LkK34Cvv6Rj2EntER3lwCBFdg6h2lKbtRjpTTsdEjD/WSe1q8UyPBXP1x3i/wYQ==", 213 - "cpu": [ 214 - "arm64" 215 - ], 216 - "dev": true, 217 - "license": "MIT", 218 - "optional": true, 219 - "os": [ 220 - "android" 221 - ], 222 - "engines": { 223 - "node": ">=18" 224 - } 225 - }, 226 - "node_modules/@esbuild/android-x64": { 227 - "version": "0.25.3", 228 - "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.3.tgz", 229 - "integrity": "sha512-ogtTpYHT/g1GWS/zKM0cc/tIebFjm1F9Aw1boQ2Y0eUQ+J89d0jFY//s9ei9jVIlkYi8AfOjiixcLJSGNSOAdQ==", 230 - "cpu": [ 231 - "x64" 232 - ], 233 - "dev": true, 234 - "license": "MIT", 235 - "optional": true, 236 - "os": [ 237 - "android" 238 - ], 239 - "engines": { 240 - "node": ">=18" 241 - } 242 - }, 243 - "node_modules/@esbuild/darwin-arm64": { 244 - "version": "0.25.3", 245 - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.3.tgz", 246 - "integrity": "sha512-eESK5yfPNTqpAmDfFWNsOhmIOaQA59tAcF/EfYvo5/QWQCzXn5iUSOnqt3ra3UdzBv073ykTtmeLJZGt3HhA+w==", 247 - "cpu": [ 248 - "arm64" 249 - ], 250 - "dev": true, 251 - "license": "MIT", 252 - "optional": true, 253 - "os": [ 254 - "darwin" 255 - ], 256 - "engines": { 257 - "node": ">=18" 258 - } 259 - }, 260 - "node_modules/@esbuild/darwin-x64": { 261 - "version": "0.25.3", 262 - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.3.tgz", 263 - "integrity": "sha512-Kd8glo7sIZtwOLcPbW0yLpKmBNWMANZhrC1r6K++uDR2zyzb6AeOYtI6udbtabmQpFaxJ8uduXMAo1gs5ozz8A==", 264 - "cpu": [ 265 - "x64" 266 - ], 267 - "dev": true, 268 - "license": "MIT", 269 - "optional": true, 270 - "os": [ 271 - "darwin" 272 - ], 273 - "engines": { 274 - "node": ">=18" 275 - } 276 - }, 277 - "node_modules/@esbuild/freebsd-arm64": { 278 - "version": "0.25.3", 279 - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.3.tgz", 280 - "integrity": "sha512-EJiyS70BYybOBpJth3M0KLOus0n+RRMKTYzhYhFeMwp7e/RaajXvP+BWlmEXNk6uk+KAu46j/kaQzr6au+JcIw==", 281 - "cpu": [ 282 - "arm64" 283 - ], 284 - "dev": true, 285 - "license": "MIT", 286 - "optional": true, 287 - "os": [ 288 - "freebsd" 289 - ], 290 - "engines": { 291 - "node": ">=18" 292 - } 293 - }, 294 - "node_modules/@esbuild/freebsd-x64": { 295 - "version": "0.25.3", 296 - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.3.tgz", 297 - "integrity": "sha512-Q+wSjaLpGxYf7zC0kL0nDlhsfuFkoN+EXrx2KSB33RhinWzejOd6AvgmP5JbkgXKmjhmpfgKZq24pneodYqE8Q==", 298 - "cpu": [ 299 - "x64" 300 - ], 301 - "dev": true, 302 - "license": "MIT", 303 - "optional": true, 304 - "os": [ 305 - "freebsd" 306 - ], 307 - "engines": { 308 - "node": ">=18" 309 - } 310 - }, 311 - "node_modules/@esbuild/linux-arm": { 312 - "version": "0.25.3", 313 - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.3.tgz", 314 - "integrity": "sha512-dUOVmAUzuHy2ZOKIHIKHCm58HKzFqd+puLaS424h6I85GlSDRZIA5ycBixb3mFgM0Jdh+ZOSB6KptX30DD8YOQ==", 315 - "cpu": [ 316 - "arm" 317 - ], 318 - "dev": true, 319 - "license": "MIT", 320 - "optional": true, 321 - "os": [ 322 - "linux" 323 - ], 324 - "engines": { 325 - "node": ">=18" 326 - } 327 - }, 328 - "node_modules/@esbuild/linux-arm64": { 329 - "version": "0.25.3", 330 - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.3.tgz", 331 - "integrity": "sha512-xCUgnNYhRD5bb1C1nqrDV1PfkwgbswTTBRbAd8aH5PhYzikdf/ddtsYyMXFfGSsb/6t6QaPSzxtbfAZr9uox4A==", 332 - "cpu": [ 333 - "arm64" 334 - ], 335 - "dev": true, 336 - "license": "MIT", 337 - "optional": true, 338 - "os": [ 339 - "linux" 340 - ], 341 - "engines": { 342 - "node": ">=18" 343 - } 344 - }, 345 - "node_modules/@esbuild/linux-ia32": { 346 - "version": "0.25.3", 347 - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.3.tgz", 348 - "integrity": 
"sha512-yplPOpczHOO4jTYKmuYuANI3WhvIPSVANGcNUeMlxH4twz/TeXuzEP41tGKNGWJjuMhotpGabeFYGAOU2ummBw==", 349 - "cpu": [ 350 - "ia32" 351 - ], 352 - "dev": true, 353 - "license": "MIT", 354 - "optional": true, 355 - "os": [ 356 - "linux" 357 - ], 358 - "engines": { 359 - "node": ">=18" 360 - } 361 - }, 362 - "node_modules/@esbuild/linux-loong64": { 363 - "version": "0.25.3", 364 - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.3.tgz", 365 - "integrity": "sha512-P4BLP5/fjyihmXCELRGrLd793q/lBtKMQl8ARGpDxgzgIKJDRJ/u4r1A/HgpBpKpKZelGct2PGI4T+axcedf6g==", 366 - "cpu": [ 367 - "loong64" 368 - ], 369 - "dev": true, 370 - "license": "MIT", 371 - "optional": true, 372 - "os": [ 373 - "linux" 374 - ], 375 - "engines": { 376 - "node": ">=18" 377 - } 378 - }, 379 - "node_modules/@esbuild/linux-mips64el": { 380 - "version": "0.25.3", 381 - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.3.tgz", 382 - "integrity": "sha512-eRAOV2ODpu6P5divMEMa26RRqb2yUoYsuQQOuFUexUoQndm4MdpXXDBbUoKIc0iPa4aCO7gIhtnYomkn2x+bag==", 383 - "cpu": [ 384 - "mips64el" 385 - ], 386 - "dev": true, 387 - "license": "MIT", 388 - "optional": true, 389 - "os": [ 390 - "linux" 391 - ], 392 - "engines": { 393 - "node": ">=18" 394 - } 395 - }, 396 - "node_modules/@esbuild/linux-ppc64": { 397 - "version": "0.25.3", 398 - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.3.tgz", 399 - "integrity": "sha512-ZC4jV2p7VbzTlnl8nZKLcBkfzIf4Yad1SJM4ZMKYnJqZFD4rTI+pBG65u8ev4jk3/MPwY9DvGn50wi3uhdaghg==", 400 - "cpu": [ 401 - "ppc64" 402 - ], 403 - "dev": true, 404 - "license": "MIT", 405 - "optional": true, 406 - "os": [ 407 - "linux" 408 - ], 409 - "engines": { 410 - "node": ">=18" 411 - } 412 - }, 413 - "node_modules/@esbuild/linux-riscv64": { 414 - "version": "0.25.3", 415 - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.3.tgz", 416 - "integrity": "sha512-LDDODcFzNtECTrUUbVCs6j9/bDVqy7DDRsuIXJg6so+mFksgwG7ZVnTruYi5V+z3eE5y+BJZw7VvUadkbfg7QA==", 417 - "cpu": [ 418 - "riscv64" 419 - ], 420 - "dev": true, 421 - "license": "MIT", 422 - "optional": true, 423 - "os": [ 424 - "linux" 425 - ], 426 - "engines": { 427 - "node": ">=18" 428 - } 429 - }, 430 - "node_modules/@esbuild/linux-s390x": { 431 - "version": "0.25.3", 432 - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.3.tgz", 433 - "integrity": "sha512-s+w/NOY2k0yC2p9SLen+ymflgcpRkvwwa02fqmAwhBRI3SC12uiS10edHHXlVWwfAagYSY5UpmT/zISXPMW3tQ==", 434 - "cpu": [ 435 - "s390x" 436 - ], 437 - "dev": true, 438 - "license": "MIT", 439 - "optional": true, 440 - "os": [ 441 - "linux" 442 - ], 443 - "engines": { 444 - "node": ">=18" 445 - } 446 - }, 447 - "node_modules/@esbuild/linux-x64": { 448 - "version": "0.25.3", 449 - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.3.tgz", 450 - "integrity": "sha512-nQHDz4pXjSDC6UfOE1Fw9Q8d6GCAd9KdvMZpfVGWSJztYCarRgSDfOVBY5xwhQXseiyxapkiSJi/5/ja8mRFFA==", 451 - "cpu": [ 452 - "x64" 453 - ], 454 - "dev": true, 455 - "license": "MIT", 456 - "optional": true, 457 - "os": [ 458 - "linux" 459 - ], 460 - "engines": { 461 - "node": ">=18" 462 - } 463 - }, 464 - "node_modules/@esbuild/netbsd-arm64": { 465 - "version": "0.25.3", 466 - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.3.tgz", 467 - "integrity": "sha512-1QaLtOWq0mzK6tzzp0jRN3eccmN3hezey7mhLnzC6oNlJoUJz4nym5ZD7mDnS/LZQgkrhEbEiTn515lPeLpgWA==", 468 - "cpu": [ 469 - "arm64" 470 - ], 471 - "dev": 
true, 472 - "license": "MIT", 473 - "optional": true, 474 - "os": [ 475 - "netbsd" 476 - ], 477 - "engines": { 478 - "node": ">=18" 479 - } 480 - }, 481 - "node_modules/@esbuild/netbsd-x64": { 482 - "version": "0.25.3", 483 - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.3.tgz", 484 - "integrity": "sha512-i5Hm68HXHdgv8wkrt+10Bc50zM0/eonPb/a/OFVfB6Qvpiirco5gBA5bz7S2SHuU+Y4LWn/zehzNX14Sp4r27g==", 485 - "cpu": [ 486 - "x64" 487 - ], 488 - "dev": true, 489 - "license": "MIT", 490 - "optional": true, 491 - "os": [ 492 - "netbsd" 493 - ], 494 - "engines": { 495 - "node": ">=18" 496 - } 497 - }, 498 - "node_modules/@esbuild/openbsd-arm64": { 499 - "version": "0.25.3", 500 - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.3.tgz", 501 - "integrity": "sha512-zGAVApJEYTbOC6H/3QBr2mq3upG/LBEXr85/pTtKiv2IXcgKV0RT0QA/hSXZqSvLEpXeIxah7LczB4lkiYhTAQ==", 502 - "cpu": [ 503 - "arm64" 504 - ], 505 - "dev": true, 506 - "license": "MIT", 507 - "optional": true, 508 - "os": [ 509 - "openbsd" 510 - ], 511 - "engines": { 512 - "node": ">=18" 513 - } 514 - }, 515 - "node_modules/@esbuild/openbsd-x64": { 516 - "version": "0.25.3", 517 - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.3.tgz", 518 - "integrity": "sha512-fpqctI45NnCIDKBH5AXQBsD0NDPbEFczK98hk/aa6HJxbl+UtLkJV2+Bvy5hLSLk3LHmqt0NTkKNso1A9y1a4w==", 519 - "cpu": [ 520 - "x64" 521 - ], 522 - "dev": true, 523 - "license": "MIT", 524 - "optional": true, 525 - "os": [ 526 - "openbsd" 527 - ], 528 - "engines": { 529 - "node": ">=18" 530 - } 531 - }, 532 - "node_modules/@esbuild/sunos-x64": { 533 - "version": "0.25.3", 534 - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.3.tgz", 535 - "integrity": "sha512-ROJhm7d8bk9dMCUZjkS8fgzsPAZEjtRJqCAmVgB0gMrvG7hfmPmz9k1rwO4jSiblFjYmNvbECL9uhaPzONMfgA==", 536 - "cpu": [ 537 - "x64" 538 - ], 539 - "dev": true, 540 - "license": "MIT", 541 - "optional": true, 542 - "os": [ 543 - "sunos" 544 - ], 545 - "engines": { 546 - "node": ">=18" 547 - } 548 - }, 549 - "node_modules/@esbuild/win32-arm64": { 550 - "version": "0.25.3", 551 - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.3.tgz", 552 - "integrity": "sha512-YWcow8peiHpNBiIXHwaswPnAXLsLVygFwCB3A7Bh5jRkIBFWHGmNQ48AlX4xDvQNoMZlPYzjVOQDYEzWCqufMQ==", 553 - "cpu": [ 554 - "arm64" 555 - ], 556 - "dev": true, 557 - "license": "MIT", 558 - "optional": true, 559 - "os": [ 560 - "win32" 561 - ], 562 - "engines": { 563 - "node": ">=18" 564 - } 565 - }, 566 - "node_modules/@esbuild/win32-ia32": { 567 - "version": "0.25.3", 568 - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.3.tgz", 569 - "integrity": "sha512-qspTZOIGoXVS4DpNqUYUs9UxVb04khS1Degaw/MnfMe7goQ3lTfQ13Vw4qY/Nj0979BGvMRpAYbs/BAxEvU8ew==", 570 - "cpu": [ 571 - "ia32" 572 - ], 573 - "dev": true, 574 - "license": "MIT", 575 - "optional": true, 576 - "os": [ 577 - "win32" 578 - ], 579 - "engines": { 580 - "node": ">=18" 581 - } 582 - }, 583 - "node_modules/@esbuild/win32-x64": { 584 - "version": "0.25.3", 585 - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.3.tgz", 586 - "integrity": "sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg==", 587 - "cpu": [ 588 - "x64" 589 - ], 590 - "dev": true, 591 - "license": "MIT", 592 - "optional": true, 593 - "os": [ 594 - "win32" 595 - ], 596 - "engines": { 597 - "node": ">=18" 598 - } 599 - }, 600 - "node_modules/@fastify/busboy": { 
601 - "version": "2.1.1", 602 - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", 603 - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", 604 - "dev": true, 605 - "license": "MIT", 606 - "engines": { 607 - "node": ">=14" 608 - } 609 - }, 610 - "node_modules/@img/sharp-darwin-arm64": { 611 - "version": "0.33.5", 612 - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", 613 - "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", 614 - "cpu": [ 615 - "arm64" 616 - ], 617 - "dev": true, 618 - "license": "Apache-2.0", 619 - "optional": true, 620 - "os": [ 621 - "darwin" 622 - ], 623 - "engines": { 624 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 625 - }, 626 - "funding": { 627 - "url": "https://opencollective.com/libvips" 628 - }, 629 - "optionalDependencies": { 630 - "@img/sharp-libvips-darwin-arm64": "1.0.4" 631 - } 632 - }, 633 - "node_modules/@img/sharp-darwin-x64": { 634 - "version": "0.33.5", 635 - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", 636 - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", 637 - "cpu": [ 638 - "x64" 639 - ], 640 - "dev": true, 641 - "license": "Apache-2.0", 642 - "optional": true, 643 - "os": [ 644 - "darwin" 645 - ], 646 - "engines": { 647 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 648 - }, 649 - "funding": { 650 - "url": "https://opencollective.com/libvips" 651 - }, 652 - "optionalDependencies": { 653 - "@img/sharp-libvips-darwin-x64": "1.0.4" 654 - } 655 - }, 656 - "node_modules/@img/sharp-libvips-darwin-arm64": { 657 - "version": "1.0.4", 658 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", 659 - "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", 660 - "cpu": [ 661 - "arm64" 662 - ], 663 - "dev": true, 664 - "license": "LGPL-3.0-or-later", 665 - "optional": true, 666 - "os": [ 667 - "darwin" 668 - ], 669 - "funding": { 670 - "url": "https://opencollective.com/libvips" 671 - } 672 - }, 673 - "node_modules/@img/sharp-libvips-darwin-x64": { 674 - "version": "1.0.4", 675 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", 676 - "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", 677 - "cpu": [ 678 - "x64" 679 - ], 680 - "dev": true, 681 - "license": "LGPL-3.0-or-later", 682 - "optional": true, 683 - "os": [ 684 - "darwin" 685 - ], 686 - "funding": { 687 - "url": "https://opencollective.com/libvips" 688 - } 689 - }, 690 - "node_modules/@img/sharp-libvips-linux-arm": { 691 - "version": "1.0.5", 692 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", 693 - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", 694 - "cpu": [ 695 - "arm" 696 - ], 697 - "dev": true, 698 - "license": "LGPL-3.0-or-later", 699 - "optional": true, 700 - "os": [ 701 - "linux" 702 - ], 703 - "funding": { 704 - "url": "https://opencollective.com/libvips" 705 - } 706 - }, 707 - "node_modules/@img/sharp-libvips-linux-arm64": { 708 - "version": "1.0.4", 709 - "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", 710 - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", 711 - "cpu": [ 712 - "arm64" 713 - ], 714 - "dev": true, 715 - "license": "LGPL-3.0-or-later", 716 - "optional": true, 717 - "os": [ 718 - "linux" 719 - ], 720 - "funding": { 721 - "url": "https://opencollective.com/libvips" 722 - } 723 - }, 724 - "node_modules/@img/sharp-libvips-linux-s390x": { 725 - "version": "1.0.4", 726 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", 727 - "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", 728 - "cpu": [ 729 - "s390x" 730 - ], 731 - "dev": true, 732 - "license": "LGPL-3.0-or-later", 733 - "optional": true, 734 - "os": [ 735 - "linux" 736 - ], 737 - "funding": { 738 - "url": "https://opencollective.com/libvips" 739 - } 740 - }, 741 - "node_modules/@img/sharp-libvips-linux-x64": { 742 - "version": "1.0.4", 743 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", 744 - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", 745 - "cpu": [ 746 - "x64" 747 - ], 748 - "dev": true, 749 - "license": "LGPL-3.0-or-later", 750 - "optional": true, 751 - "os": [ 752 - "linux" 753 - ], 754 - "funding": { 755 - "url": "https://opencollective.com/libvips" 756 - } 757 - }, 758 - "node_modules/@img/sharp-libvips-linuxmusl-arm64": { 759 - "version": "1.0.4", 760 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", 761 - "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", 762 - "cpu": [ 763 - "arm64" 764 - ], 765 - "dev": true, 766 - "license": "LGPL-3.0-or-later", 767 - "optional": true, 768 - "os": [ 769 - "linux" 770 - ], 771 - "funding": { 772 - "url": "https://opencollective.com/libvips" 773 - } 774 - }, 775 - "node_modules/@img/sharp-libvips-linuxmusl-x64": { 776 - "version": "1.0.4", 777 - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", 778 - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", 779 - "cpu": [ 780 - "x64" 781 - ], 782 - "dev": true, 783 - "license": "LGPL-3.0-or-later", 784 - "optional": true, 785 - "os": [ 786 - "linux" 787 - ], 788 - "funding": { 789 - "url": "https://opencollective.com/libvips" 790 - } 791 - }, 792 - "node_modules/@img/sharp-linux-arm": { 793 - "version": "0.33.5", 794 - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", 795 - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", 796 - "cpu": [ 797 - "arm" 798 - ], 799 - "dev": true, 800 - "license": "Apache-2.0", 801 - "optional": true, 802 - "os": [ 803 - "linux" 804 - ], 805 - "engines": { 806 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 807 - }, 808 - "funding": { 809 - "url": "https://opencollective.com/libvips" 810 - }, 811 - "optionalDependencies": { 812 - "@img/sharp-libvips-linux-arm": "1.0.5" 813 - } 814 - }, 815 - "node_modules/@img/sharp-linux-arm64": { 816 - "version": "0.33.5", 817 - "resolved": 
"https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", 818 - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", 819 - "cpu": [ 820 - "arm64" 821 - ], 822 - "dev": true, 823 - "license": "Apache-2.0", 824 - "optional": true, 825 - "os": [ 826 - "linux" 827 - ], 828 - "engines": { 829 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 830 - }, 831 - "funding": { 832 - "url": "https://opencollective.com/libvips" 833 - }, 834 - "optionalDependencies": { 835 - "@img/sharp-libvips-linux-arm64": "1.0.4" 836 - } 837 - }, 838 - "node_modules/@img/sharp-linux-s390x": { 839 - "version": "0.33.5", 840 - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", 841 - "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", 842 - "cpu": [ 843 - "s390x" 844 - ], 845 - "dev": true, 846 - "license": "Apache-2.0", 847 - "optional": true, 848 - "os": [ 849 - "linux" 850 - ], 851 - "engines": { 852 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 853 - }, 854 - "funding": { 855 - "url": "https://opencollective.com/libvips" 856 - }, 857 - "optionalDependencies": { 858 - "@img/sharp-libvips-linux-s390x": "1.0.4" 859 - } 860 - }, 861 - "node_modules/@img/sharp-linux-x64": { 862 - "version": "0.33.5", 863 - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", 864 - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", 865 - "cpu": [ 866 - "x64" 867 - ], 868 - "dev": true, 869 - "license": "Apache-2.0", 870 - "optional": true, 871 - "os": [ 872 - "linux" 873 - ], 874 - "engines": { 875 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 876 - }, 877 - "funding": { 878 - "url": "https://opencollective.com/libvips" 879 - }, 880 - "optionalDependencies": { 881 - "@img/sharp-libvips-linux-x64": "1.0.4" 882 - } 883 - }, 884 - "node_modules/@img/sharp-linuxmusl-arm64": { 885 - "version": "0.33.5", 886 - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", 887 - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", 888 - "cpu": [ 889 - "arm64" 890 - ], 891 - "dev": true, 892 - "license": "Apache-2.0", 893 - "optional": true, 894 - "os": [ 895 - "linux" 896 - ], 897 - "engines": { 898 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 899 - }, 900 - "funding": { 901 - "url": "https://opencollective.com/libvips" 902 - }, 903 - "optionalDependencies": { 904 - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" 905 - } 906 - }, 907 - "node_modules/@img/sharp-linuxmusl-x64": { 908 - "version": "0.33.5", 909 - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", 910 - "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", 911 - "cpu": [ 912 - "x64" 913 - ], 914 - "dev": true, 915 - "license": "Apache-2.0", 916 - "optional": true, 917 - "os": [ 918 - "linux" 919 - ], 920 - "engines": { 921 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 922 - }, 923 - "funding": { 924 - "url": "https://opencollective.com/libvips" 925 - }, 926 - "optionalDependencies": { 927 - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" 928 - } 929 - }, 930 - "node_modules/@img/sharp-wasm32": { 931 - "version": "0.33.5", 932 - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", 933 
- "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", 934 - "cpu": [ 935 - "wasm32" 936 - ], 937 - "dev": true, 938 - "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", 939 - "optional": true, 940 - "dependencies": { 941 - "@emnapi/runtime": "^1.2.0" 942 - }, 943 - "engines": { 944 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 945 - }, 946 - "funding": { 947 - "url": "https://opencollective.com/libvips" 948 - } 949 - }, 950 - "node_modules/@img/sharp-win32-ia32": { 951 - "version": "0.33.5", 952 - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", 953 - "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", 954 - "cpu": [ 955 - "ia32" 956 - ], 957 - "dev": true, 958 - "license": "Apache-2.0 AND LGPL-3.0-or-later", 959 - "optional": true, 960 - "os": [ 961 - "win32" 962 - ], 963 - "engines": { 964 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 965 - }, 966 - "funding": { 967 - "url": "https://opencollective.com/libvips" 968 - } 969 - }, 970 - "node_modules/@img/sharp-win32-x64": { 971 - "version": "0.33.5", 972 - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", 973 - "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", 974 - "cpu": [ 975 - "x64" 976 - ], 977 - "dev": true, 978 - "license": "Apache-2.0 AND LGPL-3.0-or-later", 979 - "optional": true, 980 - "os": [ 981 - "win32" 982 - ], 983 - "engines": { 984 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 985 - }, 986 - "funding": { 987 - "url": "https://opencollective.com/libvips" 988 - } 989 - }, 990 - "node_modules/@jridgewell/resolve-uri": { 991 - "version": "3.1.2", 992 - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", 993 - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", 994 - "dev": true, 995 - "license": "MIT", 996 - "engines": { 997 - "node": ">=6.0.0" 998 - } 999 - }, 1000 - "node_modules/@jridgewell/sourcemap-codec": { 1001 - "version": "1.5.0", 1002 - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", 1003 - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", 1004 - "dev": true, 1005 - "license": "MIT" 1006 - }, 1007 - "node_modules/@jridgewell/trace-mapping": { 1008 - "version": "0.3.9", 1009 - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", 1010 - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", 1011 - "dev": true, 1012 - "license": "MIT", 1013 - "dependencies": { 1014 - "@jridgewell/resolve-uri": "^3.0.3", 1015 - "@jridgewell/sourcemap-codec": "^1.4.10" 1016 - } 1017 - }, 1018 - "node_modules/@rollup/rollup-android-arm-eabi": { 1019 - "version": "4.40.1", 1020 - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.40.1.tgz", 1021 - "integrity": "sha512-kxz0YeeCrRUHz3zyqvd7n+TVRlNyTifBsmnmNPtk3hQURUyG9eAB+usz6DAwagMusjx/zb3AjvDUvhFGDAexGw==", 1022 - "cpu": [ 1023 - "arm" 1024 - ], 1025 - "dev": true, 1026 - "license": "MIT", 1027 - "optional": true, 1028 - "os": [ 1029 - "android" 1030 - ] 1031 - }, 1032 - "node_modules/@rollup/rollup-android-arm64": { 1033 - "version": "4.40.1", 1034 - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.40.1.tgz", 1035 - "integrity": "sha512-PPkxTOisoNC6TpnDKatjKkjRMsdaWIhyuMkA4UsBXT9WEZY4uHezBTjs6Vl4PbqQQeu6oION1w2voYZv9yquCw==", 1036 - "cpu": [ 1037 - "arm64" 1038 - ], 1039 - "dev": true, 1040 - "license": "MIT", 1041 - "optional": true, 1042 - "os": [ 1043 - "android" 1044 - ] 1045 - }, 1046 - "node_modules/@rollup/rollup-darwin-arm64": { 1047 - "version": "4.40.1", 1048 - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.40.1.tgz", 1049 - "integrity": "sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==", 1050 - "cpu": [ 1051 - "arm64" 1052 - ], 1053 - "dev": true, 1054 - "license": "MIT", 1055 - "optional": true, 1056 - "os": [ 1057 - "darwin" 1058 - ] 1059 - }, 1060 - "node_modules/@rollup/rollup-darwin-x64": { 1061 - "version": "4.40.1", 1062 - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.40.1.tgz", 1063 - "integrity": "sha512-nIwkXafAI1/QCS7pxSpv/ZtFW6TXcNUEHAIA9EIyw5OzxJZQ1YDrX+CL6JAIQgZ33CInl1R6mHet9Y/UZTg2Bw==", 1064 - "cpu": [ 1065 - "x64" 1066 - ], 1067 - "dev": true, 1068 - "license": "MIT", 1069 - "optional": true, 1070 - "os": [ 1071 - "darwin" 1072 - ] 1073 - }, 1074 - "node_modules/@rollup/rollup-freebsd-arm64": { 1075 - "version": "4.40.1", 1076 - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.40.1.tgz", 1077 - "integrity": "sha512-BdrLJ2mHTrIYdaS2I99mriyJfGGenSaP+UwGi1kB9BLOCu9SR8ZpbkmmalKIALnRw24kM7qCN0IOm6L0S44iWw==", 1078 - "cpu": [ 1079 - "arm64" 1080 - ], 1081 - "dev": true, 1082 - "license": "MIT", 1083 - "optional": true, 1084 - "os": [ 1085 - "freebsd" 1086 - ] 1087 - }, 1088 - "node_modules/@rollup/rollup-freebsd-x64": { 1089 - "version": "4.40.1", 1090 - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.40.1.tgz", 1091 - "integrity": "sha512-VXeo/puqvCG8JBPNZXZf5Dqq7BzElNJzHRRw3vjBE27WujdzuOPecDPc/+1DcdcTptNBep3861jNq0mYkT8Z6Q==", 1092 - "cpu": [ 1093 - "x64" 1094 - ], 1095 - "dev": true, 1096 - "license": "MIT", 1097 - "optional": true, 1098 - "os": [ 1099 - "freebsd" 1100 - ] 1101 - }, 1102 - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { 1103 - "version": "4.40.1", 1104 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.40.1.tgz", 1105 - "integrity": "sha512-ehSKrewwsESPt1TgSE/na9nIhWCosfGSFqv7vwEtjyAqZcvbGIg4JAcV7ZEh2tfj/IlfBeZjgOXm35iOOjadcg==", 1106 - "cpu": [ 1107 - "arm" 1108 - ], 1109 - "dev": true, 1110 - "license": "MIT", 1111 - "optional": true, 1112 - "os": [ 1113 - "linux" 1114 - ] 1115 - }, 1116 - "node_modules/@rollup/rollup-linux-arm-musleabihf": { 1117 - "version": "4.40.1", 1118 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.40.1.tgz", 1119 - "integrity": "sha512-m39iO/aaurh5FVIu/F4/Zsl8xppd76S4qoID8E+dSRQvTyZTOI2gVk3T4oqzfq1PtcvOfAVlwLMK3KRQMaR8lg==", 1120 - "cpu": [ 1121 - "arm" 1122 - ], 1123 - "dev": true, 1124 - "license": "MIT", 1125 - "optional": true, 1126 - "os": [ 1127 - "linux" 1128 - ] 1129 - }, 1130 - "node_modules/@rollup/rollup-linux-arm64-gnu": { 1131 - "version": "4.40.1", 1132 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.40.1.tgz", 1133 - "integrity": "sha512-Y+GHnGaku4aVLSgrT0uWe2o2Rq8te9hi+MwqGF9r9ORgXhmHK5Q71N757u0F8yU1OIwUIFy6YiJtKjtyktk5hg==", 1134 
- "cpu": [ 1135 - "arm64" 1136 - ], 1137 - "dev": true, 1138 - "license": "MIT", 1139 - "optional": true, 1140 - "os": [ 1141 - "linux" 1142 - ] 1143 - }, 1144 - "node_modules/@rollup/rollup-linux-arm64-musl": { 1145 - "version": "4.40.1", 1146 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.40.1.tgz", 1147 - "integrity": "sha512-jEwjn3jCA+tQGswK3aEWcD09/7M5wGwc6+flhva7dsQNRZZTe30vkalgIzV4tjkopsTS9Jd7Y1Bsj6a4lzz8gQ==", 1148 - "cpu": [ 1149 - "arm64" 1150 - ], 1151 - "dev": true, 1152 - "license": "MIT", 1153 - "optional": true, 1154 - "os": [ 1155 - "linux" 1156 - ] 1157 - }, 1158 - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { 1159 - "version": "4.40.1", 1160 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.40.1.tgz", 1161 - "integrity": "sha512-ySyWikVhNzv+BV/IDCsrraOAZ3UaC8SZB67FZlqVwXwnFhPihOso9rPOxzZbjp81suB1O2Topw+6Ug3JNegejQ==", 1162 - "cpu": [ 1163 - "loong64" 1164 - ], 1165 - "dev": true, 1166 - "license": "MIT", 1167 - "optional": true, 1168 - "os": [ 1169 - "linux" 1170 - ] 1171 - }, 1172 - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { 1173 - "version": "4.40.1", 1174 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.40.1.tgz", 1175 - "integrity": "sha512-BvvA64QxZlh7WZWqDPPdt0GH4bznuL6uOO1pmgPnnv86rpUpc8ZxgZwcEgXvo02GRIZX1hQ0j0pAnhwkhwPqWg==", 1176 - "cpu": [ 1177 - "ppc64" 1178 - ], 1179 - "dev": true, 1180 - "license": "MIT", 1181 - "optional": true, 1182 - "os": [ 1183 - "linux" 1184 - ] 1185 - }, 1186 - "node_modules/@rollup/rollup-linux-riscv64-gnu": { 1187 - "version": "4.40.1", 1188 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.40.1.tgz", 1189 - "integrity": "sha512-EQSP+8+1VuSulm9RKSMKitTav89fKbHymTf25n5+Yr6gAPZxYWpj3DzAsQqoaHAk9YX2lwEyAf9S4W8F4l3VBQ==", 1190 - "cpu": [ 1191 - "riscv64" 1192 - ], 1193 - "dev": true, 1194 - "license": "MIT", 1195 - "optional": true, 1196 - "os": [ 1197 - "linux" 1198 - ] 1199 - }, 1200 - "node_modules/@rollup/rollup-linux-riscv64-musl": { 1201 - "version": "4.40.1", 1202 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.40.1.tgz", 1203 - "integrity": "sha512-n/vQ4xRZXKuIpqukkMXZt9RWdl+2zgGNx7Uda8NtmLJ06NL8jiHxUawbwC+hdSq1rrw/9CghCpEONor+l1e2gA==", 1204 - "cpu": [ 1205 - "riscv64" 1206 - ], 1207 - "dev": true, 1208 - "license": "MIT", 1209 - "optional": true, 1210 - "os": [ 1211 - "linux" 1212 - ] 1213 - }, 1214 - "node_modules/@rollup/rollup-linux-s390x-gnu": { 1215 - "version": "4.40.1", 1216 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.40.1.tgz", 1217 - "integrity": "sha512-h8d28xzYb98fMQKUz0w2fMc1XuGzLLjdyxVIbhbil4ELfk5/orZlSTpF/xdI9C8K0I8lCkq+1En2RJsawZekkg==", 1218 - "cpu": [ 1219 - "s390x" 1220 - ], 1221 - "dev": true, 1222 - "license": "MIT", 1223 - "optional": true, 1224 - "os": [ 1225 - "linux" 1226 - ] 1227 - }, 1228 - "node_modules/@rollup/rollup-linux-x64-gnu": { 1229 - "version": "4.40.1", 1230 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.40.1.tgz", 1231 - "integrity": "sha512-XiK5z70PEFEFqcNj3/zRSz/qX4bp4QIraTy9QjwJAb/Z8GM7kVUsD0Uk8maIPeTyPCP03ChdI+VVmJriKYbRHQ==", 1232 - "cpu": [ 1233 - "x64" 1234 - ], 1235 - "dev": true, 1236 - "license": "MIT", 1237 - "optional": true, 1238 - "os": [ 1239 - "linux" 1240 - ] 1241 
- }, 1242 - "node_modules/@rollup/rollup-linux-x64-musl": { 1243 - "version": "4.40.1", 1244 - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.40.1.tgz", 1245 - "integrity": "sha512-2BRORitq5rQ4Da9blVovzNCMaUlyKrzMSvkVR0D4qPuOy/+pMCrh1d7o01RATwVy+6Fa1WBw+da7QPeLWU/1mQ==", 1246 - "cpu": [ 1247 - "x64" 1248 - ], 1249 - "dev": true, 1250 - "license": "MIT", 1251 - "optional": true, 1252 - "os": [ 1253 - "linux" 1254 - ] 1255 - }, 1256 - "node_modules/@rollup/rollup-win32-arm64-msvc": { 1257 - "version": "4.40.1", 1258 - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.40.1.tgz", 1259 - "integrity": "sha512-b2bcNm9Kbde03H+q+Jjw9tSfhYkzrDUf2d5MAd1bOJuVplXvFhWz7tRtWvD8/ORZi7qSCy0idW6tf2HgxSXQSg==", 1260 - "cpu": [ 1261 - "arm64" 1262 - ], 1263 - "dev": true, 1264 - "license": "MIT", 1265 - "optional": true, 1266 - "os": [ 1267 - "win32" 1268 - ] 1269 - }, 1270 - "node_modules/@rollup/rollup-win32-ia32-msvc": { 1271 - "version": "4.40.1", 1272 - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.40.1.tgz", 1273 - "integrity": "sha512-DfcogW8N7Zg7llVEfpqWMZcaErKfsj9VvmfSyRjCyo4BI3wPEfrzTtJkZG6gKP/Z92wFm6rz2aDO7/JfiR/whA==", 1274 - "cpu": [ 1275 - "ia32" 1276 - ], 1277 - "dev": true, 1278 - "license": "MIT", 1279 - "optional": true, 1280 - "os": [ 1281 - "win32" 1282 - ] 1283 - }, 1284 - "node_modules/@rollup/rollup-win32-x64-msvc": { 1285 - "version": "4.40.1", 1286 - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.40.1.tgz", 1287 - "integrity": "sha512-ECyOuDeH3C1I8jH2MK1RtBJW+YPMvSfT0a5NN0nHfQYnDSJ6tUiZH3gzwVP5/Kfh/+Tt7tpWVF9LXNTnhTJ3kA==", 1288 - "cpu": [ 1289 - "x64" 1290 - ], 1291 - "dev": true, 1292 - "license": "MIT", 1293 - "optional": true, 1294 - "os": [ 1295 - "win32" 1296 - ] 1297 - }, 1298 - "node_modules/@types/estree": { 1299 - "version": "1.0.7", 1300 - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", 1301 - "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", 1302 - "dev": true, 1303 - "license": "MIT" 1304 - }, 1305 - "node_modules/@vitest/expect": { 1306 - "version": "3.0.9", 1307 - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", 1308 - "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", 1309 - "dev": true, 1310 - "license": "MIT", 1311 - "dependencies": { 1312 - "@vitest/spy": "3.0.9", 1313 - "@vitest/utils": "3.0.9", 1314 - "chai": "^5.2.0", 1315 - "tinyrainbow": "^2.0.0" 1316 - }, 1317 - "funding": { 1318 - "url": "https://opencollective.com/vitest" 1319 - } 1320 - }, 1321 - "node_modules/@vitest/mocker": { 1322 - "version": "3.0.9", 1323 - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", 1324 - "integrity": "sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", 1325 - "dev": true, 1326 - "license": "MIT", 1327 - "dependencies": { 1328 - "@vitest/spy": "3.0.9", 1329 - "estree-walker": "^3.0.3", 1330 - "magic-string": "^0.30.17" 1331 - }, 1332 - "funding": { 1333 - "url": "https://opencollective.com/vitest" 1334 - }, 1335 - "peerDependencies": { 1336 - "msw": "^2.4.9", 1337 - "vite": "^5.0.0 || ^6.0.0" 1338 - }, 1339 - "peerDependenciesMeta": { 1340 - "msw": { 1341 - "optional": true 1342 - }, 1343 - "vite": { 1344 - "optional": true 1345 - } 1346 
- } 1347 - }, 1348 - "node_modules/@vitest/pretty-format": { 1349 - "version": "3.1.2", 1350 - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.2.tgz", 1351 - "integrity": "sha512-R0xAiHuWeDjTSB3kQ3OQpT8Rx3yhdOAIm/JM4axXxnG7Q/fS8XUwggv/A4xzbQA+drYRjzkMnpYnOGAc4oeq8w==", 1352 - "dev": true, 1353 - "license": "MIT", 1354 - "dependencies": { 1355 - "tinyrainbow": "^2.0.0" 1356 - }, 1357 - "funding": { 1358 - "url": "https://opencollective.com/vitest" 1359 - } 1360 - }, 1361 - "node_modules/@vitest/runner": { 1362 - "version": "3.0.9", 1363 - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", 1364 - "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", 1365 - "dev": true, 1366 - "license": "MIT", 1367 - "dependencies": { 1368 - "@vitest/utils": "3.0.9", 1369 - "pathe": "^2.0.3" 1370 - }, 1371 - "funding": { 1372 - "url": "https://opencollective.com/vitest" 1373 - } 1374 - }, 1375 - "node_modules/@vitest/snapshot": { 1376 - "version": "3.0.9", 1377 - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", 1378 - "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", 1379 - "dev": true, 1380 - "license": "MIT", 1381 - "dependencies": { 1382 - "@vitest/pretty-format": "3.0.9", 1383 - "magic-string": "^0.30.17", 1384 - "pathe": "^2.0.3" 1385 - }, 1386 - "funding": { 1387 - "url": "https://opencollective.com/vitest" 1388 - } 1389 - }, 1390 - "node_modules/@vitest/snapshot/node_modules/@vitest/pretty-format": { 1391 - "version": "3.0.9", 1392 - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", 1393 - "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", 1394 - "dev": true, 1395 - "license": "MIT", 1396 - "dependencies": { 1397 - "tinyrainbow": "^2.0.0" 1398 - }, 1399 - "funding": { 1400 - "url": "https://opencollective.com/vitest" 1401 - } 1402 - }, 1403 - "node_modules/@vitest/spy": { 1404 - "version": "3.0.9", 1405 - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", 1406 - "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", 1407 - "dev": true, 1408 - "license": "MIT", 1409 - "dependencies": { 1410 - "tinyspy": "^3.0.2" 1411 - }, 1412 - "funding": { 1413 - "url": "https://opencollective.com/vitest" 1414 - } 1415 - }, 1416 - "node_modules/@vitest/utils": { 1417 - "version": "3.0.9", 1418 - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", 1419 - "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", 1420 - "dev": true, 1421 - "license": "MIT", 1422 - "dependencies": { 1423 - "@vitest/pretty-format": "3.0.9", 1424 - "loupe": "^3.1.3", 1425 - "tinyrainbow": "^2.0.0" 1426 - }, 1427 - "funding": { 1428 - "url": "https://opencollective.com/vitest" 1429 - } 1430 - }, 1431 - "node_modules/@vitest/utils/node_modules/@vitest/pretty-format": { 1432 - "version": "3.0.9", 1433 - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", 1434 - "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", 1435 - "dev": true, 1436 - "license": "MIT", 1437 - "dependencies": { 1438 - "tinyrainbow": "^2.0.0" 1439 - }, 1440 - "funding": { 1441 - "url": "https://opencollective.com/vitest" 
1442 - } 1443 - }, 1444 - "node_modules/acorn": { 1445 - "version": "8.14.0", 1446 - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", 1447 - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", 1448 - "dev": true, 1449 - "license": "MIT", 1450 - "bin": { 1451 - "acorn": "bin/acorn" 1452 - }, 1453 - "engines": { 1454 - "node": ">=0.4.0" 1455 - } 1456 - }, 1457 - "node_modules/acorn-walk": { 1458 - "version": "8.3.2", 1459 - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", 1460 - "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", 1461 - "dev": true, 1462 - "license": "MIT", 1463 - "engines": { 1464 - "node": ">=0.4.0" 1465 - } 1466 - }, 1467 - "node_modules/as-table": { 1468 - "version": "1.0.55", 1469 - "resolved": "https://registry.npmjs.org/as-table/-/as-table-1.0.55.tgz", 1470 - "integrity": "sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==", 1471 - "dev": true, 1472 - "license": "MIT", 1473 - "dependencies": { 1474 - "printable-characters": "^1.0.42" 1475 - } 1476 - }, 1477 - "node_modules/assertion-error": { 1478 - "version": "2.0.1", 1479 - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", 1480 - "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", 1481 - "dev": true, 1482 - "license": "MIT", 1483 - "engines": { 1484 - "node": ">=12" 1485 - } 1486 - }, 1487 - "node_modules/birpc": { 1488 - "version": "0.2.14", 1489 - "resolved": "https://registry.npmjs.org/birpc/-/birpc-0.2.14.tgz", 1490 - "integrity": "sha512-37FHE8rqsYM5JEKCnXFyHpBCzvgHEExwVVTq+nUmloInU7l8ezD1TpOhKpS8oe1DTYFqEK27rFZVKG43oTqXRA==", 1491 - "dev": true, 1492 - "license": "MIT", 1493 - "funding": { 1494 - "url": "https://github.com/sponsors/antfu" 1495 - } 1496 - }, 1497 - "node_modules/blake3-wasm": { 1498 - "version": "2.1.5", 1499 - "resolved": "https://registry.npmjs.org/blake3-wasm/-/blake3-wasm-2.1.5.tgz", 1500 - "integrity": "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==", 1501 - "dev": true, 1502 - "license": "MIT" 1503 - }, 1504 - "node_modules/cac": { 1505 - "version": "6.7.14", 1506 - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", 1507 - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", 1508 - "dev": true, 1509 - "license": "MIT", 1510 - "engines": { 1511 - "node": ">=8" 1512 - } 1513 - }, 1514 - "node_modules/chai": { 1515 - "version": "5.2.0", 1516 - "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", 1517 - "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", 1518 - "dev": true, 1519 - "license": "MIT", 1520 - "dependencies": { 1521 - "assertion-error": "^2.0.1", 1522 - "check-error": "^2.1.1", 1523 - "deep-eql": "^5.0.1", 1524 - "loupe": "^3.1.0", 1525 - "pathval": "^2.0.0" 1526 - }, 1527 - "engines": { 1528 - "node": ">=12" 1529 - } 1530 - }, 1531 - "node_modules/check-error": { 1532 - "version": "2.1.1", 1533 - "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", 1534 - "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", 1535 - "dev": true, 1536 - "license": "MIT", 1537 - "engines": { 1538 - "node": ">= 16" 1539 - } 1540 - 
}, 1541 - "node_modules/cjs-module-lexer": { 1542 - "version": "1.4.3", 1543 - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", 1544 - "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", 1545 - "dev": true, 1546 - "license": "MIT" 1547 - }, 1548 - "node_modules/color": { 1549 - "version": "4.2.3", 1550 - "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", 1551 - "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", 1552 - "dev": true, 1553 - "license": "MIT", 1554 - "optional": true, 1555 - "dependencies": { 1556 - "color-convert": "^2.0.1", 1557 - "color-string": "^1.9.0" 1558 - }, 1559 - "engines": { 1560 - "node": ">=12.5.0" 1561 - } 1562 - }, 1563 - "node_modules/color-convert": { 1564 - "version": "2.0.1", 1565 - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 1566 - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 1567 - "dev": true, 1568 - "license": "MIT", 1569 - "optional": true, 1570 - "dependencies": { 1571 - "color-name": "~1.1.4" 1572 - }, 1573 - "engines": { 1574 - "node": ">=7.0.0" 1575 - } 1576 - }, 1577 - "node_modules/color-name": { 1578 - "version": "1.1.4", 1579 - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 1580 - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 1581 - "dev": true, 1582 - "license": "MIT", 1583 - "optional": true 1584 - }, 1585 - "node_modules/color-string": { 1586 - "version": "1.9.1", 1587 - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", 1588 - "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", 1589 - "dev": true, 1590 - "license": "MIT", 1591 - "optional": true, 1592 - "dependencies": { 1593 - "color-name": "^1.0.0", 1594 - "simple-swizzle": "^0.2.2" 1595 - } 1596 - }, 1597 - "node_modules/cookie": { 1598 - "version": "0.7.2", 1599 - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", 1600 - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", 1601 - "dev": true, 1602 - "license": "MIT", 1603 - "engines": { 1604 - "node": ">= 0.6" 1605 - } 1606 - }, 1607 - "node_modules/data-uri-to-buffer": { 1608 - "version": "2.0.2", 1609 - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-2.0.2.tgz", 1610 - "integrity": "sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==", 1611 - "dev": true, 1612 - "license": "MIT" 1613 - }, 1614 - "node_modules/debug": { 1615 - "version": "4.4.0", 1616 - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", 1617 - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", 1618 - "dev": true, 1619 - "license": "MIT", 1620 - "dependencies": { 1621 - "ms": "^2.1.3" 1622 - }, 1623 - "engines": { 1624 - "node": ">=6.0" 1625 - }, 1626 - "peerDependenciesMeta": { 1627 - "supports-color": { 1628 - "optional": true 1629 - } 1630 - } 1631 - }, 1632 - "node_modules/deep-eql": { 1633 - "version": "5.0.2", 1634 - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", 1635 - "integrity": 
"sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", 1636 - "dev": true, 1637 - "license": "MIT", 1638 - "engines": { 1639 - "node": ">=6" 1640 - } 1641 - }, 1642 - "node_modules/defu": { 1643 - "version": "6.1.4", 1644 - "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", 1645 - "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", 1646 - "dev": true, 1647 - "license": "MIT" 1648 - }, 1649 - "node_modules/detect-libc": { 1650 - "version": "2.0.4", 1651 - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", 1652 - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", 1653 - "dev": true, 1654 - "license": "Apache-2.0", 1655 - "optional": true, 1656 - "engines": { 1657 - "node": ">=8" 1658 - } 1659 - }, 1660 - "node_modules/devalue": { 1661 - "version": "4.3.3", 1662 - "resolved": "https://registry.npmjs.org/devalue/-/devalue-4.3.3.tgz", 1663 - "integrity": "sha512-UH8EL6H2ifcY8TbD2QsxwCC/pr5xSwPvv85LrLXVihmHVC3T3YqTCIwnR5ak0yO1KYqlxrPVOA/JVZJYPy2ATg==", 1664 - "dev": true, 1665 - "license": "MIT" 1666 - }, 1667 - "node_modules/es-module-lexer": { 1668 - "version": "1.7.0", 1669 - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", 1670 - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", 1671 - "dev": true, 1672 - "license": "MIT" 1673 - }, 1674 - "node_modules/esbuild": { 1675 - "version": "0.25.3", 1676 - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.3.tgz", 1677 - "integrity": "sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q==", 1678 - "dev": true, 1679 - "hasInstallScript": true, 1680 - "license": "MIT", 1681 - "bin": { 1682 - "esbuild": "bin/esbuild" 1683 - }, 1684 - "engines": { 1685 - "node": ">=18" 1686 - }, 1687 - "optionalDependencies": { 1688 - "@esbuild/aix-ppc64": "0.25.3", 1689 - "@esbuild/android-arm": "0.25.3", 1690 - "@esbuild/android-arm64": "0.25.3", 1691 - "@esbuild/android-x64": "0.25.3", 1692 - "@esbuild/darwin-arm64": "0.25.3", 1693 - "@esbuild/darwin-x64": "0.25.3", 1694 - "@esbuild/freebsd-arm64": "0.25.3", 1695 - "@esbuild/freebsd-x64": "0.25.3", 1696 - "@esbuild/linux-arm": "0.25.3", 1697 - "@esbuild/linux-arm64": "0.25.3", 1698 - "@esbuild/linux-ia32": "0.25.3", 1699 - "@esbuild/linux-loong64": "0.25.3", 1700 - "@esbuild/linux-mips64el": "0.25.3", 1701 - "@esbuild/linux-ppc64": "0.25.3", 1702 - "@esbuild/linux-riscv64": "0.25.3", 1703 - "@esbuild/linux-s390x": "0.25.3", 1704 - "@esbuild/linux-x64": "0.25.3", 1705 - "@esbuild/netbsd-arm64": "0.25.3", 1706 - "@esbuild/netbsd-x64": "0.25.3", 1707 - "@esbuild/openbsd-arm64": "0.25.3", 1708 - "@esbuild/openbsd-x64": "0.25.3", 1709 - "@esbuild/sunos-x64": "0.25.3", 1710 - "@esbuild/win32-arm64": "0.25.3", 1711 - "@esbuild/win32-ia32": "0.25.3", 1712 - "@esbuild/win32-x64": "0.25.3" 1713 - } 1714 - }, 1715 - "node_modules/estree-walker": { 1716 - "version": "3.0.3", 1717 - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", 1718 - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", 1719 - "dev": true, 1720 - "license": "MIT", 1721 - "dependencies": { 1722 - "@types/estree": "^1.0.0" 1723 - } 1724 - }, 1725 - "node_modules/exit-hook": { 1726 - "version": "2.2.1", 1727 - "resolved": 
"https://registry.npmjs.org/exit-hook/-/exit-hook-2.2.1.tgz", 1728 - "integrity": "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==", 1729 - "dev": true, 1730 - "license": "MIT", 1731 - "engines": { 1732 - "node": ">=6" 1733 - }, 1734 - "funding": { 1735 - "url": "https://github.com/sponsors/sindresorhus" 1736 - } 1737 - }, 1738 - "node_modules/expect-type": { 1739 - "version": "1.2.1", 1740 - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", 1741 - "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", 1742 - "dev": true, 1743 - "license": "Apache-2.0", 1744 - "engines": { 1745 - "node": ">=12.0.0" 1746 - } 1747 - }, 1748 - "node_modules/exsolve": { 1749 - "version": "1.0.5", 1750 - "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.5.tgz", 1751 - "integrity": "sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==", 1752 - "dev": true, 1753 - "license": "MIT" 1754 - }, 1755 - "node_modules/fdir": { 1756 - "version": "6.4.4", 1757 - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", 1758 - "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", 1759 - "dev": true, 1760 - "license": "MIT", 1761 - "peerDependencies": { 1762 - "picomatch": "^3 || ^4" 1763 - }, 1764 - "peerDependenciesMeta": { 1765 - "picomatch": { 1766 - "optional": true 1767 - } 1768 - } 1769 - }, 1770 - "node_modules/fsevents": { 1771 - "version": "2.3.3", 1772 - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", 1773 - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", 1774 - "dev": true, 1775 - "hasInstallScript": true, 1776 - "license": "MIT", 1777 - "optional": true, 1778 - "os": [ 1779 - "darwin" 1780 - ], 1781 - "engines": { 1782 - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" 1783 - } 1784 - }, 1785 - "node_modules/get-source": { 1786 - "version": "2.0.12", 1787 - "resolved": "https://registry.npmjs.org/get-source/-/get-source-2.0.12.tgz", 1788 - "integrity": "sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==", 1789 - "dev": true, 1790 - "license": "Unlicense", 1791 - "dependencies": { 1792 - "data-uri-to-buffer": "^2.0.0", 1793 - "source-map": "^0.6.1" 1794 - } 1795 - }, 1796 - "node_modules/glob-to-regexp": { 1797 - "version": "0.4.1", 1798 - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", 1799 - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", 1800 - "dev": true, 1801 - "license": "BSD-2-Clause" 1802 - }, 1803 - "node_modules/is-arrayish": { 1804 - "version": "0.3.2", 1805 - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", 1806 - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", 1807 - "dev": true, 1808 - "license": "MIT", 1809 - "optional": true 1810 - }, 1811 - "node_modules/loupe": { 1812 - "version": "3.1.3", 1813 - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", 1814 - "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", 1815 - "dev": true, 1816 - "license": "MIT" 1817 - }, 1818 - "node_modules/magic-string": { 1819 - "version": "0.30.17", 1820 - "resolved": 
"https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", 1821 - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", 1822 - "dev": true, 1823 - "license": "MIT", 1824 - "dependencies": { 1825 - "@jridgewell/sourcemap-codec": "^1.5.0" 1826 - } 1827 - }, 1828 - "node_modules/mime": { 1829 - "version": "3.0.0", 1830 - "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", 1831 - "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", 1832 - "dev": true, 1833 - "license": "MIT", 1834 - "bin": { 1835 - "mime": "cli.js" 1836 - }, 1837 - "engines": { 1838 - "node": ">=10.0.0" 1839 - } 1840 - }, 1841 - "node_modules/miniflare": { 1842 - "version": "4.20250428.1", 1843 - "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20250428.1.tgz", 1844 - "integrity": "sha512-M3qcJXjeAEimHrEeWXEhrJiC3YHB5M3QSqqK67pOTI+lHn0QyVG/2iFUjVJ/nv+i10uxeAEva8GRGeu+tKRCmQ==", 1845 - "dev": true, 1846 - "license": "MIT", 1847 - "dependencies": { 1848 - "@cspotcode/source-map-support": "0.8.1", 1849 - "acorn": "8.14.0", 1850 - "acorn-walk": "8.3.2", 1851 - "exit-hook": "2.2.1", 1852 - "glob-to-regexp": "0.4.1", 1853 - "stoppable": "1.1.0", 1854 - "undici": "^5.28.5", 1855 - "workerd": "1.20250428.0", 1856 - "ws": "8.18.0", 1857 - "youch": "3.3.4", 1858 - "zod": "3.22.3" 1859 - }, 1860 - "bin": { 1861 - "miniflare": "bootstrap.js" 1862 - }, 1863 - "engines": { 1864 - "node": ">=18.0.0" 1865 - } 1866 - }, 1867 - "node_modules/miniflare/node_modules/zod": { 1868 - "version": "3.22.3", 1869 - "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.3.tgz", 1870 - "integrity": "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==", 1871 - "dev": true, 1872 - "license": "MIT", 1873 - "funding": { 1874 - "url": "https://github.com/sponsors/colinhacks" 1875 - } 1876 - }, 1877 - "node_modules/ms": { 1878 - "version": "2.1.3", 1879 - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 1880 - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", 1881 - "dev": true, 1882 - "license": "MIT" 1883 - }, 1884 - "node_modules/mustache": { 1885 - "version": "4.2.0", 1886 - "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", 1887 - "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", 1888 - "dev": true, 1889 - "license": "MIT", 1890 - "bin": { 1891 - "mustache": "bin/mustache" 1892 - } 1893 - }, 1894 - "node_modules/nanoid": { 1895 - "version": "3.3.11", 1896 - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", 1897 - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", 1898 - "dev": true, 1899 - "funding": [ 1900 - { 1901 - "type": "github", 1902 - "url": "https://github.com/sponsors/ai" 1903 - } 1904 - ], 1905 - "license": "MIT", 1906 - "bin": { 1907 - "nanoid": "bin/nanoid.cjs" 1908 - }, 1909 - "engines": { 1910 - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" 1911 - } 1912 - }, 1913 - "node_modules/ohash": { 1914 - "version": "2.0.11", 1915 - "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", 1916 - "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", 1917 - "dev": true, 1918 - "license": "MIT" 1919 - }, 1920 - "node_modules/path-to-regexp": { 1921 - 
"version": "6.3.0", 1922 - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", 1923 - "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", 1924 - "dev": true, 1925 - "license": "MIT" 1926 - }, 1927 - "node_modules/pathe": { 1928 - "version": "2.0.3", 1929 - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", 1930 - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", 1931 - "dev": true, 1932 - "license": "MIT" 1933 - }, 1934 - "node_modules/pathval": { 1935 - "version": "2.0.0", 1936 - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", 1937 - "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", 1938 - "dev": true, 1939 - "license": "MIT", 1940 - "engines": { 1941 - "node": ">= 14.16" 1942 - } 1943 - }, 1944 - "node_modules/picocolors": { 1945 - "version": "1.1.1", 1946 - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", 1947 - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", 1948 - "dev": true, 1949 - "license": "ISC" 1950 - }, 1951 - "node_modules/picomatch": { 1952 - "version": "4.0.2", 1953 - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", 1954 - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", 1955 - "dev": true, 1956 - "license": "MIT", 1957 - "engines": { 1958 - "node": ">=12" 1959 - }, 1960 - "funding": { 1961 - "url": "https://github.com/sponsors/jonschlinkert" 1962 - } 1963 - }, 1964 - "node_modules/postcss": { 1965 - "version": "8.5.3", 1966 - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", 1967 - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", 1968 - "dev": true, 1969 - "funding": [ 1970 - { 1971 - "type": "opencollective", 1972 - "url": "https://opencollective.com/postcss/" 1973 - }, 1974 - { 1975 - "type": "tidelift", 1976 - "url": "https://tidelift.com/funding/github/npm/postcss" 1977 - }, 1978 - { 1979 - "type": "github", 1980 - "url": "https://github.com/sponsors/ai" 1981 - } 1982 - ], 1983 - "license": "MIT", 1984 - "dependencies": { 1985 - "nanoid": "^3.3.8", 1986 - "picocolors": "^1.1.1", 1987 - "source-map-js": "^1.2.1" 1988 - }, 1989 - "engines": { 1990 - "node": "^10 || ^12 || >=14" 1991 - } 1992 - }, 1993 - "node_modules/printable-characters": { 1994 - "version": "1.0.42", 1995 - "resolved": "https://registry.npmjs.org/printable-characters/-/printable-characters-1.0.42.tgz", 1996 - "integrity": "sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==", 1997 - "dev": true, 1998 - "license": "Unlicense" 1999 - }, 2000 - "node_modules/rollup": { 2001 - "version": "4.40.1", 2002 - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.40.1.tgz", 2003 - "integrity": "sha512-C5VvvgCCyfyotVITIAv+4efVytl5F7wt+/I2i9q9GZcEXW9BP52YYOXC58igUi+LFZVHukErIIqQSWwv/M3WRw==", 2004 - "dev": true, 2005 - "license": "MIT", 2006 - "dependencies": { 2007 - "@types/estree": "1.0.7" 2008 - }, 2009 - "bin": { 2010 - "rollup": "dist/bin/rollup" 2011 - }, 2012 - "engines": { 2013 - "node": ">=18.0.0", 2014 - "npm": ">=8.0.0" 2015 - }, 2016 - "optionalDependencies": { 2017 - "@rollup/rollup-android-arm-eabi": "4.40.1", 2018 - "@rollup/rollup-android-arm64": 
"4.40.1", 2019 - "@rollup/rollup-darwin-arm64": "4.40.1", 2020 - "@rollup/rollup-darwin-x64": "4.40.1", 2021 - "@rollup/rollup-freebsd-arm64": "4.40.1", 2022 - "@rollup/rollup-freebsd-x64": "4.40.1", 2023 - "@rollup/rollup-linux-arm-gnueabihf": "4.40.1", 2024 - "@rollup/rollup-linux-arm-musleabihf": "4.40.1", 2025 - "@rollup/rollup-linux-arm64-gnu": "4.40.1", 2026 - "@rollup/rollup-linux-arm64-musl": "4.40.1", 2027 - "@rollup/rollup-linux-loongarch64-gnu": "4.40.1", 2028 - "@rollup/rollup-linux-powerpc64le-gnu": "4.40.1", 2029 - "@rollup/rollup-linux-riscv64-gnu": "4.40.1", 2030 - "@rollup/rollup-linux-riscv64-musl": "4.40.1", 2031 - "@rollup/rollup-linux-s390x-gnu": "4.40.1", 2032 - "@rollup/rollup-linux-x64-gnu": "4.40.1", 2033 - "@rollup/rollup-linux-x64-musl": "4.40.1", 2034 - "@rollup/rollup-win32-arm64-msvc": "4.40.1", 2035 - "@rollup/rollup-win32-ia32-msvc": "4.40.1", 2036 - "@rollup/rollup-win32-x64-msvc": "4.40.1", 2037 - "fsevents": "~2.3.2" 2038 - } 2039 - }, 2040 - "node_modules/semver": { 2041 - "version": "7.7.1", 2042 - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", 2043 - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", 2044 - "dev": true, 2045 - "license": "ISC", 2046 - "bin": { 2047 - "semver": "bin/semver.js" 2048 - }, 2049 - "engines": { 2050 - "node": ">=10" 2051 - } 2052 - }, 2053 - "node_modules/sharp": { 2054 - "version": "0.33.5", 2055 - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", 2056 - "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", 2057 - "dev": true, 2058 - "hasInstallScript": true, 2059 - "license": "Apache-2.0", 2060 - "optional": true, 2061 - "dependencies": { 2062 - "color": "^4.2.3", 2063 - "detect-libc": "^2.0.3", 2064 - "semver": "^7.6.3" 2065 - }, 2066 - "engines": { 2067 - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 2068 - }, 2069 - "funding": { 2070 - "url": "https://opencollective.com/libvips" 2071 - }, 2072 - "optionalDependencies": { 2073 - "@img/sharp-darwin-arm64": "0.33.5", 2074 - "@img/sharp-darwin-x64": "0.33.5", 2075 - "@img/sharp-libvips-darwin-arm64": "1.0.4", 2076 - "@img/sharp-libvips-darwin-x64": "1.0.4", 2077 - "@img/sharp-libvips-linux-arm": "1.0.5", 2078 - "@img/sharp-libvips-linux-arm64": "1.0.4", 2079 - "@img/sharp-libvips-linux-s390x": "1.0.4", 2080 - "@img/sharp-libvips-linux-x64": "1.0.4", 2081 - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", 2082 - "@img/sharp-libvips-linuxmusl-x64": "1.0.4", 2083 - "@img/sharp-linux-arm": "0.33.5", 2084 - "@img/sharp-linux-arm64": "0.33.5", 2085 - "@img/sharp-linux-s390x": "0.33.5", 2086 - "@img/sharp-linux-x64": "0.33.5", 2087 - "@img/sharp-linuxmusl-arm64": "0.33.5", 2088 - "@img/sharp-linuxmusl-x64": "0.33.5", 2089 - "@img/sharp-wasm32": "0.33.5", 2090 - "@img/sharp-win32-ia32": "0.33.5", 2091 - "@img/sharp-win32-x64": "0.33.5" 2092 - } 2093 - }, 2094 - "node_modules/siginfo": { 2095 - "version": "2.0.0", 2096 - "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", 2097 - "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", 2098 - "dev": true, 2099 - "license": "ISC" 2100 - }, 2101 - "node_modules/simple-swizzle": { 2102 - "version": "0.2.2", 2103 - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", 2104 - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", 
2105 - "dev": true, 2106 - "license": "MIT", 2107 - "optional": true, 2108 - "dependencies": { 2109 - "is-arrayish": "^0.3.1" 2110 - } 2111 - }, 2112 - "node_modules/source-map": { 2113 - "version": "0.6.1", 2114 - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", 2115 - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", 2116 - "dev": true, 2117 - "license": "BSD-3-Clause", 2118 - "engines": { 2119 - "node": ">=0.10.0" 2120 - } 2121 - }, 2122 - "node_modules/source-map-js": { 2123 - "version": "1.2.1", 2124 - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", 2125 - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", 2126 - "dev": true, 2127 - "license": "BSD-3-Clause", 2128 - "engines": { 2129 - "node": ">=0.10.0" 2130 - } 2131 - }, 2132 - "node_modules/stackback": { 2133 - "version": "0.0.2", 2134 - "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", 2135 - "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", 2136 - "dev": true, 2137 - "license": "MIT" 2138 - }, 2139 - "node_modules/stacktracey": { 2140 - "version": "2.1.8", 2141 - "resolved": "https://registry.npmjs.org/stacktracey/-/stacktracey-2.1.8.tgz", 2142 - "integrity": "sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==", 2143 - "dev": true, 2144 - "license": "Unlicense", 2145 - "dependencies": { 2146 - "as-table": "^1.0.36", 2147 - "get-source": "^2.0.12" 2148 - } 2149 - }, 2150 - "node_modules/std-env": { 2151 - "version": "3.9.0", 2152 - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", 2153 - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", 2154 - "dev": true, 2155 - "license": "MIT" 2156 - }, 2157 - "node_modules/stoppable": { 2158 - "version": "1.1.0", 2159 - "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", 2160 - "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", 2161 - "dev": true, 2162 - "license": "MIT", 2163 - "engines": { 2164 - "node": ">=4", 2165 - "npm": ">=6" 2166 - } 2167 - }, 2168 - "node_modules/tinybench": { 2169 - "version": "2.9.0", 2170 - "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", 2171 - "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", 2172 - "dev": true, 2173 - "license": "MIT" 2174 - }, 2175 - "node_modules/tinyexec": { 2176 - "version": "0.3.2", 2177 - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", 2178 - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", 2179 - "dev": true, 2180 - "license": "MIT" 2181 - }, 2182 - "node_modules/tinyglobby": { 2183 - "version": "0.2.13", 2184 - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", 2185 - "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", 2186 - "dev": true, 2187 - "license": "MIT", 2188 - "dependencies": { 2189 - "fdir": "^6.4.4", 2190 - "picomatch": "^4.0.2" 2191 - }, 2192 - "engines": { 2193 - "node": ">=12.0.0" 2194 - }, 2195 - "funding": { 2196 - "url": "https://github.com/sponsors/SuperchupuDev" 2197 - } 2198 - }, 2199 - 
"node_modules/tinypool": { 2200 - "version": "1.0.2", 2201 - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", 2202 - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", 2203 - "dev": true, 2204 - "license": "MIT", 2205 - "engines": { 2206 - "node": "^18.0.0 || >=20.0.0" 2207 - } 2208 - }, 2209 - "node_modules/tinyrainbow": { 2210 - "version": "2.0.0", 2211 - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", 2212 - "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", 2213 - "dev": true, 2214 - "license": "MIT", 2215 - "engines": { 2216 - "node": ">=14.0.0" 2217 - } 2218 - }, 2219 - "node_modules/tinyspy": { 2220 - "version": "3.0.2", 2221 - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", 2222 - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", 2223 - "dev": true, 2224 - "license": "MIT", 2225 - "engines": { 2226 - "node": ">=14.0.0" 2227 - } 2228 - }, 2229 - "node_modules/tslib": { 2230 - "version": "2.8.1", 2231 - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", 2232 - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", 2233 - "dev": true, 2234 - "license": "0BSD", 2235 - "optional": true 2236 - }, 2237 - "node_modules/ufo": { 2238 - "version": "1.6.1", 2239 - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", 2240 - "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", 2241 - "dev": true, 2242 - "license": "MIT" 2243 - }, 2244 - "node_modules/undici": { 2245 - "version": "5.29.0", 2246 - "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", 2247 - "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", 2248 - "dev": true, 2249 - "license": "MIT", 2250 - "dependencies": { 2251 - "@fastify/busboy": "^2.0.0" 2252 - }, 2253 - "engines": { 2254 - "node": ">=14.0" 2255 - } 2256 - }, 2257 - "node_modules/unenv": { 2258 - "version": "2.0.0-rc.15", 2259 - "resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.15.tgz", 2260 - "integrity": "sha512-J/rEIZU8w6FOfLNz/hNKsnY+fFHWnu9MH4yRbSZF3xbbGHovcetXPs7sD+9p8L6CeNC//I9bhRYAOsBt2u7/OA==", 2261 - "dev": true, 2262 - "license": "MIT", 2263 - "dependencies": { 2264 - "defu": "^6.1.4", 2265 - "exsolve": "^1.0.4", 2266 - "ohash": "^2.0.11", 2267 - "pathe": "^2.0.3", 2268 - "ufo": "^1.5.4" 2269 - } 2270 - }, 2271 - "node_modules/vite": { 2272 - "version": "6.3.4", 2273 - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz", 2274 - "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==", 2275 - "dev": true, 2276 - "license": "MIT", 2277 - "dependencies": { 2278 - "esbuild": "^0.25.0", 2279 - "fdir": "^6.4.4", 2280 - "picomatch": "^4.0.2", 2281 - "postcss": "^8.5.3", 2282 - "rollup": "^4.34.9", 2283 - "tinyglobby": "^0.2.13" 2284 - }, 2285 - "bin": { 2286 - "vite": "bin/vite.js" 2287 - }, 2288 - "engines": { 2289 - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2290 - }, 2291 - "funding": { 2292 - "url": "https://github.com/vitejs/vite?sponsor=1" 2293 - }, 2294 - "optionalDependencies": { 2295 - "fsevents": "~2.3.3" 2296 - }, 2297 - "peerDependencies": { 2298 - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", 2299 - "jiti": 
">=1.21.0", 2300 - "less": "*", 2301 - "lightningcss": "^1.21.0", 2302 - "sass": "*", 2303 - "sass-embedded": "*", 2304 - "stylus": "*", 2305 - "sugarss": "*", 2306 - "terser": "^5.16.0", 2307 - "tsx": "^4.8.1", 2308 - "yaml": "^2.4.2" 2309 - }, 2310 - "peerDependenciesMeta": { 2311 - "@types/node": { 2312 - "optional": true 2313 - }, 2314 - "jiti": { 2315 - "optional": true 2316 - }, 2317 - "less": { 2318 - "optional": true 2319 - }, 2320 - "lightningcss": { 2321 - "optional": true 2322 - }, 2323 - "sass": { 2324 - "optional": true 2325 - }, 2326 - "sass-embedded": { 2327 - "optional": true 2328 - }, 2329 - "stylus": { 2330 - "optional": true 2331 - }, 2332 - "sugarss": { 2333 - "optional": true 2334 - }, 2335 - "terser": { 2336 - "optional": true 2337 - }, 2338 - "tsx": { 2339 - "optional": true 2340 - }, 2341 - "yaml": { 2342 - "optional": true 2343 - } 2344 - } 2345 - }, 2346 - "node_modules/vite-node": { 2347 - "version": "3.0.9", 2348 - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", 2349 - "integrity": "sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", 2350 - "dev": true, 2351 - "license": "MIT", 2352 - "dependencies": { 2353 - "cac": "^6.7.14", 2354 - "debug": "^4.4.0", 2355 - "es-module-lexer": "^1.6.0", 2356 - "pathe": "^2.0.3", 2357 - "vite": "^5.0.0 || ^6.0.0" 2358 - }, 2359 - "bin": { 2360 - "vite-node": "vite-node.mjs" 2361 - }, 2362 - "engines": { 2363 - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2364 - }, 2365 - "funding": { 2366 - "url": "https://opencollective.com/vitest" 2367 - } 2368 - }, 2369 - "node_modules/vitest": { 2370 - "version": "3.0.9", 2371 - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", 2372 - "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", 2373 - "dev": true, 2374 - "license": "MIT", 2375 - "dependencies": { 2376 - "@vitest/expect": "3.0.9", 2377 - "@vitest/mocker": "3.0.9", 2378 - "@vitest/pretty-format": "^3.0.9", 2379 - "@vitest/runner": "3.0.9", 2380 - "@vitest/snapshot": "3.0.9", 2381 - "@vitest/spy": "3.0.9", 2382 - "@vitest/utils": "3.0.9", 2383 - "chai": "^5.2.0", 2384 - "debug": "^4.4.0", 2385 - "expect-type": "^1.1.0", 2386 - "magic-string": "^0.30.17", 2387 - "pathe": "^2.0.3", 2388 - "std-env": "^3.8.0", 2389 - "tinybench": "^2.9.0", 2390 - "tinyexec": "^0.3.2", 2391 - "tinypool": "^1.0.2", 2392 - "tinyrainbow": "^2.0.0", 2393 - "vite": "^5.0.0 || ^6.0.0", 2394 - "vite-node": "3.0.9", 2395 - "why-is-node-running": "^2.3.0" 2396 - }, 2397 - "bin": { 2398 - "vitest": "vitest.mjs" 2399 - }, 2400 - "engines": { 2401 - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2402 - }, 2403 - "funding": { 2404 - "url": "https://opencollective.com/vitest" 2405 - }, 2406 - "peerDependencies": { 2407 - "@edge-runtime/vm": "*", 2408 - "@types/debug": "^4.1.12", 2409 - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", 2410 - "@vitest/browser": "3.0.9", 2411 - "@vitest/ui": "3.0.9", 2412 - "happy-dom": "*", 2413 - "jsdom": "*" 2414 - }, 2415 - "peerDependenciesMeta": { 2416 - "@edge-runtime/vm": { 2417 - "optional": true 2418 - }, 2419 - "@types/debug": { 2420 - "optional": true 2421 - }, 2422 - "@types/node": { 2423 - "optional": true 2424 - }, 2425 - "@vitest/browser": { 2426 - "optional": true 2427 - }, 2428 - "@vitest/ui": { 2429 - "optional": true 2430 - }, 2431 - "happy-dom": { 2432 - "optional": true 2433 - }, 2434 - "jsdom": { 2435 - "optional": true 2436 - } 2437 - } 2438 - }, 2439 - 
"node_modules/why-is-node-running": { 2440 - "version": "2.3.0", 2441 - "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", 2442 - "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", 2443 - "dev": true, 2444 - "license": "MIT", 2445 - "dependencies": { 2446 - "siginfo": "^2.0.0", 2447 - "stackback": "0.0.2" 2448 - }, 2449 - "bin": { 2450 - "why-is-node-running": "cli.js" 2451 - }, 2452 - "engines": { 2453 - "node": ">=8" 2454 - } 2455 - }, 2456 - "node_modules/workerd": { 2457 - "version": "1.20250428.0", 2458 - "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20250428.0.tgz", 2459 - "integrity": "sha512-JJNWkHkwPQKQdvtM9UORijgYdcdJsihA4SfYjwh02IUQsdMyZ9jizV1sX9yWi9B9ptlohTW8UNHJEATuphGgdg==", 2460 - "dev": true, 2461 - "hasInstallScript": true, 2462 - "license": "Apache-2.0", 2463 - "bin": { 2464 - "workerd": "bin/workerd" 2465 - }, 2466 - "engines": { 2467 - "node": ">=16" 2468 - }, 2469 - "optionalDependencies": { 2470 - "@cloudflare/workerd-darwin-64": "1.20250428.0", 2471 - "@cloudflare/workerd-darwin-arm64": "1.20250428.0", 2472 - "@cloudflare/workerd-linux-64": "1.20250428.0", 2473 - "@cloudflare/workerd-linux-arm64": "1.20250428.0", 2474 - "@cloudflare/workerd-windows-64": "1.20250428.0" 2475 - } 2476 - }, 2477 - "node_modules/wrangler": { 2478 - "version": "4.14.1", 2479 - "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.14.1.tgz", 2480 - "integrity": "sha512-EU7IThP7i68TBftJJSveogvWZ5k/WRijcJh3UclDWiWWhDZTPbL6LOJEFhHKqFzHOaC4Y2Aewt48rfTz0e7oCw==", 2481 - "dev": true, 2482 - "license": "MIT OR Apache-2.0", 2483 - "dependencies": { 2484 - "@cloudflare/kv-asset-handler": "0.4.0", 2485 - "@cloudflare/unenv-preset": "2.3.1", 2486 - "blake3-wasm": "2.1.5", 2487 - "esbuild": "0.25.2", 2488 - "miniflare": "4.20250428.1", 2489 - "path-to-regexp": "6.3.0", 2490 - "unenv": "2.0.0-rc.15", 2491 - "workerd": "1.20250428.0" 2492 - }, 2493 - "bin": { 2494 - "wrangler": "bin/wrangler.js", 2495 - "wrangler2": "bin/wrangler.js" 2496 - }, 2497 - "engines": { 2498 - "node": ">=18.0.0" 2499 - }, 2500 - "optionalDependencies": { 2501 - "fsevents": "~2.3.2", 2502 - "sharp": "^0.33.5" 2503 - }, 2504 - "peerDependencies": { 2505 - "@cloudflare/workers-types": "^4.20250428.0" 2506 - }, 2507 - "peerDependenciesMeta": { 2508 - "@cloudflare/workers-types": { 2509 - "optional": true 2510 - } 2511 - } 2512 - }, 2513 - "node_modules/wrangler/node_modules/@esbuild/aix-ppc64": { 2514 - "version": "0.25.2", 2515 - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.2.tgz", 2516 - "integrity": "sha512-wCIboOL2yXZym2cgm6mlA742s9QeJ8DjGVaL39dLN4rRwrOgOyYSnOaFPhKZGLb2ngj4EyfAFjsNJwPXZvseag==", 2517 - "cpu": [ 2518 - "ppc64" 2519 - ], 2520 - "dev": true, 2521 - "license": "MIT", 2522 - "optional": true, 2523 - "os": [ 2524 - "aix" 2525 - ], 2526 - "engines": { 2527 - "node": ">=18" 2528 - } 2529 - }, 2530 - "node_modules/wrangler/node_modules/@esbuild/android-arm": { 2531 - "version": "0.25.2", 2532 - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.2.tgz", 2533 - "integrity": "sha512-NQhH7jFstVY5x8CKbcfa166GoV0EFkaPkCKBQkdPJFvo5u+nGXLEH/ooniLb3QI8Fk58YAx7nsPLozUWfCBOJA==", 2534 - "cpu": [ 2535 - "arm" 2536 - ], 2537 - "dev": true, 2538 - "license": "MIT", 2539 - "optional": true, 2540 - "os": [ 2541 - "android" 2542 - ], 2543 - "engines": { 2544 - "node": ">=18" 2545 - } 2546 - }, 2547 - 
"node_modules/wrangler/node_modules/@esbuild/android-arm64": { 2548 - "version": "0.25.2", 2549 - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.2.tgz", 2550 - "integrity": "sha512-5ZAX5xOmTligeBaeNEPnPaeEuah53Id2tX4c2CVP3JaROTH+j4fnfHCkr1PjXMd78hMst+TlkfKcW/DlTq0i4w==", 2551 - "cpu": [ 2552 - "arm64" 2553 - ], 2554 - "dev": true, 2555 - "license": "MIT", 2556 - "optional": true, 2557 - "os": [ 2558 - "android" 2559 - ], 2560 - "engines": { 2561 - "node": ">=18" 2562 - } 2563 - }, 2564 - "node_modules/wrangler/node_modules/@esbuild/android-x64": { 2565 - "version": "0.25.2", 2566 - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.2.tgz", 2567 - "integrity": "sha512-Ffcx+nnma8Sge4jzddPHCZVRvIfQ0kMsUsCMcJRHkGJ1cDmhe4SsrYIjLUKn1xpHZybmOqCWwB0zQvsjdEHtkg==", 2568 - "cpu": [ 2569 - "x64" 2570 - ], 2571 - "dev": true, 2572 - "license": "MIT", 2573 - "optional": true, 2574 - "os": [ 2575 - "android" 2576 - ], 2577 - "engines": { 2578 - "node": ">=18" 2579 - } 2580 - }, 2581 - "node_modules/wrangler/node_modules/@esbuild/darwin-arm64": { 2582 - "version": "0.25.2", 2583 - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.2.tgz", 2584 - "integrity": "sha512-MpM6LUVTXAzOvN4KbjzU/q5smzryuoNjlriAIx+06RpecwCkL9JpenNzpKd2YMzLJFOdPqBpuub6eVRP5IgiSA==", 2585 - "cpu": [ 2586 - "arm64" 2587 - ], 2588 - "dev": true, 2589 - "license": "MIT", 2590 - "optional": true, 2591 - "os": [ 2592 - "darwin" 2593 - ], 2594 - "engines": { 2595 - "node": ">=18" 2596 - } 2597 - }, 2598 - "node_modules/wrangler/node_modules/@esbuild/darwin-x64": { 2599 - "version": "0.25.2", 2600 - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.2.tgz", 2601 - "integrity": "sha512-5eRPrTX7wFyuWe8FqEFPG2cU0+butQQVNcT4sVipqjLYQjjh8a8+vUTfgBKM88ObB85ahsnTwF7PSIt6PG+QkA==", 2602 - "cpu": [ 2603 - "x64" 2604 - ], 2605 - "dev": true, 2606 - "license": "MIT", 2607 - "optional": true, 2608 - "os": [ 2609 - "darwin" 2610 - ], 2611 - "engines": { 2612 - "node": ">=18" 2613 - } 2614 - }, 2615 - "node_modules/wrangler/node_modules/@esbuild/freebsd-arm64": { 2616 - "version": "0.25.2", 2617 - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.2.tgz", 2618 - "integrity": "sha512-mLwm4vXKiQ2UTSX4+ImyiPdiHjiZhIaE9QvC7sw0tZ6HoNMjYAqQpGyui5VRIi5sGd+uWq940gdCbY3VLvsO1w==", 2619 - "cpu": [ 2620 - "arm64" 2621 - ], 2622 - "dev": true, 2623 - "license": "MIT", 2624 - "optional": true, 2625 - "os": [ 2626 - "freebsd" 2627 - ], 2628 - "engines": { 2629 - "node": ">=18" 2630 - } 2631 - }, 2632 - "node_modules/wrangler/node_modules/@esbuild/freebsd-x64": { 2633 - "version": "0.25.2", 2634 - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.2.tgz", 2635 - "integrity": "sha512-6qyyn6TjayJSwGpm8J9QYYGQcRgc90nmfdUb0O7pp1s4lTY+9D0H9O02v5JqGApUyiHOtkz6+1hZNvNtEhbwRQ==", 2636 - "cpu": [ 2637 - "x64" 2638 - ], 2639 - "dev": true, 2640 - "license": "MIT", 2641 - "optional": true, 2642 - "os": [ 2643 - "freebsd" 2644 - ], 2645 - "engines": { 2646 - "node": ">=18" 2647 - } 2648 - }, 2649 - "node_modules/wrangler/node_modules/@esbuild/linux-arm": { 2650 - "version": "0.25.2", 2651 - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.2.tgz", 2652 - "integrity": "sha512-UHBRgJcmjJv5oeQF8EpTRZs/1knq6loLxTsjc3nxO9eXAPDLcWW55flrMVc97qFPbmZP31ta1AZVUKQzKTzb0g==", 2653 - "cpu": [ 2654 - "arm" 2655 - ], 2656 - "dev": true, 2657 - "license": "MIT", 2658 - "optional": true, 
2659 - "os": [ 2660 - "linux" 2661 - ], 2662 - "engines": { 2663 - "node": ">=18" 2664 - } 2665 - }, 2666 - "node_modules/wrangler/node_modules/@esbuild/linux-arm64": { 2667 - "version": "0.25.2", 2668 - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.2.tgz", 2669 - "integrity": "sha512-gq/sjLsOyMT19I8obBISvhoYiZIAaGF8JpeXu1u8yPv8BE5HlWYobmlsfijFIZ9hIVGYkbdFhEqC0NvM4kNO0g==", 2670 - "cpu": [ 2671 - "arm64" 2672 - ], 2673 - "dev": true, 2674 - "license": "MIT", 2675 - "optional": true, 2676 - "os": [ 2677 - "linux" 2678 - ], 2679 - "engines": { 2680 - "node": ">=18" 2681 - } 2682 - }, 2683 - "node_modules/wrangler/node_modules/@esbuild/linux-ia32": { 2684 - "version": "0.25.2", 2685 - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.2.tgz", 2686 - "integrity": "sha512-bBYCv9obgW2cBP+2ZWfjYTU+f5cxRoGGQ5SeDbYdFCAZpYWrfjjfYwvUpP8MlKbP0nwZ5gyOU/0aUzZ5HWPuvQ==", 2687 - "cpu": [ 2688 - "ia32" 2689 - ], 2690 - "dev": true, 2691 - "license": "MIT", 2692 - "optional": true, 2693 - "os": [ 2694 - "linux" 2695 - ], 2696 - "engines": { 2697 - "node": ">=18" 2698 - } 2699 - }, 2700 - "node_modules/wrangler/node_modules/@esbuild/linux-loong64": { 2701 - "version": "0.25.2", 2702 - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.2.tgz", 2703 - "integrity": "sha512-SHNGiKtvnU2dBlM5D8CXRFdd+6etgZ9dXfaPCeJtz+37PIUlixvlIhI23L5khKXs3DIzAn9V8v+qb1TRKrgT5w==", 2704 - "cpu": [ 2705 - "loong64" 2706 - ], 2707 - "dev": true, 2708 - "license": "MIT", 2709 - "optional": true, 2710 - "os": [ 2711 - "linux" 2712 - ], 2713 - "engines": { 2714 - "node": ">=18" 2715 - } 2716 - }, 2717 - "node_modules/wrangler/node_modules/@esbuild/linux-mips64el": { 2718 - "version": "0.25.2", 2719 - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.2.tgz", 2720 - "integrity": "sha512-hDDRlzE6rPeoj+5fsADqdUZl1OzqDYow4TB4Y/3PlKBD0ph1e6uPHzIQcv2Z65u2K0kpeByIyAjCmjn1hJgG0Q==", 2721 - "cpu": [ 2722 - "mips64el" 2723 - ], 2724 - "dev": true, 2725 - "license": "MIT", 2726 - "optional": true, 2727 - "os": [ 2728 - "linux" 2729 - ], 2730 - "engines": { 2731 - "node": ">=18" 2732 - } 2733 - }, 2734 - "node_modules/wrangler/node_modules/@esbuild/linux-ppc64": { 2735 - "version": "0.25.2", 2736 - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.2.tgz", 2737 - "integrity": "sha512-tsHu2RRSWzipmUi9UBDEzc0nLc4HtpZEI5Ba+Omms5456x5WaNuiG3u7xh5AO6sipnJ9r4cRWQB2tUjPyIkc6g==", 2738 - "cpu": [ 2739 - "ppc64" 2740 - ], 2741 - "dev": true, 2742 - "license": "MIT", 2743 - "optional": true, 2744 - "os": [ 2745 - "linux" 2746 - ], 2747 - "engines": { 2748 - "node": ">=18" 2749 - } 2750 - }, 2751 - "node_modules/wrangler/node_modules/@esbuild/linux-riscv64": { 2752 - "version": "0.25.2", 2753 - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.2.tgz", 2754 - "integrity": "sha512-k4LtpgV7NJQOml/10uPU0s4SAXGnowi5qBSjaLWMojNCUICNu7TshqHLAEbkBdAszL5TabfvQ48kK84hyFzjnw==", 2755 - "cpu": [ 2756 - "riscv64" 2757 - ], 2758 - "dev": true, 2759 - "license": "MIT", 2760 - "optional": true, 2761 - "os": [ 2762 - "linux" 2763 - ], 2764 - "engines": { 2765 - "node": ">=18" 2766 - } 2767 - }, 2768 - "node_modules/wrangler/node_modules/@esbuild/linux-s390x": { 2769 - "version": "0.25.2", 2770 - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.2.tgz", 2771 - "integrity": 
"sha512-GRa4IshOdvKY7M/rDpRR3gkiTNp34M0eLTaC1a08gNrh4u488aPhuZOCpkF6+2wl3zAN7L7XIpOFBhnaE3/Q8Q==", 2772 - "cpu": [ 2773 - "s390x" 2774 - ], 2775 - "dev": true, 2776 - "license": "MIT", 2777 - "optional": true, 2778 - "os": [ 2779 - "linux" 2780 - ], 2781 - "engines": { 2782 - "node": ">=18" 2783 - } 2784 - }, 2785 - "node_modules/wrangler/node_modules/@esbuild/linux-x64": { 2786 - "version": "0.25.2", 2787 - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.2.tgz", 2788 - "integrity": "sha512-QInHERlqpTTZ4FRB0fROQWXcYRD64lAoiegezDunLpalZMjcUcld3YzZmVJ2H/Cp0wJRZ8Xtjtj0cEHhYc/uUg==", 2789 - "cpu": [ 2790 - "x64" 2791 - ], 2792 - "dev": true, 2793 - "license": "MIT", 2794 - "optional": true, 2795 - "os": [ 2796 - "linux" 2797 - ], 2798 - "engines": { 2799 - "node": ">=18" 2800 - } 2801 - }, 2802 - "node_modules/wrangler/node_modules/@esbuild/netbsd-arm64": { 2803 - "version": "0.25.2", 2804 - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.2.tgz", 2805 - "integrity": "sha512-talAIBoY5M8vHc6EeI2WW9d/CkiO9MQJ0IOWX8hrLhxGbro/vBXJvaQXefW2cP0z0nQVTdQ/eNyGFV1GSKrxfw==", 2806 - "cpu": [ 2807 - "arm64" 2808 - ], 2809 - "dev": true, 2810 - "license": "MIT", 2811 - "optional": true, 2812 - "os": [ 2813 - "netbsd" 2814 - ], 2815 - "engines": { 2816 - "node": ">=18" 2817 - } 2818 - }, 2819 - "node_modules/wrangler/node_modules/@esbuild/netbsd-x64": { 2820 - "version": "0.25.2", 2821 - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.2.tgz", 2822 - "integrity": "sha512-voZT9Z+tpOxrvfKFyfDYPc4DO4rk06qamv1a/fkuzHpiVBMOhpjK+vBmWM8J1eiB3OLSMFYNaOaBNLXGChf5tg==", 2823 - "cpu": [ 2824 - "x64" 2825 - ], 2826 - "dev": true, 2827 - "license": "MIT", 2828 - "optional": true, 2829 - "os": [ 2830 - "netbsd" 2831 - ], 2832 - "engines": { 2833 - "node": ">=18" 2834 - } 2835 - }, 2836 - "node_modules/wrangler/node_modules/@esbuild/openbsd-arm64": { 2837 - "version": "0.25.2", 2838 - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.2.tgz", 2839 - "integrity": "sha512-dcXYOC6NXOqcykeDlwId9kB6OkPUxOEqU+rkrYVqJbK2hagWOMrsTGsMr8+rW02M+d5Op5NNlgMmjzecaRf7Tg==", 2840 - "cpu": [ 2841 - "arm64" 2842 - ], 2843 - "dev": true, 2844 - "license": "MIT", 2845 - "optional": true, 2846 - "os": [ 2847 - "openbsd" 2848 - ], 2849 - "engines": { 2850 - "node": ">=18" 2851 - } 2852 - }, 2853 - "node_modules/wrangler/node_modules/@esbuild/openbsd-x64": { 2854 - "version": "0.25.2", 2855 - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.2.tgz", 2856 - "integrity": "sha512-t/TkWwahkH0Tsgoq1Ju7QfgGhArkGLkF1uYz8nQS/PPFlXbP5YgRpqQR3ARRiC2iXoLTWFxc6DJMSK10dVXluw==", 2857 - "cpu": [ 2858 - "x64" 2859 - ], 2860 - "dev": true, 2861 - "license": "MIT", 2862 - "optional": true, 2863 - "os": [ 2864 - "openbsd" 2865 - ], 2866 - "engines": { 2867 - "node": ">=18" 2868 - } 2869 - }, 2870 - "node_modules/wrangler/node_modules/@esbuild/sunos-x64": { 2871 - "version": "0.25.2", 2872 - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.2.tgz", 2873 - "integrity": "sha512-cfZH1co2+imVdWCjd+D1gf9NjkchVhhdpgb1q5y6Hcv9TP6Zi9ZG/beI3ig8TvwT9lH9dlxLq5MQBBgwuj4xvA==", 2874 - "cpu": [ 2875 - "x64" 2876 - ], 2877 - "dev": true, 2878 - "license": "MIT", 2879 - "optional": true, 2880 - "os": [ 2881 - "sunos" 2882 - ], 2883 - "engines": { 2884 - "node": ">=18" 2885 - } 2886 - }, 2887 - "node_modules/wrangler/node_modules/@esbuild/win32-arm64": { 2888 - "version": "0.25.2", 2889 - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.2.tgz", 2890 - "integrity": "sha512-7Loyjh+D/Nx/sOTzV8vfbB3GJuHdOQyrOryFdZvPHLf42Tk9ivBU5Aedi7iyX+x6rbn2Mh68T4qq1SDqJBQO5Q==", 2891 - "cpu": [ 2892 - "arm64" 2893 - ], 2894 - "dev": true, 2895 - "license": "MIT", 2896 - "optional": true, 2897 - "os": [ 2898 - "win32" 2899 - ], 2900 - "engines": { 2901 - "node": ">=18" 2902 - } 2903 - }, 2904 - "node_modules/wrangler/node_modules/@esbuild/win32-ia32": { 2905 - "version": "0.25.2", 2906 - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.2.tgz", 2907 - "integrity": "sha512-WRJgsz9un0nqZJ4MfhabxaD9Ft8KioqU3JMinOTvobbX6MOSUigSBlogP8QB3uxpJDsFS6yN+3FDBdqE5lg9kg==", 2908 - "cpu": [ 2909 - "ia32" 2910 - ], 2911 - "dev": true, 2912 - "license": "MIT", 2913 - "optional": true, 2914 - "os": [ 2915 - "win32" 2916 - ], 2917 - "engines": { 2918 - "node": ">=18" 2919 - } 2920 - }, 2921 - "node_modules/wrangler/node_modules/@esbuild/win32-x64": { 2922 - "version": "0.25.2", 2923 - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.2.tgz", 2924 - "integrity": "sha512-kM3HKb16VIXZyIeVrM1ygYmZBKybX8N4p754bw390wGO3Tf2j4L2/WYL+4suWujpgf6GBYs3jv7TyUivdd05JA==", 2925 - "cpu": [ 2926 - "x64" 2927 - ], 2928 - "dev": true, 2929 - "license": "MIT", 2930 - "optional": true, 2931 - "os": [ 2932 - "win32" 2933 - ], 2934 - "engines": { 2935 - "node": ">=18" 2936 - } 2937 - }, 2938 - "node_modules/wrangler/node_modules/esbuild": { 2939 - "version": "0.25.2", 2940 - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.2.tgz", 2941 - "integrity": "sha512-16854zccKPnC+toMywC+uKNeYSv+/eXkevRAfwRD/G9Cleq66m8XFIrigkbvauLLlCfDL45Q2cWegSg53gGBnQ==", 2942 - "dev": true, 2943 - "hasInstallScript": true, 2944 - "license": "MIT", 2945 - "bin": { 2946 - "esbuild": "bin/esbuild" 2947 - }, 2948 - "engines": { 2949 - "node": ">=18" 2950 - }, 2951 - "optionalDependencies": { 2952 - "@esbuild/aix-ppc64": "0.25.2", 2953 - "@esbuild/android-arm": "0.25.2", 2954 - "@esbuild/android-arm64": "0.25.2", 2955 - "@esbuild/android-x64": "0.25.2", 2956 - "@esbuild/darwin-arm64": "0.25.2", 2957 - "@esbuild/darwin-x64": "0.25.2", 2958 - "@esbuild/freebsd-arm64": "0.25.2", 2959 - "@esbuild/freebsd-x64": "0.25.2", 2960 - "@esbuild/linux-arm": "0.25.2", 2961 - "@esbuild/linux-arm64": "0.25.2", 2962 - "@esbuild/linux-ia32": "0.25.2", 2963 - "@esbuild/linux-loong64": "0.25.2", 2964 - "@esbuild/linux-mips64el": "0.25.2", 2965 - "@esbuild/linux-ppc64": "0.25.2", 2966 - "@esbuild/linux-riscv64": "0.25.2", 2967 - "@esbuild/linux-s390x": "0.25.2", 2968 - "@esbuild/linux-x64": "0.25.2", 2969 - "@esbuild/netbsd-arm64": "0.25.2", 2970 - "@esbuild/netbsd-x64": "0.25.2", 2971 - "@esbuild/openbsd-arm64": "0.25.2", 2972 - "@esbuild/openbsd-x64": "0.25.2", 2973 - "@esbuild/sunos-x64": "0.25.2", 2974 - "@esbuild/win32-arm64": "0.25.2", 2975 - "@esbuild/win32-ia32": "0.25.2", 2976 - "@esbuild/win32-x64": "0.25.2" 2977 - } 2978 - }, 2979 - "node_modules/ws": { 2980 - "version": "8.18.0", 2981 - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", 2982 - "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", 2983 - "dev": true, 2984 - "license": "MIT", 2985 - "engines": { 2986 - "node": ">=10.0.0" 2987 - }, 2988 - "peerDependencies": { 2989 - "bufferutil": "^4.0.1", 2990 - "utf-8-validate": ">=5.0.2" 2991 - }, 2992 - "peerDependenciesMeta": { 2993 - "bufferutil": { 2994 - "optional": true 2995 - }, 2996 - "utf-8-validate": { 2997 
- "optional": true 2998 - } 2999 - } 3000 - }, 3001 - "node_modules/youch": { 3002 - "version": "3.3.4", 3003 - "resolved": "https://registry.npmjs.org/youch/-/youch-3.3.4.tgz", 3004 - "integrity": "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg==", 3005 - "dev": true, 3006 - "license": "MIT", 3007 - "dependencies": { 3008 - "cookie": "^0.7.1", 3009 - "mustache": "^4.2.0", 3010 - "stacktracey": "^2.1.8" 3011 - } 3012 - }, 3013 - "node_modules/zod": { 3014 - "version": "3.24.3", 3015 - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.3.tgz", 3016 - "integrity": "sha512-HhY1oqzWCQWuUqvBFnsyrtZRhyPeR7SUGv+C4+MsisMuVfSPx8HpwWqH8tRahSlt6M3PiFAcoeFhZAqIXTxoSg==", 3017 - "dev": true, 3018 - "license": "MIT", 3019 - "funding": { 3020 - "url": "https://github.com/sponsors/colinhacks" 3021 - } 3022 - } 3023 - } 2 + "name": "avatar", 3 + "version": "0.0.0", 4 + "lockfileVersion": 3, 5 + "requires": true, 6 + "packages": { 7 + "": { 8 + "name": "avatar", 9 + "version": "0.0.0", 10 + "dependencies": { 11 + "@atcute/identity-resolver": "^1.2.2" 12 + }, 13 + "devDependencies": { 14 + "@cloudflare/vitest-pool-workers": "^0.8.19", 15 + "vitest": "~3.0.7", 16 + "wrangler": "^4.14.1" 17 + } 18 + }, 19 + "node_modules/@atcute/identity": { 20 + "version": "1.1.3", 21 + "resolved": "https://registry.npmjs.org/@atcute/identity/-/identity-1.1.3.tgz", 22 + "integrity": "sha512-oIqPoI8TwWeQxvcLmFEZLdN2XdWcaLVtlm8pNk0E72As9HNzzD9pwKPrLr3rmTLRIoULPPFmq9iFNsTeCIU9ng==", 23 + "license": "0BSD", 24 + "peer": true, 25 + "dependencies": { 26 + "@atcute/lexicons": "^1.2.4", 27 + "@badrap/valita": "^0.4.6" 28 + } 29 + }, 30 + "node_modules/@atcute/identity-resolver": { 31 + "version": "1.2.2", 32 + "resolved": "https://registry.npmjs.org/@atcute/identity-resolver/-/identity-resolver-1.2.2.tgz", 33 + "integrity": "sha512-eUh/UH4bFvuXS0X7epYCeJC/kj4rbBXfSRumLEH4smMVwNOgTo7cL/0Srty+P/qVPoZEyXdfEbS0PHJyzoXmHw==", 34 + "license": "0BSD", 35 + "dependencies": { 36 + "@atcute/lexicons": "^1.2.6", 37 + "@atcute/util-fetch": "^1.0.5", 38 + "@badrap/valita": "^0.4.6" 39 + }, 40 + "peerDependencies": { 41 + "@atcute/identity": "^1.0.0" 42 + } 43 + }, 44 + "node_modules/@atcute/lexicons": { 45 + "version": "1.2.6", 46 + "resolved": "https://registry.npmjs.org/@atcute/lexicons/-/lexicons-1.2.6.tgz", 47 + "integrity": "sha512-s76UQd8D+XmHIzrjD9CJ9SOOeeLPHc+sMmcj7UFakAW/dDFXc579fcRdRfuUKvXBL5v1Gs2VgDdlh/IvvQZAwA==", 48 + "license": "0BSD", 49 + "dependencies": { 50 + "@atcute/uint8array": "^1.0.6", 51 + "@atcute/util-text": "^0.0.1", 52 + "@standard-schema/spec": "^1.1.0", 53 + "esm-env": "^1.2.2" 54 + } 55 + }, 56 + "node_modules/@atcute/uint8array": { 57 + "version": "1.0.6", 58 + "resolved": "https://registry.npmjs.org/@atcute/uint8array/-/uint8array-1.0.6.tgz", 59 + "integrity": "sha512-ucfRBQc7BFT8n9eCyGOzDHEMKF/nZwhS2pPao4Xtab1ML3HdFYcX2DM1tadCzas85QTGxHe5urnUAAcNKGRi9A==", 60 + "license": "0BSD" 61 + }, 62 + "node_modules/@atcute/util-fetch": { 63 + "version": "1.0.5", 64 + "resolved": "https://registry.npmjs.org/@atcute/util-fetch/-/util-fetch-1.0.5.tgz", 65 + "integrity": "sha512-qjHj01BGxjSjIFdPiAjSARnodJIIyKxnCMMEcXMESo9TAyND6XZQqrie5fia+LlYWVXdpsTds8uFQwc9jdKTig==", 66 + "license": "0BSD", 67 + "dependencies": { 68 + "@badrap/valita": "^0.4.6" 69 + } 70 + }, 71 + "node_modules/@atcute/util-text": { 72 + "version": "0.0.1", 73 + "resolved": "https://registry.npmjs.org/@atcute/util-text/-/util-text-0.0.1.tgz", 74 + "integrity": 
"sha512-t1KZqvn0AYy+h2KcJyHnKF9aEqfRfMUmyY8j1ELtAEIgqN9CxINAjxnoRCJIFUlvWzb+oY3uElQL/Vyk3yss0g==", 75 + "license": "0BSD", 76 + "dependencies": { 77 + "unicode-segmenter": "^0.14.4" 78 + } 79 + }, 80 + "node_modules/@badrap/valita": { 81 + "version": "0.4.6", 82 + "resolved": "https://registry.npmjs.org/@badrap/valita/-/valita-0.4.6.tgz", 83 + "integrity": "sha512-4kdqcjyxo/8RQ8ayjms47HCWZIF5981oE5nIenbfThKDxWXtEHKipAOWlflpPJzZx9y/JWYQkp18Awr7VuepFg==", 84 + "license": "MIT", 85 + "engines": { 86 + "node": ">= 18" 87 + } 88 + }, 89 + "node_modules/@cloudflare/kv-asset-handler": { 90 + "version": "0.4.0", 91 + "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.0.tgz", 92 + "integrity": "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA==", 93 + "dev": true, 94 + "license": "MIT OR Apache-2.0", 95 + "dependencies": { 96 + "mime": "^3.0.0" 97 + }, 98 + "engines": { 99 + "node": ">=18.0.0" 100 + } 101 + }, 102 + "node_modules/@cloudflare/unenv-preset": { 103 + "version": "2.3.1", 104 + "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.3.1.tgz", 105 + "integrity": "sha512-Xq57Qd+ADpt6hibcVBO0uLG9zzRgyRhfCUgBT9s+g3+3Ivg5zDyVgLFy40ES1VdNcu8rPNSivm9A+kGP5IVaPg==", 106 + "dev": true, 107 + "license": "MIT OR Apache-2.0", 108 + "peerDependencies": { 109 + "unenv": "2.0.0-rc.15", 110 + "workerd": "^1.20250320.0" 111 + }, 112 + "peerDependenciesMeta": { 113 + "workerd": { 114 + "optional": true 115 + } 116 + } 117 + }, 118 + "node_modules/@cloudflare/vitest-pool-workers": { 119 + "version": "0.8.24", 120 + "resolved": "https://registry.npmjs.org/@cloudflare/vitest-pool-workers/-/vitest-pool-workers-0.8.24.tgz", 121 + "integrity": "sha512-wT2PABJQ9YLYWrVu4CRZOjvmjHkdbMyLTZPU9n/7JEMM3pgG8dY41F1Rj31UsXRQaXX39A/CTPGlk58dcMUysA==", 122 + "dev": true, 123 + "license": "MIT", 124 + "dependencies": { 125 + "birpc": "0.2.14", 126 + "cjs-module-lexer": "^1.2.3", 127 + "devalue": "^4.3.0", 128 + "miniflare": "4.20250428.1", 129 + "semver": "^7.7.1", 130 + "wrangler": "4.14.1", 131 + "zod": "^3.22.3" 132 + }, 133 + "peerDependencies": { 134 + "@vitest/runner": "2.0.x - 3.1.x", 135 + "@vitest/snapshot": "2.0.x - 3.1.x", 136 + "vitest": "2.0.x - 3.1.x" 137 + } 138 + }, 139 + "node_modules/@cloudflare/workerd-darwin-64": { 140 + "version": "1.20250428.0", 141 + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20250428.0.tgz", 142 + "integrity": "sha512-6nVe9oV4Hdec6ctzMtW80TiDvNTd2oFPi3VsKqSDVaJSJbL+4b6seyJ7G/UEPI+si6JhHBSLV2/9lNXNGLjClA==", 143 + "cpu": [ 144 + "x64" 145 + ], 146 + "dev": true, 147 + "license": "Apache-2.0", 148 + "optional": true, 149 + "os": [ 150 + "darwin" 151 + ], 152 + "engines": { 153 + "node": ">=16" 154 + } 155 + }, 156 + "node_modules/@cloudflare/workerd-darwin-arm64": { 157 + "version": "1.20250428.0", 158 + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20250428.0.tgz", 159 + "integrity": "sha512-/TB7bh7SIJ5f+6r4PHsAz7+9Qal/TK1cJuKFkUno1kqGlZbdrMwH0ATYwlWC/nBFeu2FB3NUolsTntEuy23hnQ==", 160 + "cpu": [ 161 + "arm64" 162 + ], 163 + "dev": true, 164 + "license": "Apache-2.0", 165 + "optional": true, 166 + "os": [ 167 + "darwin" 168 + ], 169 + "engines": { 170 + "node": ">=16" 171 + } 172 + }, 173 + "node_modules/@cloudflare/workerd-linux-64": { 174 + "version": "1.20250428.0", 175 + "resolved": 
"https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20250428.0.tgz", 176 + "integrity": "sha512-9eCbj+R3CKqpiXP6DfAA20DxKge+OTj7Hyw3ZewiEhWH9INIHiJwJQYybu4iq9kJEGjnGvxgguLFjSCWm26hgg==", 177 + "cpu": [ 178 + "x64" 179 + ], 180 + "dev": true, 181 + "license": "Apache-2.0", 182 + "optional": true, 183 + "os": [ 184 + "linux" 185 + ], 186 + "engines": { 187 + "node": ">=16" 188 + } 189 + }, 190 + "node_modules/@cloudflare/workerd-linux-arm64": { 191 + "version": "1.20250428.0", 192 + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20250428.0.tgz", 193 + "integrity": "sha512-D9NRBnW46nl1EQsP13qfkYb5lbt4C6nxl38SBKY/NOcZAUoHzNB5K0GaK8LxvpkM7X/97ySojlMfR5jh5DNXYQ==", 194 + "cpu": [ 195 + "arm64" 196 + ], 197 + "dev": true, 198 + "license": "Apache-2.0", 199 + "optional": true, 200 + "os": [ 201 + "linux" 202 + ], 203 + "engines": { 204 + "node": ">=16" 205 + } 206 + }, 207 + "node_modules/@cloudflare/workerd-windows-64": { 208 + "version": "1.20250428.0", 209 + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20250428.0.tgz", 210 + "integrity": "sha512-RQCRj28eitjKD0tmei6iFOuWqMuHMHdNGEigRmbkmuTlpbWHNAoHikgCzZQ/dkKDdatA76TmcpbyECNf31oaTA==", 211 + "cpu": [ 212 + "x64" 213 + ], 214 + "dev": true, 215 + "license": "Apache-2.0", 216 + "optional": true, 217 + "os": [ 218 + "win32" 219 + ], 220 + "engines": { 221 + "node": ">=16" 222 + } 223 + }, 224 + "node_modules/@cspotcode/source-map-support": { 225 + "version": "0.8.1", 226 + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", 227 + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", 228 + "dev": true, 229 + "license": "MIT", 230 + "dependencies": { 231 + "@jridgewell/trace-mapping": "0.3.9" 232 + }, 233 + "engines": { 234 + "node": ">=12" 235 + } 236 + }, 237 + "node_modules/@emnapi/runtime": { 238 + "version": "1.4.3", 239 + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.3.tgz", 240 + "integrity": "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==", 241 + "dev": true, 242 + "license": "MIT", 243 + "optional": true, 244 + "dependencies": { 245 + "tslib": "^2.4.0" 246 + } 247 + }, 248 + "node_modules/@esbuild/aix-ppc64": { 249 + "version": "0.25.3", 250 + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.3.tgz", 251 + "integrity": "sha512-W8bFfPA8DowP8l//sxjJLSLkD8iEjMc7cBVyP+u4cEv9sM7mdUCkgsj+t0n/BWPFtv7WWCN5Yzj0N6FJNUUqBQ==", 252 + "cpu": [ 253 + "ppc64" 254 + ], 255 + "dev": true, 256 + "license": "MIT", 257 + "optional": true, 258 + "os": [ 259 + "aix" 260 + ], 261 + "engines": { 262 + "node": ">=18" 263 + } 264 + }, 265 + "node_modules/@esbuild/android-arm": { 266 + "version": "0.25.3", 267 + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.3.tgz", 268 + "integrity": "sha512-PuwVXbnP87Tcff5I9ngV0lmiSu40xw1At6i3GsU77U7cjDDB4s0X2cyFuBiDa1SBk9DnvWwnGvVaGBqoFWPb7A==", 269 + "cpu": [ 270 + "arm" 271 + ], 272 + "dev": true, 273 + "license": "MIT", 274 + "optional": true, 275 + "os": [ 276 + "android" 277 + ], 278 + "engines": { 279 + "node": ">=18" 280 + } 281 + }, 282 + "node_modules/@esbuild/android-arm64": { 283 + "version": "0.25.3", 284 + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.3.tgz", 285 + "integrity": 
"sha512-XelR6MzjlZuBM4f5z2IQHK6LkK34Cvv6Rj2EntER3lwCBFdg6h2lKbtRjpTTsdEjD/WSe1q8UyPBXP1x3i/wYQ==", 286 + "cpu": [ 287 + "arm64" 288 + ], 289 + "dev": true, 290 + "license": "MIT", 291 + "optional": true, 292 + "os": [ 293 + "android" 294 + ], 295 + "engines": { 296 + "node": ">=18" 297 + } 298 + }, 299 + "node_modules/@esbuild/android-x64": { 300 + "version": "0.25.3", 301 + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.3.tgz", 302 + "integrity": "sha512-ogtTpYHT/g1GWS/zKM0cc/tIebFjm1F9Aw1boQ2Y0eUQ+J89d0jFY//s9ei9jVIlkYi8AfOjiixcLJSGNSOAdQ==", 303 + "cpu": [ 304 + "x64" 305 + ], 306 + "dev": true, 307 + "license": "MIT", 308 + "optional": true, 309 + "os": [ 310 + "android" 311 + ], 312 + "engines": { 313 + "node": ">=18" 314 + } 315 + }, 316 + "node_modules/@esbuild/darwin-arm64": { 317 + "version": "0.25.3", 318 + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.3.tgz", 319 + "integrity": "sha512-eESK5yfPNTqpAmDfFWNsOhmIOaQA59tAcF/EfYvo5/QWQCzXn5iUSOnqt3ra3UdzBv073ykTtmeLJZGt3HhA+w==", 320 + "cpu": [ 321 + "arm64" 322 + ], 323 + "dev": true, 324 + "license": "MIT", 325 + "optional": true, 326 + "os": [ 327 + "darwin" 328 + ], 329 + "engines": { 330 + "node": ">=18" 331 + } 332 + }, 333 + "node_modules/@esbuild/darwin-x64": { 334 + "version": "0.25.3", 335 + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.3.tgz", 336 + "integrity": "sha512-Kd8glo7sIZtwOLcPbW0yLpKmBNWMANZhrC1r6K++uDR2zyzb6AeOYtI6udbtabmQpFaxJ8uduXMAo1gs5ozz8A==", 337 + "cpu": [ 338 + "x64" 339 + ], 340 + "dev": true, 341 + "license": "MIT", 342 + "optional": true, 343 + "os": [ 344 + "darwin" 345 + ], 346 + "engines": { 347 + "node": ">=18" 348 + } 349 + }, 350 + "node_modules/@esbuild/freebsd-arm64": { 351 + "version": "0.25.3", 352 + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.3.tgz", 353 + "integrity": "sha512-EJiyS70BYybOBpJth3M0KLOus0n+RRMKTYzhYhFeMwp7e/RaajXvP+BWlmEXNk6uk+KAu46j/kaQzr6au+JcIw==", 354 + "cpu": [ 355 + "arm64" 356 + ], 357 + "dev": true, 358 + "license": "MIT", 359 + "optional": true, 360 + "os": [ 361 + "freebsd" 362 + ], 363 + "engines": { 364 + "node": ">=18" 365 + } 366 + }, 367 + "node_modules/@esbuild/freebsd-x64": { 368 + "version": "0.25.3", 369 + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.3.tgz", 370 + "integrity": "sha512-Q+wSjaLpGxYf7zC0kL0nDlhsfuFkoN+EXrx2KSB33RhinWzejOd6AvgmP5JbkgXKmjhmpfgKZq24pneodYqE8Q==", 371 + "cpu": [ 372 + "x64" 373 + ], 374 + "dev": true, 375 + "license": "MIT", 376 + "optional": true, 377 + "os": [ 378 + "freebsd" 379 + ], 380 + "engines": { 381 + "node": ">=18" 382 + } 383 + }, 384 + "node_modules/@esbuild/linux-arm": { 385 + "version": "0.25.3", 386 + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.3.tgz", 387 + "integrity": "sha512-dUOVmAUzuHy2ZOKIHIKHCm58HKzFqd+puLaS424h6I85GlSDRZIA5ycBixb3mFgM0Jdh+ZOSB6KptX30DD8YOQ==", 388 + "cpu": [ 389 + "arm" 390 + ], 391 + "dev": true, 392 + "license": "MIT", 393 + "optional": true, 394 + "os": [ 395 + "linux" 396 + ], 397 + "engines": { 398 + "node": ">=18" 399 + } 400 + }, 401 + "node_modules/@esbuild/linux-arm64": { 402 + "version": "0.25.3", 403 + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.3.tgz", 404 + "integrity": "sha512-xCUgnNYhRD5bb1C1nqrDV1PfkwgbswTTBRbAd8aH5PhYzikdf/ddtsYyMXFfGSsb/6t6QaPSzxtbfAZr9uox4A==", 405 + "cpu": [ 406 + "arm64" 407 + ], 408 + "dev": true, 409 + 
"license": "MIT", 410 + "optional": true, 411 + "os": [ 412 + "linux" 413 + ], 414 + "engines": { 415 + "node": ">=18" 416 + } 417 + }, 418 + "node_modules/@esbuild/linux-ia32": { 419 + "version": "0.25.3", 420 + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.3.tgz", 421 + "integrity": "sha512-yplPOpczHOO4jTYKmuYuANI3WhvIPSVANGcNUeMlxH4twz/TeXuzEP41tGKNGWJjuMhotpGabeFYGAOU2ummBw==", 422 + "cpu": [ 423 + "ia32" 424 + ], 425 + "dev": true, 426 + "license": "MIT", 427 + "optional": true, 428 + "os": [ 429 + "linux" 430 + ], 431 + "engines": { 432 + "node": ">=18" 433 + } 434 + }, 435 + "node_modules/@esbuild/linux-loong64": { 436 + "version": "0.25.3", 437 + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.3.tgz", 438 + "integrity": "sha512-P4BLP5/fjyihmXCELRGrLd793q/lBtKMQl8ARGpDxgzgIKJDRJ/u4r1A/HgpBpKpKZelGct2PGI4T+axcedf6g==", 439 + "cpu": [ 440 + "loong64" 441 + ], 442 + "dev": true, 443 + "license": "MIT", 444 + "optional": true, 445 + "os": [ 446 + "linux" 447 + ], 448 + "engines": { 449 + "node": ">=18" 450 + } 451 + }, 452 + "node_modules/@esbuild/linux-mips64el": { 453 + "version": "0.25.3", 454 + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.3.tgz", 455 + "integrity": "sha512-eRAOV2ODpu6P5divMEMa26RRqb2yUoYsuQQOuFUexUoQndm4MdpXXDBbUoKIc0iPa4aCO7gIhtnYomkn2x+bag==", 456 + "cpu": [ 457 + "mips64el" 458 + ], 459 + "dev": true, 460 + "license": "MIT", 461 + "optional": true, 462 + "os": [ 463 + "linux" 464 + ], 465 + "engines": { 466 + "node": ">=18" 467 + } 468 + }, 469 + "node_modules/@esbuild/linux-ppc64": { 470 + "version": "0.25.3", 471 + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.3.tgz", 472 + "integrity": "sha512-ZC4jV2p7VbzTlnl8nZKLcBkfzIf4Yad1SJM4ZMKYnJqZFD4rTI+pBG65u8ev4jk3/MPwY9DvGn50wi3uhdaghg==", 473 + "cpu": [ 474 + "ppc64" 475 + ], 476 + "dev": true, 477 + "license": "MIT", 478 + "optional": true, 479 + "os": [ 480 + "linux" 481 + ], 482 + "engines": { 483 + "node": ">=18" 484 + } 485 + }, 486 + "node_modules/@esbuild/linux-riscv64": { 487 + "version": "0.25.3", 488 + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.3.tgz", 489 + "integrity": "sha512-LDDODcFzNtECTrUUbVCs6j9/bDVqy7DDRsuIXJg6so+mFksgwG7ZVnTruYi5V+z3eE5y+BJZw7VvUadkbfg7QA==", 490 + "cpu": [ 491 + "riscv64" 492 + ], 493 + "dev": true, 494 + "license": "MIT", 495 + "optional": true, 496 + "os": [ 497 + "linux" 498 + ], 499 + "engines": { 500 + "node": ">=18" 501 + } 502 + }, 503 + "node_modules/@esbuild/linux-s390x": { 504 + "version": "0.25.3", 505 + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.3.tgz", 506 + "integrity": "sha512-s+w/NOY2k0yC2p9SLen+ymflgcpRkvwwa02fqmAwhBRI3SC12uiS10edHHXlVWwfAagYSY5UpmT/zISXPMW3tQ==", 507 + "cpu": [ 508 + "s390x" 509 + ], 510 + "dev": true, 511 + "license": "MIT", 512 + "optional": true, 513 + "os": [ 514 + "linux" 515 + ], 516 + "engines": { 517 + "node": ">=18" 518 + } 519 + }, 520 + "node_modules/@esbuild/linux-x64": { 521 + "version": "0.25.3", 522 + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.3.tgz", 523 + "integrity": "sha512-nQHDz4pXjSDC6UfOE1Fw9Q8d6GCAd9KdvMZpfVGWSJztYCarRgSDfOVBY5xwhQXseiyxapkiSJi/5/ja8mRFFA==", 524 + "cpu": [ 525 + "x64" 526 + ], 527 + "dev": true, 528 + "license": "MIT", 529 + "optional": true, 530 + "os": [ 531 + "linux" 532 + ], 533 + "engines": { 534 + "node": ">=18" 535 + } 536 + }, 537 + 
"node_modules/@esbuild/netbsd-arm64": { 538 + "version": "0.25.3", 539 + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.3.tgz", 540 + "integrity": "sha512-1QaLtOWq0mzK6tzzp0jRN3eccmN3hezey7mhLnzC6oNlJoUJz4nym5ZD7mDnS/LZQgkrhEbEiTn515lPeLpgWA==", 541 + "cpu": [ 542 + "arm64" 543 + ], 544 + "dev": true, 545 + "license": "MIT", 546 + "optional": true, 547 + "os": [ 548 + "netbsd" 549 + ], 550 + "engines": { 551 + "node": ">=18" 552 + } 553 + }, 554 + "node_modules/@esbuild/netbsd-x64": { 555 + "version": "0.25.3", 556 + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.3.tgz", 557 + "integrity": "sha512-i5Hm68HXHdgv8wkrt+10Bc50zM0/eonPb/a/OFVfB6Qvpiirco5gBA5bz7S2SHuU+Y4LWn/zehzNX14Sp4r27g==", 558 + "cpu": [ 559 + "x64" 560 + ], 561 + "dev": true, 562 + "license": "MIT", 563 + "optional": true, 564 + "os": [ 565 + "netbsd" 566 + ], 567 + "engines": { 568 + "node": ">=18" 569 + } 570 + }, 571 + "node_modules/@esbuild/openbsd-arm64": { 572 + "version": "0.25.3", 573 + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.3.tgz", 574 + "integrity": "sha512-zGAVApJEYTbOC6H/3QBr2mq3upG/LBEXr85/pTtKiv2IXcgKV0RT0QA/hSXZqSvLEpXeIxah7LczB4lkiYhTAQ==", 575 + "cpu": [ 576 + "arm64" 577 + ], 578 + "dev": true, 579 + "license": "MIT", 580 + "optional": true, 581 + "os": [ 582 + "openbsd" 583 + ], 584 + "engines": { 585 + "node": ">=18" 586 + } 587 + }, 588 + "node_modules/@esbuild/openbsd-x64": { 589 + "version": "0.25.3", 590 + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.3.tgz", 591 + "integrity": "sha512-fpqctI45NnCIDKBH5AXQBsD0NDPbEFczK98hk/aa6HJxbl+UtLkJV2+Bvy5hLSLk3LHmqt0NTkKNso1A9y1a4w==", 592 + "cpu": [ 593 + "x64" 594 + ], 595 + "dev": true, 596 + "license": "MIT", 597 + "optional": true, 598 + "os": [ 599 + "openbsd" 600 + ], 601 + "engines": { 602 + "node": ">=18" 603 + } 604 + }, 605 + "node_modules/@esbuild/sunos-x64": { 606 + "version": "0.25.3", 607 + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.3.tgz", 608 + "integrity": "sha512-ROJhm7d8bk9dMCUZjkS8fgzsPAZEjtRJqCAmVgB0gMrvG7hfmPmz9k1rwO4jSiblFjYmNvbECL9uhaPzONMfgA==", 609 + "cpu": [ 610 + "x64" 611 + ], 612 + "dev": true, 613 + "license": "MIT", 614 + "optional": true, 615 + "os": [ 616 + "sunos" 617 + ], 618 + "engines": { 619 + "node": ">=18" 620 + } 621 + }, 622 + "node_modules/@esbuild/win32-arm64": { 623 + "version": "0.25.3", 624 + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.3.tgz", 625 + "integrity": "sha512-YWcow8peiHpNBiIXHwaswPnAXLsLVygFwCB3A7Bh5jRkIBFWHGmNQ48AlX4xDvQNoMZlPYzjVOQDYEzWCqufMQ==", 626 + "cpu": [ 627 + "arm64" 628 + ], 629 + "dev": true, 630 + "license": "MIT", 631 + "optional": true, 632 + "os": [ 633 + "win32" 634 + ], 635 + "engines": { 636 + "node": ">=18" 637 + } 638 + }, 639 + "node_modules/@esbuild/win32-ia32": { 640 + "version": "0.25.3", 641 + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.3.tgz", 642 + "integrity": "sha512-qspTZOIGoXVS4DpNqUYUs9UxVb04khS1Degaw/MnfMe7goQ3lTfQ13Vw4qY/Nj0979BGvMRpAYbs/BAxEvU8ew==", 643 + "cpu": [ 644 + "ia32" 645 + ], 646 + "dev": true, 647 + "license": "MIT", 648 + "optional": true, 649 + "os": [ 650 + "win32" 651 + ], 652 + "engines": { 653 + "node": ">=18" 654 + } 655 + }, 656 + "node_modules/@esbuild/win32-x64": { 657 + "version": "0.25.3", 658 + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.3.tgz", 659 + "integrity": 
"sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg==", 660 + "cpu": [ 661 + "x64" 662 + ], 663 + "dev": true, 664 + "license": "MIT", 665 + "optional": true, 666 + "os": [ 667 + "win32" 668 + ], 669 + "engines": { 670 + "node": ">=18" 671 + } 672 + }, 673 + "node_modules/@fastify/busboy": { 674 + "version": "2.1.1", 675 + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", 676 + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", 677 + "dev": true, 678 + "license": "MIT", 679 + "engines": { 680 + "node": ">=14" 681 + } 682 + }, 683 + "node_modules/@img/sharp-darwin-arm64": { 684 + "version": "0.33.5", 685 + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", 686 + "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", 687 + "cpu": [ 688 + "arm64" 689 + ], 690 + "dev": true, 691 + "license": "Apache-2.0", 692 + "optional": true, 693 + "os": [ 694 + "darwin" 695 + ], 696 + "engines": { 697 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 698 + }, 699 + "funding": { 700 + "url": "https://opencollective.com/libvips" 701 + }, 702 + "optionalDependencies": { 703 + "@img/sharp-libvips-darwin-arm64": "1.0.4" 704 + } 705 + }, 706 + "node_modules/@img/sharp-darwin-x64": { 707 + "version": "0.33.5", 708 + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", 709 + "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", 710 + "cpu": [ 711 + "x64" 712 + ], 713 + "dev": true, 714 + "license": "Apache-2.0", 715 + "optional": true, 716 + "os": [ 717 + "darwin" 718 + ], 719 + "engines": { 720 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 721 + }, 722 + "funding": { 723 + "url": "https://opencollective.com/libvips" 724 + }, 725 + "optionalDependencies": { 726 + "@img/sharp-libvips-darwin-x64": "1.0.4" 727 + } 728 + }, 729 + "node_modules/@img/sharp-libvips-darwin-arm64": { 730 + "version": "1.0.4", 731 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", 732 + "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", 733 + "cpu": [ 734 + "arm64" 735 + ], 736 + "dev": true, 737 + "license": "LGPL-3.0-or-later", 738 + "optional": true, 739 + "os": [ 740 + "darwin" 741 + ], 742 + "funding": { 743 + "url": "https://opencollective.com/libvips" 744 + } 745 + }, 746 + "node_modules/@img/sharp-libvips-darwin-x64": { 747 + "version": "1.0.4", 748 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", 749 + "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", 750 + "cpu": [ 751 + "x64" 752 + ], 753 + "dev": true, 754 + "license": "LGPL-3.0-or-later", 755 + "optional": true, 756 + "os": [ 757 + "darwin" 758 + ], 759 + "funding": { 760 + "url": "https://opencollective.com/libvips" 761 + } 762 + }, 763 + "node_modules/@img/sharp-libvips-linux-arm": { 764 + "version": "1.0.5", 765 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", 766 + "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", 767 + "cpu": [ 768 + "arm" 769 + ], 770 + "dev": true, 771 + "license": 
"LGPL-3.0-or-later", 772 + "optional": true, 773 + "os": [ 774 + "linux" 775 + ], 776 + "funding": { 777 + "url": "https://opencollective.com/libvips" 778 + } 779 + }, 780 + "node_modules/@img/sharp-libvips-linux-arm64": { 781 + "version": "1.0.4", 782 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", 783 + "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", 784 + "cpu": [ 785 + "arm64" 786 + ], 787 + "dev": true, 788 + "license": "LGPL-3.0-or-later", 789 + "optional": true, 790 + "os": [ 791 + "linux" 792 + ], 793 + "funding": { 794 + "url": "https://opencollective.com/libvips" 795 + } 796 + }, 797 + "node_modules/@img/sharp-libvips-linux-s390x": { 798 + "version": "1.0.4", 799 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", 800 + "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", 801 + "cpu": [ 802 + "s390x" 803 + ], 804 + "dev": true, 805 + "license": "LGPL-3.0-or-later", 806 + "optional": true, 807 + "os": [ 808 + "linux" 809 + ], 810 + "funding": { 811 + "url": "https://opencollective.com/libvips" 812 + } 813 + }, 814 + "node_modules/@img/sharp-libvips-linux-x64": { 815 + "version": "1.0.4", 816 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", 817 + "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", 818 + "cpu": [ 819 + "x64" 820 + ], 821 + "dev": true, 822 + "license": "LGPL-3.0-or-later", 823 + "optional": true, 824 + "os": [ 825 + "linux" 826 + ], 827 + "funding": { 828 + "url": "https://opencollective.com/libvips" 829 + } 830 + }, 831 + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { 832 + "version": "1.0.4", 833 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", 834 + "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", 835 + "cpu": [ 836 + "arm64" 837 + ], 838 + "dev": true, 839 + "license": "LGPL-3.0-or-later", 840 + "optional": true, 841 + "os": [ 842 + "linux" 843 + ], 844 + "funding": { 845 + "url": "https://opencollective.com/libvips" 846 + } 847 + }, 848 + "node_modules/@img/sharp-libvips-linuxmusl-x64": { 849 + "version": "1.0.4", 850 + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", 851 + "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", 852 + "cpu": [ 853 + "x64" 854 + ], 855 + "dev": true, 856 + "license": "LGPL-3.0-or-later", 857 + "optional": true, 858 + "os": [ 859 + "linux" 860 + ], 861 + "funding": { 862 + "url": "https://opencollective.com/libvips" 863 + } 864 + }, 865 + "node_modules/@img/sharp-linux-arm": { 866 + "version": "0.33.5", 867 + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", 868 + "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", 869 + "cpu": [ 870 + "arm" 871 + ], 872 + "dev": true, 873 + "license": "Apache-2.0", 874 + "optional": true, 875 + "os": [ 876 + "linux" 877 + ], 878 + "engines": { 879 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 880 + }, 881 + "funding": { 882 + "url": "https://opencollective.com/libvips" 883 + }, 
884 + "optionalDependencies": { 885 + "@img/sharp-libvips-linux-arm": "1.0.5" 886 + } 887 + }, 888 + "node_modules/@img/sharp-linux-arm64": { 889 + "version": "0.33.5", 890 + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", 891 + "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", 892 + "cpu": [ 893 + "arm64" 894 + ], 895 + "dev": true, 896 + "license": "Apache-2.0", 897 + "optional": true, 898 + "os": [ 899 + "linux" 900 + ], 901 + "engines": { 902 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 903 + }, 904 + "funding": { 905 + "url": "https://opencollective.com/libvips" 906 + }, 907 + "optionalDependencies": { 908 + "@img/sharp-libvips-linux-arm64": "1.0.4" 909 + } 910 + }, 911 + "node_modules/@img/sharp-linux-s390x": { 912 + "version": "0.33.5", 913 + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", 914 + "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", 915 + "cpu": [ 916 + "s390x" 917 + ], 918 + "dev": true, 919 + "license": "Apache-2.0", 920 + "optional": true, 921 + "os": [ 922 + "linux" 923 + ], 924 + "engines": { 925 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 926 + }, 927 + "funding": { 928 + "url": "https://opencollective.com/libvips" 929 + }, 930 + "optionalDependencies": { 931 + "@img/sharp-libvips-linux-s390x": "1.0.4" 932 + } 933 + }, 934 + "node_modules/@img/sharp-linux-x64": { 935 + "version": "0.33.5", 936 + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", 937 + "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", 938 + "cpu": [ 939 + "x64" 940 + ], 941 + "dev": true, 942 + "license": "Apache-2.0", 943 + "optional": true, 944 + "os": [ 945 + "linux" 946 + ], 947 + "engines": { 948 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 949 + }, 950 + "funding": { 951 + "url": "https://opencollective.com/libvips" 952 + }, 953 + "optionalDependencies": { 954 + "@img/sharp-libvips-linux-x64": "1.0.4" 955 + } 956 + }, 957 + "node_modules/@img/sharp-linuxmusl-arm64": { 958 + "version": "0.33.5", 959 + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", 960 + "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", 961 + "cpu": [ 962 + "arm64" 963 + ], 964 + "dev": true, 965 + "license": "Apache-2.0", 966 + "optional": true, 967 + "os": [ 968 + "linux" 969 + ], 970 + "engines": { 971 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 972 + }, 973 + "funding": { 974 + "url": "https://opencollective.com/libvips" 975 + }, 976 + "optionalDependencies": { 977 + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" 978 + } 979 + }, 980 + "node_modules/@img/sharp-linuxmusl-x64": { 981 + "version": "0.33.5", 982 + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", 983 + "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", 984 + "cpu": [ 985 + "x64" 986 + ], 987 + "dev": true, 988 + "license": "Apache-2.0", 989 + "optional": true, 990 + "os": [ 991 + "linux" 992 + ], 993 + "engines": { 994 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 995 + }, 996 + "funding": { 997 + "url": "https://opencollective.com/libvips" 998 + }, 999 + "optionalDependencies": { 1000 + "@img/sharp-libvips-linuxmusl-x64": 
"1.0.4" 1001 + } 1002 + }, 1003 + "node_modules/@img/sharp-wasm32": { 1004 + "version": "0.33.5", 1005 + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", 1006 + "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", 1007 + "cpu": [ 1008 + "wasm32" 1009 + ], 1010 + "dev": true, 1011 + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", 1012 + "optional": true, 1013 + "dependencies": { 1014 + "@emnapi/runtime": "^1.2.0" 1015 + }, 1016 + "engines": { 1017 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 1018 + }, 1019 + "funding": { 1020 + "url": "https://opencollective.com/libvips" 1021 + } 1022 + }, 1023 + "node_modules/@img/sharp-win32-ia32": { 1024 + "version": "0.33.5", 1025 + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", 1026 + "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", 1027 + "cpu": [ 1028 + "ia32" 1029 + ], 1030 + "dev": true, 1031 + "license": "Apache-2.0 AND LGPL-3.0-or-later", 1032 + "optional": true, 1033 + "os": [ 1034 + "win32" 1035 + ], 1036 + "engines": { 1037 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 1038 + }, 1039 + "funding": { 1040 + "url": "https://opencollective.com/libvips" 1041 + } 1042 + }, 1043 + "node_modules/@img/sharp-win32-x64": { 1044 + "version": "0.33.5", 1045 + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", 1046 + "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", 1047 + "cpu": [ 1048 + "x64" 1049 + ], 1050 + "dev": true, 1051 + "license": "Apache-2.0 AND LGPL-3.0-or-later", 1052 + "optional": true, 1053 + "os": [ 1054 + "win32" 1055 + ], 1056 + "engines": { 1057 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 1058 + }, 1059 + "funding": { 1060 + "url": "https://opencollective.com/libvips" 1061 + } 1062 + }, 1063 + "node_modules/@jridgewell/resolve-uri": { 1064 + "version": "3.1.2", 1065 + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", 1066 + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", 1067 + "dev": true, 1068 + "license": "MIT", 1069 + "engines": { 1070 + "node": ">=6.0.0" 1071 + } 1072 + }, 1073 + "node_modules/@jridgewell/sourcemap-codec": { 1074 + "version": "1.5.0", 1075 + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", 1076 + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", 1077 + "dev": true, 1078 + "license": "MIT" 1079 + }, 1080 + "node_modules/@jridgewell/trace-mapping": { 1081 + "version": "0.3.9", 1082 + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", 1083 + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", 1084 + "dev": true, 1085 + "license": "MIT", 1086 + "dependencies": { 1087 + "@jridgewell/resolve-uri": "^3.0.3", 1088 + "@jridgewell/sourcemap-codec": "^1.4.10" 1089 + } 1090 + }, 1091 + "node_modules/@rollup/rollup-android-arm-eabi": { 1092 + "version": "4.40.1", 1093 + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.40.1.tgz", 1094 + "integrity": "sha512-kxz0YeeCrRUHz3zyqvd7n+TVRlNyTifBsmnmNPtk3hQURUyG9eAB+usz6DAwagMusjx/zb3AjvDUvhFGDAexGw==", 1095 + "cpu": 
[ 1096 + "arm" 1097 + ], 1098 + "dev": true, 1099 + "license": "MIT", 1100 + "optional": true, 1101 + "os": [ 1102 + "android" 1103 + ] 1104 + }, 1105 + "node_modules/@rollup/rollup-android-arm64": { 1106 + "version": "4.40.1", 1107 + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.40.1.tgz", 1108 + "integrity": "sha512-PPkxTOisoNC6TpnDKatjKkjRMsdaWIhyuMkA4UsBXT9WEZY4uHezBTjs6Vl4PbqQQeu6oION1w2voYZv9yquCw==", 1109 + "cpu": [ 1110 + "arm64" 1111 + ], 1112 + "dev": true, 1113 + "license": "MIT", 1114 + "optional": true, 1115 + "os": [ 1116 + "android" 1117 + ] 1118 + }, 1119 + "node_modules/@rollup/rollup-darwin-arm64": { 1120 + "version": "4.40.1", 1121 + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.40.1.tgz", 1122 + "integrity": "sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==", 1123 + "cpu": [ 1124 + "arm64" 1125 + ], 1126 + "dev": true, 1127 + "license": "MIT", 1128 + "optional": true, 1129 + "os": [ 1130 + "darwin" 1131 + ] 1132 + }, 1133 + "node_modules/@rollup/rollup-darwin-x64": { 1134 + "version": "4.40.1", 1135 + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.40.1.tgz", 1136 + "integrity": "sha512-nIwkXafAI1/QCS7pxSpv/ZtFW6TXcNUEHAIA9EIyw5OzxJZQ1YDrX+CL6JAIQgZ33CInl1R6mHet9Y/UZTg2Bw==", 1137 + "cpu": [ 1138 + "x64" 1139 + ], 1140 + "dev": true, 1141 + "license": "MIT", 1142 + "optional": true, 1143 + "os": [ 1144 + "darwin" 1145 + ] 1146 + }, 1147 + "node_modules/@rollup/rollup-freebsd-arm64": { 1148 + "version": "4.40.1", 1149 + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.40.1.tgz", 1150 + "integrity": "sha512-BdrLJ2mHTrIYdaS2I99mriyJfGGenSaP+UwGi1kB9BLOCu9SR8ZpbkmmalKIALnRw24kM7qCN0IOm6L0S44iWw==", 1151 + "cpu": [ 1152 + "arm64" 1153 + ], 1154 + "dev": true, 1155 + "license": "MIT", 1156 + "optional": true, 1157 + "os": [ 1158 + "freebsd" 1159 + ] 1160 + }, 1161 + "node_modules/@rollup/rollup-freebsd-x64": { 1162 + "version": "4.40.1", 1163 + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.40.1.tgz", 1164 + "integrity": "sha512-VXeo/puqvCG8JBPNZXZf5Dqq7BzElNJzHRRw3vjBE27WujdzuOPecDPc/+1DcdcTptNBep3861jNq0mYkT8Z6Q==", 1165 + "cpu": [ 1166 + "x64" 1167 + ], 1168 + "dev": true, 1169 + "license": "MIT", 1170 + "optional": true, 1171 + "os": [ 1172 + "freebsd" 1173 + ] 1174 + }, 1175 + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { 1176 + "version": "4.40.1", 1177 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.40.1.tgz", 1178 + "integrity": "sha512-ehSKrewwsESPt1TgSE/na9nIhWCosfGSFqv7vwEtjyAqZcvbGIg4JAcV7ZEh2tfj/IlfBeZjgOXm35iOOjadcg==", 1179 + "cpu": [ 1180 + "arm" 1181 + ], 1182 + "dev": true, 1183 + "license": "MIT", 1184 + "optional": true, 1185 + "os": [ 1186 + "linux" 1187 + ] 1188 + }, 1189 + "node_modules/@rollup/rollup-linux-arm-musleabihf": { 1190 + "version": "4.40.1", 1191 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.40.1.tgz", 1192 + "integrity": "sha512-m39iO/aaurh5FVIu/F4/Zsl8xppd76S4qoID8E+dSRQvTyZTOI2gVk3T4oqzfq1PtcvOfAVlwLMK3KRQMaR8lg==", 1193 + "cpu": [ 1194 + "arm" 1195 + ], 1196 + "dev": true, 1197 + "license": "MIT", 1198 + "optional": true, 1199 + "os": [ 1200 + "linux" 1201 + ] 1202 + }, 1203 + "node_modules/@rollup/rollup-linux-arm64-gnu": { 1204 + "version": 
"4.40.1", 1205 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.40.1.tgz", 1206 + "integrity": "sha512-Y+GHnGaku4aVLSgrT0uWe2o2Rq8te9hi+MwqGF9r9ORgXhmHK5Q71N757u0F8yU1OIwUIFy6YiJtKjtyktk5hg==", 1207 + "cpu": [ 1208 + "arm64" 1209 + ], 1210 + "dev": true, 1211 + "license": "MIT", 1212 + "optional": true, 1213 + "os": [ 1214 + "linux" 1215 + ] 1216 + }, 1217 + "node_modules/@rollup/rollup-linux-arm64-musl": { 1218 + "version": "4.40.1", 1219 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.40.1.tgz", 1220 + "integrity": "sha512-jEwjn3jCA+tQGswK3aEWcD09/7M5wGwc6+flhva7dsQNRZZTe30vkalgIzV4tjkopsTS9Jd7Y1Bsj6a4lzz8gQ==", 1221 + "cpu": [ 1222 + "arm64" 1223 + ], 1224 + "dev": true, 1225 + "license": "MIT", 1226 + "optional": true, 1227 + "os": [ 1228 + "linux" 1229 + ] 1230 + }, 1231 + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { 1232 + "version": "4.40.1", 1233 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.40.1.tgz", 1234 + "integrity": "sha512-ySyWikVhNzv+BV/IDCsrraOAZ3UaC8SZB67FZlqVwXwnFhPihOso9rPOxzZbjp81suB1O2Topw+6Ug3JNegejQ==", 1235 + "cpu": [ 1236 + "loong64" 1237 + ], 1238 + "dev": true, 1239 + "license": "MIT", 1240 + "optional": true, 1241 + "os": [ 1242 + "linux" 1243 + ] 1244 + }, 1245 + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { 1246 + "version": "4.40.1", 1247 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.40.1.tgz", 1248 + "integrity": "sha512-BvvA64QxZlh7WZWqDPPdt0GH4bznuL6uOO1pmgPnnv86rpUpc8ZxgZwcEgXvo02GRIZX1hQ0j0pAnhwkhwPqWg==", 1249 + "cpu": [ 1250 + "ppc64" 1251 + ], 1252 + "dev": true, 1253 + "license": "MIT", 1254 + "optional": true, 1255 + "os": [ 1256 + "linux" 1257 + ] 1258 + }, 1259 + "node_modules/@rollup/rollup-linux-riscv64-gnu": { 1260 + "version": "4.40.1", 1261 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.40.1.tgz", 1262 + "integrity": "sha512-EQSP+8+1VuSulm9RKSMKitTav89fKbHymTf25n5+Yr6gAPZxYWpj3DzAsQqoaHAk9YX2lwEyAf9S4W8F4l3VBQ==", 1263 + "cpu": [ 1264 + "riscv64" 1265 + ], 1266 + "dev": true, 1267 + "license": "MIT", 1268 + "optional": true, 1269 + "os": [ 1270 + "linux" 1271 + ] 1272 + }, 1273 + "node_modules/@rollup/rollup-linux-riscv64-musl": { 1274 + "version": "4.40.1", 1275 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.40.1.tgz", 1276 + "integrity": "sha512-n/vQ4xRZXKuIpqukkMXZt9RWdl+2zgGNx7Uda8NtmLJ06NL8jiHxUawbwC+hdSq1rrw/9CghCpEONor+l1e2gA==", 1277 + "cpu": [ 1278 + "riscv64" 1279 + ], 1280 + "dev": true, 1281 + "license": "MIT", 1282 + "optional": true, 1283 + "os": [ 1284 + "linux" 1285 + ] 1286 + }, 1287 + "node_modules/@rollup/rollup-linux-s390x-gnu": { 1288 + "version": "4.40.1", 1289 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.40.1.tgz", 1290 + "integrity": "sha512-h8d28xzYb98fMQKUz0w2fMc1XuGzLLjdyxVIbhbil4ELfk5/orZlSTpF/xdI9C8K0I8lCkq+1En2RJsawZekkg==", 1291 + "cpu": [ 1292 + "s390x" 1293 + ], 1294 + "dev": true, 1295 + "license": "MIT", 1296 + "optional": true, 1297 + "os": [ 1298 + "linux" 1299 + ] 1300 + }, 1301 + "node_modules/@rollup/rollup-linux-x64-gnu": { 1302 + "version": "4.40.1", 1303 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.40.1.tgz", 1304 + "integrity": 
"sha512-XiK5z70PEFEFqcNj3/zRSz/qX4bp4QIraTy9QjwJAb/Z8GM7kVUsD0Uk8maIPeTyPCP03ChdI+VVmJriKYbRHQ==", 1305 + "cpu": [ 1306 + "x64" 1307 + ], 1308 + "dev": true, 1309 + "license": "MIT", 1310 + "optional": true, 1311 + "os": [ 1312 + "linux" 1313 + ] 1314 + }, 1315 + "node_modules/@rollup/rollup-linux-x64-musl": { 1316 + "version": "4.40.1", 1317 + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.40.1.tgz", 1318 + "integrity": "sha512-2BRORitq5rQ4Da9blVovzNCMaUlyKrzMSvkVR0D4qPuOy/+pMCrh1d7o01RATwVy+6Fa1WBw+da7QPeLWU/1mQ==", 1319 + "cpu": [ 1320 + "x64" 1321 + ], 1322 + "dev": true, 1323 + "license": "MIT", 1324 + "optional": true, 1325 + "os": [ 1326 + "linux" 1327 + ] 1328 + }, 1329 + "node_modules/@rollup/rollup-win32-arm64-msvc": { 1330 + "version": "4.40.1", 1331 + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.40.1.tgz", 1332 + "integrity": "sha512-b2bcNm9Kbde03H+q+Jjw9tSfhYkzrDUf2d5MAd1bOJuVplXvFhWz7tRtWvD8/ORZi7qSCy0idW6tf2HgxSXQSg==", 1333 + "cpu": [ 1334 + "arm64" 1335 + ], 1336 + "dev": true, 1337 + "license": "MIT", 1338 + "optional": true, 1339 + "os": [ 1340 + "win32" 1341 + ] 1342 + }, 1343 + "node_modules/@rollup/rollup-win32-ia32-msvc": { 1344 + "version": "4.40.1", 1345 + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.40.1.tgz", 1346 + "integrity": "sha512-DfcogW8N7Zg7llVEfpqWMZcaErKfsj9VvmfSyRjCyo4BI3wPEfrzTtJkZG6gKP/Z92wFm6rz2aDO7/JfiR/whA==", 1347 + "cpu": [ 1348 + "ia32" 1349 + ], 1350 + "dev": true, 1351 + "license": "MIT", 1352 + "optional": true, 1353 + "os": [ 1354 + "win32" 1355 + ] 1356 + }, 1357 + "node_modules/@rollup/rollup-win32-x64-msvc": { 1358 + "version": "4.40.1", 1359 + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.40.1.tgz", 1360 + "integrity": "sha512-ECyOuDeH3C1I8jH2MK1RtBJW+YPMvSfT0a5NN0nHfQYnDSJ6tUiZH3gzwVP5/Kfh/+Tt7tpWVF9LXNTnhTJ3kA==", 1361 + "cpu": [ 1362 + "x64" 1363 + ], 1364 + "dev": true, 1365 + "license": "MIT", 1366 + "optional": true, 1367 + "os": [ 1368 + "win32" 1369 + ] 1370 + }, 1371 + "node_modules/@standard-schema/spec": { 1372 + "version": "1.1.0", 1373 + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", 1374 + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", 1375 + "license": "MIT" 1376 + }, 1377 + "node_modules/@types/estree": { 1378 + "version": "1.0.7", 1379 + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", 1380 + "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", 1381 + "dev": true, 1382 + "license": "MIT" 1383 + }, 1384 + "node_modules/@vitest/expect": { 1385 + "version": "3.0.9", 1386 + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", 1387 + "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", 1388 + "dev": true, 1389 + "license": "MIT", 1390 + "dependencies": { 1391 + "@vitest/spy": "3.0.9", 1392 + "@vitest/utils": "3.0.9", 1393 + "chai": "^5.2.0", 1394 + "tinyrainbow": "^2.0.0" 1395 + }, 1396 + "funding": { 1397 + "url": "https://opencollective.com/vitest" 1398 + } 1399 + }, 1400 + "node_modules/@vitest/mocker": { 1401 + "version": "3.0.9", 1402 + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", 1403 + "integrity": 
"sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", 1404 + "dev": true, 1405 + "license": "MIT", 1406 + "dependencies": { 1407 + "@vitest/spy": "3.0.9", 1408 + "estree-walker": "^3.0.3", 1409 + "magic-string": "^0.30.17" 1410 + }, 1411 + "funding": { 1412 + "url": "https://opencollective.com/vitest" 1413 + }, 1414 + "peerDependencies": { 1415 + "msw": "^2.4.9", 1416 + "vite": "^5.0.0 || ^6.0.0" 1417 + }, 1418 + "peerDependenciesMeta": { 1419 + "msw": { 1420 + "optional": true 1421 + }, 1422 + "vite": { 1423 + "optional": true 1424 + } 1425 + } 1426 + }, 1427 + "node_modules/@vitest/pretty-format": { 1428 + "version": "3.1.2", 1429 + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.2.tgz", 1430 + "integrity": "sha512-R0xAiHuWeDjTSB3kQ3OQpT8Rx3yhdOAIm/JM4axXxnG7Q/fS8XUwggv/A4xzbQA+drYRjzkMnpYnOGAc4oeq8w==", 1431 + "dev": true, 1432 + "license": "MIT", 1433 + "dependencies": { 1434 + "tinyrainbow": "^2.0.0" 1435 + }, 1436 + "funding": { 1437 + "url": "https://opencollective.com/vitest" 1438 + } 1439 + }, 1440 + "node_modules/@vitest/runner": { 1441 + "version": "3.0.9", 1442 + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", 1443 + "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", 1444 + "dev": true, 1445 + "license": "MIT", 1446 + "dependencies": { 1447 + "@vitest/utils": "3.0.9", 1448 + "pathe": "^2.0.3" 1449 + }, 1450 + "funding": { 1451 + "url": "https://opencollective.com/vitest" 1452 + } 1453 + }, 1454 + "node_modules/@vitest/snapshot": { 1455 + "version": "3.0.9", 1456 + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", 1457 + "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", 1458 + "dev": true, 1459 + "license": "MIT", 1460 + "dependencies": { 1461 + "@vitest/pretty-format": "3.0.9", 1462 + "magic-string": "^0.30.17", 1463 + "pathe": "^2.0.3" 1464 + }, 1465 + "funding": { 1466 + "url": "https://opencollective.com/vitest" 1467 + } 1468 + }, 1469 + "node_modules/@vitest/snapshot/node_modules/@vitest/pretty-format": { 1470 + "version": "3.0.9", 1471 + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", 1472 + "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", 1473 + "dev": true, 1474 + "license": "MIT", 1475 + "dependencies": { 1476 + "tinyrainbow": "^2.0.0" 1477 + }, 1478 + "funding": { 1479 + "url": "https://opencollective.com/vitest" 1480 + } 1481 + }, 1482 + "node_modules/@vitest/spy": { 1483 + "version": "3.0.9", 1484 + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", 1485 + "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", 1486 + "dev": true, 1487 + "license": "MIT", 1488 + "dependencies": { 1489 + "tinyspy": "^3.0.2" 1490 + }, 1491 + "funding": { 1492 + "url": "https://opencollective.com/vitest" 1493 + } 1494 + }, 1495 + "node_modules/@vitest/utils": { 1496 + "version": "3.0.9", 1497 + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", 1498 + "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", 1499 + "dev": true, 1500 + "license": "MIT", 1501 + "dependencies": { 1502 + "@vitest/pretty-format": "3.0.9", 1503 + "loupe": "^3.1.3", 1504 + "tinyrainbow": "^2.0.0" 1505 + 
}, 1506 + "funding": { 1507 + "url": "https://opencollective.com/vitest" 1508 + } 1509 + }, 1510 + "node_modules/@vitest/utils/node_modules/@vitest/pretty-format": { 1511 + "version": "3.0.9", 1512 + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", 1513 + "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", 1514 + "dev": true, 1515 + "license": "MIT", 1516 + "dependencies": { 1517 + "tinyrainbow": "^2.0.0" 1518 + }, 1519 + "funding": { 1520 + "url": "https://opencollective.com/vitest" 1521 + } 1522 + }, 1523 + "node_modules/acorn": { 1524 + "version": "8.14.0", 1525 + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", 1526 + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", 1527 + "dev": true, 1528 + "license": "MIT", 1529 + "bin": { 1530 + "acorn": "bin/acorn" 1531 + }, 1532 + "engines": { 1533 + "node": ">=0.4.0" 1534 + } 1535 + }, 1536 + "node_modules/acorn-walk": { 1537 + "version": "8.3.2", 1538 + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", 1539 + "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", 1540 + "dev": true, 1541 + "license": "MIT", 1542 + "engines": { 1543 + "node": ">=0.4.0" 1544 + } 1545 + }, 1546 + "node_modules/as-table": { 1547 + "version": "1.0.55", 1548 + "resolved": "https://registry.npmjs.org/as-table/-/as-table-1.0.55.tgz", 1549 + "integrity": "sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==", 1550 + "dev": true, 1551 + "license": "MIT", 1552 + "dependencies": { 1553 + "printable-characters": "^1.0.42" 1554 + } 1555 + }, 1556 + "node_modules/assertion-error": { 1557 + "version": "2.0.1", 1558 + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", 1559 + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", 1560 + "dev": true, 1561 + "license": "MIT", 1562 + "engines": { 1563 + "node": ">=12" 1564 + } 1565 + }, 1566 + "node_modules/birpc": { 1567 + "version": "0.2.14", 1568 + "resolved": "https://registry.npmjs.org/birpc/-/birpc-0.2.14.tgz", 1569 + "integrity": "sha512-37FHE8rqsYM5JEKCnXFyHpBCzvgHEExwVVTq+nUmloInU7l8ezD1TpOhKpS8oe1DTYFqEK27rFZVKG43oTqXRA==", 1570 + "dev": true, 1571 + "license": "MIT", 1572 + "funding": { 1573 + "url": "https://github.com/sponsors/antfu" 1574 + } 1575 + }, 1576 + "node_modules/blake3-wasm": { 1577 + "version": "2.1.5", 1578 + "resolved": "https://registry.npmjs.org/blake3-wasm/-/blake3-wasm-2.1.5.tgz", 1579 + "integrity": "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==", 1580 + "dev": true, 1581 + "license": "MIT" 1582 + }, 1583 + "node_modules/cac": { 1584 + "version": "6.7.14", 1585 + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", 1586 + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", 1587 + "dev": true, 1588 + "license": "MIT", 1589 + "engines": { 1590 + "node": ">=8" 1591 + } 1592 + }, 1593 + "node_modules/chai": { 1594 + "version": "5.2.0", 1595 + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", 1596 + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", 1597 + "dev": true, 1598 + "license": "MIT", 1599 + "dependencies": { 
1600 + "assertion-error": "^2.0.1", 1601 + "check-error": "^2.1.1", 1602 + "deep-eql": "^5.0.1", 1603 + "loupe": "^3.1.0", 1604 + "pathval": "^2.0.0" 1605 + }, 1606 + "engines": { 1607 + "node": ">=12" 1608 + } 1609 + }, 1610 + "node_modules/check-error": { 1611 + "version": "2.1.1", 1612 + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", 1613 + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", 1614 + "dev": true, 1615 + "license": "MIT", 1616 + "engines": { 1617 + "node": ">= 16" 1618 + } 1619 + }, 1620 + "node_modules/cjs-module-lexer": { 1621 + "version": "1.4.3", 1622 + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", 1623 + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", 1624 + "dev": true, 1625 + "license": "MIT" 1626 + }, 1627 + "node_modules/color": { 1628 + "version": "4.2.3", 1629 + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", 1630 + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", 1631 + "dev": true, 1632 + "license": "MIT", 1633 + "optional": true, 1634 + "dependencies": { 1635 + "color-convert": "^2.0.1", 1636 + "color-string": "^1.9.0" 1637 + }, 1638 + "engines": { 1639 + "node": ">=12.5.0" 1640 + } 1641 + }, 1642 + "node_modules/color-convert": { 1643 + "version": "2.0.1", 1644 + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 1645 + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 1646 + "dev": true, 1647 + "license": "MIT", 1648 + "optional": true, 1649 + "dependencies": { 1650 + "color-name": "~1.1.4" 1651 + }, 1652 + "engines": { 1653 + "node": ">=7.0.0" 1654 + } 1655 + }, 1656 + "node_modules/color-name": { 1657 + "version": "1.1.4", 1658 + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 1659 + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 1660 + "dev": true, 1661 + "license": "MIT", 1662 + "optional": true 1663 + }, 1664 + "node_modules/color-string": { 1665 + "version": "1.9.1", 1666 + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", 1667 + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", 1668 + "dev": true, 1669 + "license": "MIT", 1670 + "optional": true, 1671 + "dependencies": { 1672 + "color-name": "^1.0.0", 1673 + "simple-swizzle": "^0.2.2" 1674 + } 1675 + }, 1676 + "node_modules/cookie": { 1677 + "version": "0.7.2", 1678 + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", 1679 + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", 1680 + "dev": true, 1681 + "license": "MIT", 1682 + "engines": { 1683 + "node": ">= 0.6" 1684 + } 1685 + }, 1686 + "node_modules/data-uri-to-buffer": { 1687 + "version": "2.0.2", 1688 + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-2.0.2.tgz", 1689 + "integrity": "sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==", 1690 + "dev": true, 1691 + "license": "MIT" 1692 + }, 1693 + "node_modules/debug": { 1694 + "version": "4.4.0", 1695 + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", 1696 + "integrity": 
"sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", 1697 + "dev": true, 1698 + "license": "MIT", 1699 + "dependencies": { 1700 + "ms": "^2.1.3" 1701 + }, 1702 + "engines": { 1703 + "node": ">=6.0" 1704 + }, 1705 + "peerDependenciesMeta": { 1706 + "supports-color": { 1707 + "optional": true 1708 + } 1709 + } 1710 + }, 1711 + "node_modules/deep-eql": { 1712 + "version": "5.0.2", 1713 + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", 1714 + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", 1715 + "dev": true, 1716 + "license": "MIT", 1717 + "engines": { 1718 + "node": ">=6" 1719 + } 1720 + }, 1721 + "node_modules/defu": { 1722 + "version": "6.1.4", 1723 + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", 1724 + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", 1725 + "dev": true, 1726 + "license": "MIT" 1727 + }, 1728 + "node_modules/detect-libc": { 1729 + "version": "2.0.4", 1730 + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", 1731 + "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", 1732 + "dev": true, 1733 + "license": "Apache-2.0", 1734 + "optional": true, 1735 + "engines": { 1736 + "node": ">=8" 1737 + } 1738 + }, 1739 + "node_modules/devalue": { 1740 + "version": "4.3.3", 1741 + "resolved": "https://registry.npmjs.org/devalue/-/devalue-4.3.3.tgz", 1742 + "integrity": "sha512-UH8EL6H2ifcY8TbD2QsxwCC/pr5xSwPvv85LrLXVihmHVC3T3YqTCIwnR5ak0yO1KYqlxrPVOA/JVZJYPy2ATg==", 1743 + "dev": true, 1744 + "license": "MIT" 1745 + }, 1746 + "node_modules/es-module-lexer": { 1747 + "version": "1.7.0", 1748 + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", 1749 + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", 1750 + "dev": true, 1751 + "license": "MIT" 1752 + }, 1753 + "node_modules/esbuild": { 1754 + "version": "0.25.3", 1755 + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.3.tgz", 1756 + "integrity": "sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q==", 1757 + "dev": true, 1758 + "hasInstallScript": true, 1759 + "license": "MIT", 1760 + "bin": { 1761 + "esbuild": "bin/esbuild" 1762 + }, 1763 + "engines": { 1764 + "node": ">=18" 1765 + }, 1766 + "optionalDependencies": { 1767 + "@esbuild/aix-ppc64": "0.25.3", 1768 + "@esbuild/android-arm": "0.25.3", 1769 + "@esbuild/android-arm64": "0.25.3", 1770 + "@esbuild/android-x64": "0.25.3", 1771 + "@esbuild/darwin-arm64": "0.25.3", 1772 + "@esbuild/darwin-x64": "0.25.3", 1773 + "@esbuild/freebsd-arm64": "0.25.3", 1774 + "@esbuild/freebsd-x64": "0.25.3", 1775 + "@esbuild/linux-arm": "0.25.3", 1776 + "@esbuild/linux-arm64": "0.25.3", 1777 + "@esbuild/linux-ia32": "0.25.3", 1778 + "@esbuild/linux-loong64": "0.25.3", 1779 + "@esbuild/linux-mips64el": "0.25.3", 1780 + "@esbuild/linux-ppc64": "0.25.3", 1781 + "@esbuild/linux-riscv64": "0.25.3", 1782 + "@esbuild/linux-s390x": "0.25.3", 1783 + "@esbuild/linux-x64": "0.25.3", 1784 + "@esbuild/netbsd-arm64": "0.25.3", 1785 + "@esbuild/netbsd-x64": "0.25.3", 1786 + "@esbuild/openbsd-arm64": "0.25.3", 1787 + "@esbuild/openbsd-x64": "0.25.3", 1788 + "@esbuild/sunos-x64": "0.25.3", 1789 + "@esbuild/win32-arm64": "0.25.3", 1790 + "@esbuild/win32-ia32": "0.25.3", 1791 + 
"@esbuild/win32-x64": "0.25.3" 1792 + } 1793 + }, 1794 + "node_modules/esm-env": { 1795 + "version": "1.2.2", 1796 + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", 1797 + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", 1798 + "license": "MIT" 1799 + }, 1800 + "node_modules/estree-walker": { 1801 + "version": "3.0.3", 1802 + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", 1803 + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", 1804 + "dev": true, 1805 + "license": "MIT", 1806 + "dependencies": { 1807 + "@types/estree": "^1.0.0" 1808 + } 1809 + }, 1810 + "node_modules/exit-hook": { 1811 + "version": "2.2.1", 1812 + "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-2.2.1.tgz", 1813 + "integrity": "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==", 1814 + "dev": true, 1815 + "license": "MIT", 1816 + "engines": { 1817 + "node": ">=6" 1818 + }, 1819 + "funding": { 1820 + "url": "https://github.com/sponsors/sindresorhus" 1821 + } 1822 + }, 1823 + "node_modules/expect-type": { 1824 + "version": "1.2.1", 1825 + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", 1826 + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", 1827 + "dev": true, 1828 + "license": "Apache-2.0", 1829 + "engines": { 1830 + "node": ">=12.0.0" 1831 + } 1832 + }, 1833 + "node_modules/exsolve": { 1834 + "version": "1.0.5", 1835 + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.5.tgz", 1836 + "integrity": "sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==", 1837 + "dev": true, 1838 + "license": "MIT" 1839 + }, 1840 + "node_modules/fdir": { 1841 + "version": "6.4.4", 1842 + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", 1843 + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", 1844 + "dev": true, 1845 + "license": "MIT", 1846 + "peerDependencies": { 1847 + "picomatch": "^3 || ^4" 1848 + }, 1849 + "peerDependenciesMeta": { 1850 + "picomatch": { 1851 + "optional": true 1852 + } 1853 + } 1854 + }, 1855 + "node_modules/fsevents": { 1856 + "version": "2.3.3", 1857 + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", 1858 + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", 1859 + "dev": true, 1860 + "hasInstallScript": true, 1861 + "license": "MIT", 1862 + "optional": true, 1863 + "os": [ 1864 + "darwin" 1865 + ], 1866 + "engines": { 1867 + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" 1868 + } 1869 + }, 1870 + "node_modules/get-source": { 1871 + "version": "2.0.12", 1872 + "resolved": "https://registry.npmjs.org/get-source/-/get-source-2.0.12.tgz", 1873 + "integrity": "sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==", 1874 + "dev": true, 1875 + "license": "Unlicense", 1876 + "dependencies": { 1877 + "data-uri-to-buffer": "^2.0.0", 1878 + "source-map": "^0.6.1" 1879 + } 1880 + }, 1881 + "node_modules/glob-to-regexp": { 1882 + "version": "0.4.1", 1883 + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", 1884 + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", 
1885 + "dev": true, 1886 + "license": "BSD-2-Clause" 1887 + }, 1888 + "node_modules/is-arrayish": { 1889 + "version": "0.3.2", 1890 + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", 1891 + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", 1892 + "dev": true, 1893 + "license": "MIT", 1894 + "optional": true 1895 + }, 1896 + "node_modules/loupe": { 1897 + "version": "3.1.3", 1898 + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", 1899 + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", 1900 + "dev": true, 1901 + "license": "MIT" 1902 + }, 1903 + "node_modules/magic-string": { 1904 + "version": "0.30.17", 1905 + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", 1906 + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", 1907 + "dev": true, 1908 + "license": "MIT", 1909 + "dependencies": { 1910 + "@jridgewell/sourcemap-codec": "^1.5.0" 1911 + } 1912 + }, 1913 + "node_modules/mime": { 1914 + "version": "3.0.0", 1915 + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", 1916 + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", 1917 + "dev": true, 1918 + "license": "MIT", 1919 + "bin": { 1920 + "mime": "cli.js" 1921 + }, 1922 + "engines": { 1923 + "node": ">=10.0.0" 1924 + } 1925 + }, 1926 + "node_modules/miniflare": { 1927 + "version": "4.20250428.1", 1928 + "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20250428.1.tgz", 1929 + "integrity": "sha512-M3qcJXjeAEimHrEeWXEhrJiC3YHB5M3QSqqK67pOTI+lHn0QyVG/2iFUjVJ/nv+i10uxeAEva8GRGeu+tKRCmQ==", 1930 + "dev": true, 1931 + "license": "MIT", 1932 + "dependencies": { 1933 + "@cspotcode/source-map-support": "0.8.1", 1934 + "acorn": "8.14.0", 1935 + "acorn-walk": "8.3.2", 1936 + "exit-hook": "2.2.1", 1937 + "glob-to-regexp": "0.4.1", 1938 + "stoppable": "1.1.0", 1939 + "undici": "^5.28.5", 1940 + "workerd": "1.20250428.0", 1941 + "ws": "8.18.0", 1942 + "youch": "3.3.4", 1943 + "zod": "3.22.3" 1944 + }, 1945 + "bin": { 1946 + "miniflare": "bootstrap.js" 1947 + }, 1948 + "engines": { 1949 + "node": ">=18.0.0" 1950 + } 1951 + }, 1952 + "node_modules/miniflare/node_modules/zod": { 1953 + "version": "3.22.3", 1954 + "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.3.tgz", 1955 + "integrity": "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==", 1956 + "dev": true, 1957 + "license": "MIT", 1958 + "funding": { 1959 + "url": "https://github.com/sponsors/colinhacks" 1960 + } 1961 + }, 1962 + "node_modules/ms": { 1963 + "version": "2.1.3", 1964 + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 1965 + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", 1966 + "dev": true, 1967 + "license": "MIT" 1968 + }, 1969 + "node_modules/mustache": { 1970 + "version": "4.2.0", 1971 + "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", 1972 + "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", 1973 + "dev": true, 1974 + "license": "MIT", 1975 + "bin": { 1976 + "mustache": "bin/mustache" 1977 + } 1978 + }, 1979 + "node_modules/nanoid": { 1980 + "version": "3.3.11", 1981 + "resolved": 
"https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", 1982 + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", 1983 + "dev": true, 1984 + "funding": [ 1985 + { 1986 + "type": "github", 1987 + "url": "https://github.com/sponsors/ai" 1988 + } 1989 + ], 1990 + "license": "MIT", 1991 + "bin": { 1992 + "nanoid": "bin/nanoid.cjs" 1993 + }, 1994 + "engines": { 1995 + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" 1996 + } 1997 + }, 1998 + "node_modules/ohash": { 1999 + "version": "2.0.11", 2000 + "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", 2001 + "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", 2002 + "dev": true, 2003 + "license": "MIT" 2004 + }, 2005 + "node_modules/path-to-regexp": { 2006 + "version": "6.3.0", 2007 + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", 2008 + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", 2009 + "dev": true, 2010 + "license": "MIT" 2011 + }, 2012 + "node_modules/pathe": { 2013 + "version": "2.0.3", 2014 + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", 2015 + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", 2016 + "dev": true, 2017 + "license": "MIT" 2018 + }, 2019 + "node_modules/pathval": { 2020 + "version": "2.0.0", 2021 + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", 2022 + "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", 2023 + "dev": true, 2024 + "license": "MIT", 2025 + "engines": { 2026 + "node": ">= 14.16" 2027 + } 2028 + }, 2029 + "node_modules/picocolors": { 2030 + "version": "1.1.1", 2031 + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", 2032 + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", 2033 + "dev": true, 2034 + "license": "ISC" 2035 + }, 2036 + "node_modules/picomatch": { 2037 + "version": "4.0.2", 2038 + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", 2039 + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", 2040 + "dev": true, 2041 + "license": "MIT", 2042 + "engines": { 2043 + "node": ">=12" 2044 + }, 2045 + "funding": { 2046 + "url": "https://github.com/sponsors/jonschlinkert" 2047 + } 2048 + }, 2049 + "node_modules/postcss": { 2050 + "version": "8.5.3", 2051 + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", 2052 + "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", 2053 + "dev": true, 2054 + "funding": [ 2055 + { 2056 + "type": "opencollective", 2057 + "url": "https://opencollective.com/postcss/" 2058 + }, 2059 + { 2060 + "type": "tidelift", 2061 + "url": "https://tidelift.com/funding/github/npm/postcss" 2062 + }, 2063 + { 2064 + "type": "github", 2065 + "url": "https://github.com/sponsors/ai" 2066 + } 2067 + ], 2068 + "license": "MIT", 2069 + "dependencies": { 2070 + "nanoid": "^3.3.8", 2071 + "picocolors": "^1.1.1", 2072 + "source-map-js": "^1.2.1" 2073 + }, 2074 + "engines": { 2075 + "node": "^10 || ^12 || >=14" 2076 + } 2077 + }, 2078 + "node_modules/printable-characters": { 2079 + "version": "1.0.42", 2080 + "resolved": 
"https://registry.npmjs.org/printable-characters/-/printable-characters-1.0.42.tgz", 2081 + "integrity": "sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==", 2082 + "dev": true, 2083 + "license": "Unlicense" 2084 + }, 2085 + "node_modules/rollup": { 2086 + "version": "4.40.1", 2087 + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.40.1.tgz", 2088 + "integrity": "sha512-C5VvvgCCyfyotVITIAv+4efVytl5F7wt+/I2i9q9GZcEXW9BP52YYOXC58igUi+LFZVHukErIIqQSWwv/M3WRw==", 2089 + "dev": true, 2090 + "license": "MIT", 2091 + "dependencies": { 2092 + "@types/estree": "1.0.7" 2093 + }, 2094 + "bin": { 2095 + "rollup": "dist/bin/rollup" 2096 + }, 2097 + "engines": { 2098 + "node": ">=18.0.0", 2099 + "npm": ">=8.0.0" 2100 + }, 2101 + "optionalDependencies": { 2102 + "@rollup/rollup-android-arm-eabi": "4.40.1", 2103 + "@rollup/rollup-android-arm64": "4.40.1", 2104 + "@rollup/rollup-darwin-arm64": "4.40.1", 2105 + "@rollup/rollup-darwin-x64": "4.40.1", 2106 + "@rollup/rollup-freebsd-arm64": "4.40.1", 2107 + "@rollup/rollup-freebsd-x64": "4.40.1", 2108 + "@rollup/rollup-linux-arm-gnueabihf": "4.40.1", 2109 + "@rollup/rollup-linux-arm-musleabihf": "4.40.1", 2110 + "@rollup/rollup-linux-arm64-gnu": "4.40.1", 2111 + "@rollup/rollup-linux-arm64-musl": "4.40.1", 2112 + "@rollup/rollup-linux-loongarch64-gnu": "4.40.1", 2113 + "@rollup/rollup-linux-powerpc64le-gnu": "4.40.1", 2114 + "@rollup/rollup-linux-riscv64-gnu": "4.40.1", 2115 + "@rollup/rollup-linux-riscv64-musl": "4.40.1", 2116 + "@rollup/rollup-linux-s390x-gnu": "4.40.1", 2117 + "@rollup/rollup-linux-x64-gnu": "4.40.1", 2118 + "@rollup/rollup-linux-x64-musl": "4.40.1", 2119 + "@rollup/rollup-win32-arm64-msvc": "4.40.1", 2120 + "@rollup/rollup-win32-ia32-msvc": "4.40.1", 2121 + "@rollup/rollup-win32-x64-msvc": "4.40.1", 2122 + "fsevents": "~2.3.2" 2123 + } 2124 + }, 2125 + "node_modules/semver": { 2126 + "version": "7.7.1", 2127 + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", 2128 + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", 2129 + "dev": true, 2130 + "license": "ISC", 2131 + "bin": { 2132 + "semver": "bin/semver.js" 2133 + }, 2134 + "engines": { 2135 + "node": ">=10" 2136 + } 2137 + }, 2138 + "node_modules/sharp": { 2139 + "version": "0.33.5", 2140 + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", 2141 + "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", 2142 + "dev": true, 2143 + "hasInstallScript": true, 2144 + "license": "Apache-2.0", 2145 + "optional": true, 2146 + "dependencies": { 2147 + "color": "^4.2.3", 2148 + "detect-libc": "^2.0.3", 2149 + "semver": "^7.6.3" 2150 + }, 2151 + "engines": { 2152 + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" 2153 + }, 2154 + "funding": { 2155 + "url": "https://opencollective.com/libvips" 2156 + }, 2157 + "optionalDependencies": { 2158 + "@img/sharp-darwin-arm64": "0.33.5", 2159 + "@img/sharp-darwin-x64": "0.33.5", 2160 + "@img/sharp-libvips-darwin-arm64": "1.0.4", 2161 + "@img/sharp-libvips-darwin-x64": "1.0.4", 2162 + "@img/sharp-libvips-linux-arm": "1.0.5", 2163 + "@img/sharp-libvips-linux-arm64": "1.0.4", 2164 + "@img/sharp-libvips-linux-s390x": "1.0.4", 2165 + "@img/sharp-libvips-linux-x64": "1.0.4", 2166 + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", 2167 + "@img/sharp-libvips-linuxmusl-x64": "1.0.4", 2168 + "@img/sharp-linux-arm": "0.33.5", 2169 + "@img/sharp-linux-arm64": "0.33.5", 2170 + 
"@img/sharp-linux-s390x": "0.33.5", 2171 + "@img/sharp-linux-x64": "0.33.5", 2172 + "@img/sharp-linuxmusl-arm64": "0.33.5", 2173 + "@img/sharp-linuxmusl-x64": "0.33.5", 2174 + "@img/sharp-wasm32": "0.33.5", 2175 + "@img/sharp-win32-ia32": "0.33.5", 2176 + "@img/sharp-win32-x64": "0.33.5" 2177 + } 2178 + }, 2179 + "node_modules/siginfo": { 2180 + "version": "2.0.0", 2181 + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", 2182 + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", 2183 + "dev": true, 2184 + "license": "ISC" 2185 + }, 2186 + "node_modules/simple-swizzle": { 2187 + "version": "0.2.2", 2188 + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", 2189 + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", 2190 + "dev": true, 2191 + "license": "MIT", 2192 + "optional": true, 2193 + "dependencies": { 2194 + "is-arrayish": "^0.3.1" 2195 + } 2196 + }, 2197 + "node_modules/source-map": { 2198 + "version": "0.6.1", 2199 + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", 2200 + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", 2201 + "dev": true, 2202 + "license": "BSD-3-Clause", 2203 + "engines": { 2204 + "node": ">=0.10.0" 2205 + } 2206 + }, 2207 + "node_modules/source-map-js": { 2208 + "version": "1.2.1", 2209 + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", 2210 + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", 2211 + "dev": true, 2212 + "license": "BSD-3-Clause", 2213 + "engines": { 2214 + "node": ">=0.10.0" 2215 + } 2216 + }, 2217 + "node_modules/stackback": { 2218 + "version": "0.0.2", 2219 + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", 2220 + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", 2221 + "dev": true, 2222 + "license": "MIT" 2223 + }, 2224 + "node_modules/stacktracey": { 2225 + "version": "2.1.8", 2226 + "resolved": "https://registry.npmjs.org/stacktracey/-/stacktracey-2.1.8.tgz", 2227 + "integrity": "sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==", 2228 + "dev": true, 2229 + "license": "Unlicense", 2230 + "dependencies": { 2231 + "as-table": "^1.0.36", 2232 + "get-source": "^2.0.12" 2233 + } 2234 + }, 2235 + "node_modules/std-env": { 2236 + "version": "3.9.0", 2237 + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", 2238 + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", 2239 + "dev": true, 2240 + "license": "MIT" 2241 + }, 2242 + "node_modules/stoppable": { 2243 + "version": "1.1.0", 2244 + "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", 2245 + "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", 2246 + "dev": true, 2247 + "license": "MIT", 2248 + "engines": { 2249 + "node": ">=4", 2250 + "npm": ">=6" 2251 + } 2252 + }, 2253 + "node_modules/tinybench": { 2254 + "version": "2.9.0", 2255 + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", 2256 + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", 2257 + "dev": true, 2258 + "license": 
"MIT" 2259 + }, 2260 + "node_modules/tinyexec": { 2261 + "version": "0.3.2", 2262 + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", 2263 + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", 2264 + "dev": true, 2265 + "license": "MIT" 2266 + }, 2267 + "node_modules/tinyglobby": { 2268 + "version": "0.2.13", 2269 + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", 2270 + "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", 2271 + "dev": true, 2272 + "license": "MIT", 2273 + "dependencies": { 2274 + "fdir": "^6.4.4", 2275 + "picomatch": "^4.0.2" 2276 + }, 2277 + "engines": { 2278 + "node": ">=12.0.0" 2279 + }, 2280 + "funding": { 2281 + "url": "https://github.com/sponsors/SuperchupuDev" 2282 + } 2283 + }, 2284 + "node_modules/tinypool": { 2285 + "version": "1.0.2", 2286 + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", 2287 + "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", 2288 + "dev": true, 2289 + "license": "MIT", 2290 + "engines": { 2291 + "node": "^18.0.0 || >=20.0.0" 2292 + } 2293 + }, 2294 + "node_modules/tinyrainbow": { 2295 + "version": "2.0.0", 2296 + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", 2297 + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", 2298 + "dev": true, 2299 + "license": "MIT", 2300 + "engines": { 2301 + "node": ">=14.0.0" 2302 + } 2303 + }, 2304 + "node_modules/tinyspy": { 2305 + "version": "3.0.2", 2306 + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", 2307 + "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", 2308 + "dev": true, 2309 + "license": "MIT", 2310 + "engines": { 2311 + "node": ">=14.0.0" 2312 + } 2313 + }, 2314 + "node_modules/tslib": { 2315 + "version": "2.8.1", 2316 + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", 2317 + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", 2318 + "dev": true, 2319 + "license": "0BSD", 2320 + "optional": true 2321 + }, 2322 + "node_modules/ufo": { 2323 + "version": "1.6.1", 2324 + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", 2325 + "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", 2326 + "dev": true, 2327 + "license": "MIT" 2328 + }, 2329 + "node_modules/undici": { 2330 + "version": "5.29.0", 2331 + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", 2332 + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", 2333 + "dev": true, 2334 + "license": "MIT", 2335 + "dependencies": { 2336 + "@fastify/busboy": "^2.0.0" 2337 + }, 2338 + "engines": { 2339 + "node": ">=14.0" 2340 + } 2341 + }, 2342 + "node_modules/unenv": { 2343 + "version": "2.0.0-rc.15", 2344 + "resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.15.tgz", 2345 + "integrity": "sha512-J/rEIZU8w6FOfLNz/hNKsnY+fFHWnu9MH4yRbSZF3xbbGHovcetXPs7sD+9p8L6CeNC//I9bhRYAOsBt2u7/OA==", 2346 + "dev": true, 2347 + "license": "MIT", 2348 + "dependencies": { 2349 + "defu": "^6.1.4", 2350 + "exsolve": "^1.0.4", 2351 + "ohash": "^2.0.11", 2352 + "pathe": "^2.0.3", 2353 + "ufo": "^1.5.4" 2354 + } 2355 + }, 2356 + 
"node_modules/unicode-segmenter": { 2357 + "version": "0.14.4", 2358 + "resolved": "https://registry.npmjs.org/unicode-segmenter/-/unicode-segmenter-0.14.4.tgz", 2359 + "integrity": "sha512-pR5VCiCrLrKOL6FRW61jnk9+wyMtKKowq+jyFY9oc6uHbWKhDL4yVRiI4YZPksGMK72Pahh8m0cn/0JvbDDyJg==", 2360 + "license": "MIT" 2361 + }, 2362 + "node_modules/vite": { 2363 + "version": "6.3.4", 2364 + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz", 2365 + "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==", 2366 + "dev": true, 2367 + "license": "MIT", 2368 + "dependencies": { 2369 + "esbuild": "^0.25.0", 2370 + "fdir": "^6.4.4", 2371 + "picomatch": "^4.0.2", 2372 + "postcss": "^8.5.3", 2373 + "rollup": "^4.34.9", 2374 + "tinyglobby": "^0.2.13" 2375 + }, 2376 + "bin": { 2377 + "vite": "bin/vite.js" 2378 + }, 2379 + "engines": { 2380 + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2381 + }, 2382 + "funding": { 2383 + "url": "https://github.com/vitejs/vite?sponsor=1" 2384 + }, 2385 + "optionalDependencies": { 2386 + "fsevents": "~2.3.3" 2387 + }, 2388 + "peerDependencies": { 2389 + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", 2390 + "jiti": ">=1.21.0", 2391 + "less": "*", 2392 + "lightningcss": "^1.21.0", 2393 + "sass": "*", 2394 + "sass-embedded": "*", 2395 + "stylus": "*", 2396 + "sugarss": "*", 2397 + "terser": "^5.16.0", 2398 + "tsx": "^4.8.1", 2399 + "yaml": "^2.4.2" 2400 + }, 2401 + "peerDependenciesMeta": { 2402 + "@types/node": { 2403 + "optional": true 2404 + }, 2405 + "jiti": { 2406 + "optional": true 2407 + }, 2408 + "less": { 2409 + "optional": true 2410 + }, 2411 + "lightningcss": { 2412 + "optional": true 2413 + }, 2414 + "sass": { 2415 + "optional": true 2416 + }, 2417 + "sass-embedded": { 2418 + "optional": true 2419 + }, 2420 + "stylus": { 2421 + "optional": true 2422 + }, 2423 + "sugarss": { 2424 + "optional": true 2425 + }, 2426 + "terser": { 2427 + "optional": true 2428 + }, 2429 + "tsx": { 2430 + "optional": true 2431 + }, 2432 + "yaml": { 2433 + "optional": true 2434 + } 2435 + } 2436 + }, 2437 + "node_modules/vite-node": { 2438 + "version": "3.0.9", 2439 + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", 2440 + "integrity": "sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", 2441 + "dev": true, 2442 + "license": "MIT", 2443 + "dependencies": { 2444 + "cac": "^6.7.14", 2445 + "debug": "^4.4.0", 2446 + "es-module-lexer": "^1.6.0", 2447 + "pathe": "^2.0.3", 2448 + "vite": "^5.0.0 || ^6.0.0" 2449 + }, 2450 + "bin": { 2451 + "vite-node": "vite-node.mjs" 2452 + }, 2453 + "engines": { 2454 + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2455 + }, 2456 + "funding": { 2457 + "url": "https://opencollective.com/vitest" 2458 + } 2459 + }, 2460 + "node_modules/vitest": { 2461 + "version": "3.0.9", 2462 + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", 2463 + "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", 2464 + "dev": true, 2465 + "license": "MIT", 2466 + "dependencies": { 2467 + "@vitest/expect": "3.0.9", 2468 + "@vitest/mocker": "3.0.9", 2469 + "@vitest/pretty-format": "^3.0.9", 2470 + "@vitest/runner": "3.0.9", 2471 + "@vitest/snapshot": "3.0.9", 2472 + "@vitest/spy": "3.0.9", 2473 + "@vitest/utils": "3.0.9", 2474 + "chai": "^5.2.0", 2475 + "debug": "^4.4.0", 2476 + "expect-type": "^1.1.0", 2477 + "magic-string": "^0.30.17", 2478 + "pathe": "^2.0.3", 2479 + "std-env": "^3.8.0", 2480 
+ "tinybench": "^2.9.0", 2481 + "tinyexec": "^0.3.2", 2482 + "tinypool": "^1.0.2", 2483 + "tinyrainbow": "^2.0.0", 2484 + "vite": "^5.0.0 || ^6.0.0", 2485 + "vite-node": "3.0.9", 2486 + "why-is-node-running": "^2.3.0" 2487 + }, 2488 + "bin": { 2489 + "vitest": "vitest.mjs" 2490 + }, 2491 + "engines": { 2492 + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" 2493 + }, 2494 + "funding": { 2495 + "url": "https://opencollective.com/vitest" 2496 + }, 2497 + "peerDependencies": { 2498 + "@edge-runtime/vm": "*", 2499 + "@types/debug": "^4.1.12", 2500 + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", 2501 + "@vitest/browser": "3.0.9", 2502 + "@vitest/ui": "3.0.9", 2503 + "happy-dom": "*", 2504 + "jsdom": "*" 2505 + }, 2506 + "peerDependenciesMeta": { 2507 + "@edge-runtime/vm": { 2508 + "optional": true 2509 + }, 2510 + "@types/debug": { 2511 + "optional": true 2512 + }, 2513 + "@types/node": { 2514 + "optional": true 2515 + }, 2516 + "@vitest/browser": { 2517 + "optional": true 2518 + }, 2519 + "@vitest/ui": { 2520 + "optional": true 2521 + }, 2522 + "happy-dom": { 2523 + "optional": true 2524 + }, 2525 + "jsdom": { 2526 + "optional": true 2527 + } 2528 + } 2529 + }, 2530 + "node_modules/why-is-node-running": { 2531 + "version": "2.3.0", 2532 + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", 2533 + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", 2534 + "dev": true, 2535 + "license": "MIT", 2536 + "dependencies": { 2537 + "siginfo": "^2.0.0", 2538 + "stackback": "0.0.2" 2539 + }, 2540 + "bin": { 2541 + "why-is-node-running": "cli.js" 2542 + }, 2543 + "engines": { 2544 + "node": ">=8" 2545 + } 2546 + }, 2547 + "node_modules/workerd": { 2548 + "version": "1.20250428.0", 2549 + "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20250428.0.tgz", 2550 + "integrity": "sha512-JJNWkHkwPQKQdvtM9UORijgYdcdJsihA4SfYjwh02IUQsdMyZ9jizV1sX9yWi9B9ptlohTW8UNHJEATuphGgdg==", 2551 + "dev": true, 2552 + "hasInstallScript": true, 2553 + "license": "Apache-2.0", 2554 + "bin": { 2555 + "workerd": "bin/workerd" 2556 + }, 2557 + "engines": { 2558 + "node": ">=16" 2559 + }, 2560 + "optionalDependencies": { 2561 + "@cloudflare/workerd-darwin-64": "1.20250428.0", 2562 + "@cloudflare/workerd-darwin-arm64": "1.20250428.0", 2563 + "@cloudflare/workerd-linux-64": "1.20250428.0", 2564 + "@cloudflare/workerd-linux-arm64": "1.20250428.0", 2565 + "@cloudflare/workerd-windows-64": "1.20250428.0" 2566 + } 2567 + }, 2568 + "node_modules/wrangler": { 2569 + "version": "4.14.1", 2570 + "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.14.1.tgz", 2571 + "integrity": "sha512-EU7IThP7i68TBftJJSveogvWZ5k/WRijcJh3UclDWiWWhDZTPbL6LOJEFhHKqFzHOaC4Y2Aewt48rfTz0e7oCw==", 2572 + "dev": true, 2573 + "license": "MIT OR Apache-2.0", 2574 + "dependencies": { 2575 + "@cloudflare/kv-asset-handler": "0.4.0", 2576 + "@cloudflare/unenv-preset": "2.3.1", 2577 + "blake3-wasm": "2.1.5", 2578 + "esbuild": "0.25.2", 2579 + "miniflare": "4.20250428.1", 2580 + "path-to-regexp": "6.3.0", 2581 + "unenv": "2.0.0-rc.15", 2582 + "workerd": "1.20250428.0" 2583 + }, 2584 + "bin": { 2585 + "wrangler": "bin/wrangler.js", 2586 + "wrangler2": "bin/wrangler.js" 2587 + }, 2588 + "engines": { 2589 + "node": ">=18.0.0" 2590 + }, 2591 + "optionalDependencies": { 2592 + "fsevents": "~2.3.2", 2593 + "sharp": "^0.33.5" 2594 + }, 2595 + "peerDependencies": { 2596 + "@cloudflare/workers-types": "^4.20250428.0" 2597 + }, 2598 + "peerDependenciesMeta": { 2599 + 
"@cloudflare/workers-types": { 2600 + "optional": true 2601 + } 2602 + } 2603 + }, 2604 + "node_modules/wrangler/node_modules/@esbuild/aix-ppc64": { 2605 + "version": "0.25.2", 2606 + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.2.tgz", 2607 + "integrity": "sha512-wCIboOL2yXZym2cgm6mlA742s9QeJ8DjGVaL39dLN4rRwrOgOyYSnOaFPhKZGLb2ngj4EyfAFjsNJwPXZvseag==", 2608 + "cpu": [ 2609 + "ppc64" 2610 + ], 2611 + "dev": true, 2612 + "license": "MIT", 2613 + "optional": true, 2614 + "os": [ 2615 + "aix" 2616 + ], 2617 + "engines": { 2618 + "node": ">=18" 2619 + } 2620 + }, 2621 + "node_modules/wrangler/node_modules/@esbuild/android-arm": { 2622 + "version": "0.25.2", 2623 + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.2.tgz", 2624 + "integrity": "sha512-NQhH7jFstVY5x8CKbcfa166GoV0EFkaPkCKBQkdPJFvo5u+nGXLEH/ooniLb3QI8Fk58YAx7nsPLozUWfCBOJA==", 2625 + "cpu": [ 2626 + "arm" 2627 + ], 2628 + "dev": true, 2629 + "license": "MIT", 2630 + "optional": true, 2631 + "os": [ 2632 + "android" 2633 + ], 2634 + "engines": { 2635 + "node": ">=18" 2636 + } 2637 + }, 2638 + "node_modules/wrangler/node_modules/@esbuild/android-arm64": { 2639 + "version": "0.25.2", 2640 + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.2.tgz", 2641 + "integrity": "sha512-5ZAX5xOmTligeBaeNEPnPaeEuah53Id2tX4c2CVP3JaROTH+j4fnfHCkr1PjXMd78hMst+TlkfKcW/DlTq0i4w==", 2642 + "cpu": [ 2643 + "arm64" 2644 + ], 2645 + "dev": true, 2646 + "license": "MIT", 2647 + "optional": true, 2648 + "os": [ 2649 + "android" 2650 + ], 2651 + "engines": { 2652 + "node": ">=18" 2653 + } 2654 + }, 2655 + "node_modules/wrangler/node_modules/@esbuild/android-x64": { 2656 + "version": "0.25.2", 2657 + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.2.tgz", 2658 + "integrity": "sha512-Ffcx+nnma8Sge4jzddPHCZVRvIfQ0kMsUsCMcJRHkGJ1cDmhe4SsrYIjLUKn1xpHZybmOqCWwB0zQvsjdEHtkg==", 2659 + "cpu": [ 2660 + "x64" 2661 + ], 2662 + "dev": true, 2663 + "license": "MIT", 2664 + "optional": true, 2665 + "os": [ 2666 + "android" 2667 + ], 2668 + "engines": { 2669 + "node": ">=18" 2670 + } 2671 + }, 2672 + "node_modules/wrangler/node_modules/@esbuild/darwin-arm64": { 2673 + "version": "0.25.2", 2674 + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.2.tgz", 2675 + "integrity": "sha512-MpM6LUVTXAzOvN4KbjzU/q5smzryuoNjlriAIx+06RpecwCkL9JpenNzpKd2YMzLJFOdPqBpuub6eVRP5IgiSA==", 2676 + "cpu": [ 2677 + "arm64" 2678 + ], 2679 + "dev": true, 2680 + "license": "MIT", 2681 + "optional": true, 2682 + "os": [ 2683 + "darwin" 2684 + ], 2685 + "engines": { 2686 + "node": ">=18" 2687 + } 2688 + }, 2689 + "node_modules/wrangler/node_modules/@esbuild/darwin-x64": { 2690 + "version": "0.25.2", 2691 + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.2.tgz", 2692 + "integrity": "sha512-5eRPrTX7wFyuWe8FqEFPG2cU0+butQQVNcT4sVipqjLYQjjh8a8+vUTfgBKM88ObB85ahsnTwF7PSIt6PG+QkA==", 2693 + "cpu": [ 2694 + "x64" 2695 + ], 2696 + "dev": true, 2697 + "license": "MIT", 2698 + "optional": true, 2699 + "os": [ 2700 + "darwin" 2701 + ], 2702 + "engines": { 2703 + "node": ">=18" 2704 + } 2705 + }, 2706 + "node_modules/wrangler/node_modules/@esbuild/freebsd-arm64": { 2707 + "version": "0.25.2", 2708 + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.2.tgz", 2709 + "integrity": "sha512-mLwm4vXKiQ2UTSX4+ImyiPdiHjiZhIaE9QvC7sw0tZ6HoNMjYAqQpGyui5VRIi5sGd+uWq940gdCbY3VLvsO1w==", 2710 + "cpu": [ 2711 + 
"arm64" 2712 + ], 2713 + "dev": true, 2714 + "license": "MIT", 2715 + "optional": true, 2716 + "os": [ 2717 + "freebsd" 2718 + ], 2719 + "engines": { 2720 + "node": ">=18" 2721 + } 2722 + }, 2723 + "node_modules/wrangler/node_modules/@esbuild/freebsd-x64": { 2724 + "version": "0.25.2", 2725 + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.2.tgz", 2726 + "integrity": "sha512-6qyyn6TjayJSwGpm8J9QYYGQcRgc90nmfdUb0O7pp1s4lTY+9D0H9O02v5JqGApUyiHOtkz6+1hZNvNtEhbwRQ==", 2727 + "cpu": [ 2728 + "x64" 2729 + ], 2730 + "dev": true, 2731 + "license": "MIT", 2732 + "optional": true, 2733 + "os": [ 2734 + "freebsd" 2735 + ], 2736 + "engines": { 2737 + "node": ">=18" 2738 + } 2739 + }, 2740 + "node_modules/wrangler/node_modules/@esbuild/linux-arm": { 2741 + "version": "0.25.2", 2742 + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.2.tgz", 2743 + "integrity": "sha512-UHBRgJcmjJv5oeQF8EpTRZs/1knq6loLxTsjc3nxO9eXAPDLcWW55flrMVc97qFPbmZP31ta1AZVUKQzKTzb0g==", 2744 + "cpu": [ 2745 + "arm" 2746 + ], 2747 + "dev": true, 2748 + "license": "MIT", 2749 + "optional": true, 2750 + "os": [ 2751 + "linux" 2752 + ], 2753 + "engines": { 2754 + "node": ">=18" 2755 + } 2756 + }, 2757 + "node_modules/wrangler/node_modules/@esbuild/linux-arm64": { 2758 + "version": "0.25.2", 2759 + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.2.tgz", 2760 + "integrity": "sha512-gq/sjLsOyMT19I8obBISvhoYiZIAaGF8JpeXu1u8yPv8BE5HlWYobmlsfijFIZ9hIVGYkbdFhEqC0NvM4kNO0g==", 2761 + "cpu": [ 2762 + "arm64" 2763 + ], 2764 + "dev": true, 2765 + "license": "MIT", 2766 + "optional": true, 2767 + "os": [ 2768 + "linux" 2769 + ], 2770 + "engines": { 2771 + "node": ">=18" 2772 + } 2773 + }, 2774 + "node_modules/wrangler/node_modules/@esbuild/linux-ia32": { 2775 + "version": "0.25.2", 2776 + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.2.tgz", 2777 + "integrity": "sha512-bBYCv9obgW2cBP+2ZWfjYTU+f5cxRoGGQ5SeDbYdFCAZpYWrfjjfYwvUpP8MlKbP0nwZ5gyOU/0aUzZ5HWPuvQ==", 2778 + "cpu": [ 2779 + "ia32" 2780 + ], 2781 + "dev": true, 2782 + "license": "MIT", 2783 + "optional": true, 2784 + "os": [ 2785 + "linux" 2786 + ], 2787 + "engines": { 2788 + "node": ">=18" 2789 + } 2790 + }, 2791 + "node_modules/wrangler/node_modules/@esbuild/linux-loong64": { 2792 + "version": "0.25.2", 2793 + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.2.tgz", 2794 + "integrity": "sha512-SHNGiKtvnU2dBlM5D8CXRFdd+6etgZ9dXfaPCeJtz+37PIUlixvlIhI23L5khKXs3DIzAn9V8v+qb1TRKrgT5w==", 2795 + "cpu": [ 2796 + "loong64" 2797 + ], 2798 + "dev": true, 2799 + "license": "MIT", 2800 + "optional": true, 2801 + "os": [ 2802 + "linux" 2803 + ], 2804 + "engines": { 2805 + "node": ">=18" 2806 + } 2807 + }, 2808 + "node_modules/wrangler/node_modules/@esbuild/linux-mips64el": { 2809 + "version": "0.25.2", 2810 + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.2.tgz", 2811 + "integrity": "sha512-hDDRlzE6rPeoj+5fsADqdUZl1OzqDYow4TB4Y/3PlKBD0ph1e6uPHzIQcv2Z65u2K0kpeByIyAjCmjn1hJgG0Q==", 2812 + "cpu": [ 2813 + "mips64el" 2814 + ], 2815 + "dev": true, 2816 + "license": "MIT", 2817 + "optional": true, 2818 + "os": [ 2819 + "linux" 2820 + ], 2821 + "engines": { 2822 + "node": ">=18" 2823 + } 2824 + }, 2825 + "node_modules/wrangler/node_modules/@esbuild/linux-ppc64": { 2826 + "version": "0.25.2", 2827 + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.2.tgz", 2828 + "integrity": 
"sha512-tsHu2RRSWzipmUi9UBDEzc0nLc4HtpZEI5Ba+Omms5456x5WaNuiG3u7xh5AO6sipnJ9r4cRWQB2tUjPyIkc6g==", 2829 + "cpu": [ 2830 + "ppc64" 2831 + ], 2832 + "dev": true, 2833 + "license": "MIT", 2834 + "optional": true, 2835 + "os": [ 2836 + "linux" 2837 + ], 2838 + "engines": { 2839 + "node": ">=18" 2840 + } 2841 + }, 2842 + "node_modules/wrangler/node_modules/@esbuild/linux-riscv64": { 2843 + "version": "0.25.2", 2844 + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.2.tgz", 2845 + "integrity": "sha512-k4LtpgV7NJQOml/10uPU0s4SAXGnowi5qBSjaLWMojNCUICNu7TshqHLAEbkBdAszL5TabfvQ48kK84hyFzjnw==", 2846 + "cpu": [ 2847 + "riscv64" 2848 + ], 2849 + "dev": true, 2850 + "license": "MIT", 2851 + "optional": true, 2852 + "os": [ 2853 + "linux" 2854 + ], 2855 + "engines": { 2856 + "node": ">=18" 2857 + } 2858 + }, 2859 + "node_modules/wrangler/node_modules/@esbuild/linux-s390x": { 2860 + "version": "0.25.2", 2861 + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.2.tgz", 2862 + "integrity": "sha512-GRa4IshOdvKY7M/rDpRR3gkiTNp34M0eLTaC1a08gNrh4u488aPhuZOCpkF6+2wl3zAN7L7XIpOFBhnaE3/Q8Q==", 2863 + "cpu": [ 2864 + "s390x" 2865 + ], 2866 + "dev": true, 2867 + "license": "MIT", 2868 + "optional": true, 2869 + "os": [ 2870 + "linux" 2871 + ], 2872 + "engines": { 2873 + "node": ">=18" 2874 + } 2875 + }, 2876 + "node_modules/wrangler/node_modules/@esbuild/linux-x64": { 2877 + "version": "0.25.2", 2878 + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.2.tgz", 2879 + "integrity": "sha512-QInHERlqpTTZ4FRB0fROQWXcYRD64lAoiegezDunLpalZMjcUcld3YzZmVJ2H/Cp0wJRZ8Xtjtj0cEHhYc/uUg==", 2880 + "cpu": [ 2881 + "x64" 2882 + ], 2883 + "dev": true, 2884 + "license": "MIT", 2885 + "optional": true, 2886 + "os": [ 2887 + "linux" 2888 + ], 2889 + "engines": { 2890 + "node": ">=18" 2891 + } 2892 + }, 2893 + "node_modules/wrangler/node_modules/@esbuild/netbsd-arm64": { 2894 + "version": "0.25.2", 2895 + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.2.tgz", 2896 + "integrity": "sha512-talAIBoY5M8vHc6EeI2WW9d/CkiO9MQJ0IOWX8hrLhxGbro/vBXJvaQXefW2cP0z0nQVTdQ/eNyGFV1GSKrxfw==", 2897 + "cpu": [ 2898 + "arm64" 2899 + ], 2900 + "dev": true, 2901 + "license": "MIT", 2902 + "optional": true, 2903 + "os": [ 2904 + "netbsd" 2905 + ], 2906 + "engines": { 2907 + "node": ">=18" 2908 + } 2909 + }, 2910 + "node_modules/wrangler/node_modules/@esbuild/netbsd-x64": { 2911 + "version": "0.25.2", 2912 + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.2.tgz", 2913 + "integrity": "sha512-voZT9Z+tpOxrvfKFyfDYPc4DO4rk06qamv1a/fkuzHpiVBMOhpjK+vBmWM8J1eiB3OLSMFYNaOaBNLXGChf5tg==", 2914 + "cpu": [ 2915 + "x64" 2916 + ], 2917 + "dev": true, 2918 + "license": "MIT", 2919 + "optional": true, 2920 + "os": [ 2921 + "netbsd" 2922 + ], 2923 + "engines": { 2924 + "node": ">=18" 2925 + } 2926 + }, 2927 + "node_modules/wrangler/node_modules/@esbuild/openbsd-arm64": { 2928 + "version": "0.25.2", 2929 + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.2.tgz", 2930 + "integrity": "sha512-dcXYOC6NXOqcykeDlwId9kB6OkPUxOEqU+rkrYVqJbK2hagWOMrsTGsMr8+rW02M+d5Op5NNlgMmjzecaRf7Tg==", 2931 + "cpu": [ 2932 + "arm64" 2933 + ], 2934 + "dev": true, 2935 + "license": "MIT", 2936 + "optional": true, 2937 + "os": [ 2938 + "openbsd" 2939 + ], 2940 + "engines": { 2941 + "node": ">=18" 2942 + } 2943 + }, 2944 + "node_modules/wrangler/node_modules/@esbuild/openbsd-x64": { 2945 + "version": "0.25.2", 2946 + 
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.2.tgz", 2947 + "integrity": "sha512-t/TkWwahkH0Tsgoq1Ju7QfgGhArkGLkF1uYz8nQS/PPFlXbP5YgRpqQR3ARRiC2iXoLTWFxc6DJMSK10dVXluw==", 2948 + "cpu": [ 2949 + "x64" 2950 + ], 2951 + "dev": true, 2952 + "license": "MIT", 2953 + "optional": true, 2954 + "os": [ 2955 + "openbsd" 2956 + ], 2957 + "engines": { 2958 + "node": ">=18" 2959 + } 2960 + }, 2961 + "node_modules/wrangler/node_modules/@esbuild/sunos-x64": { 2962 + "version": "0.25.2", 2963 + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.2.tgz", 2964 + "integrity": "sha512-cfZH1co2+imVdWCjd+D1gf9NjkchVhhdpgb1q5y6Hcv9TP6Zi9ZG/beI3ig8TvwT9lH9dlxLq5MQBBgwuj4xvA==", 2965 + "cpu": [ 2966 + "x64" 2967 + ], 2968 + "dev": true, 2969 + "license": "MIT", 2970 + "optional": true, 2971 + "os": [ 2972 + "sunos" 2973 + ], 2974 + "engines": { 2975 + "node": ">=18" 2976 + } 2977 + }, 2978 + "node_modules/wrangler/node_modules/@esbuild/win32-arm64": { 2979 + "version": "0.25.2", 2980 + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.2.tgz", 2981 + "integrity": "sha512-7Loyjh+D/Nx/sOTzV8vfbB3GJuHdOQyrOryFdZvPHLf42Tk9ivBU5Aedi7iyX+x6rbn2Mh68T4qq1SDqJBQO5Q==", 2982 + "cpu": [ 2983 + "arm64" 2984 + ], 2985 + "dev": true, 2986 + "license": "MIT", 2987 + "optional": true, 2988 + "os": [ 2989 + "win32" 2990 + ], 2991 + "engines": { 2992 + "node": ">=18" 2993 + } 2994 + }, 2995 + "node_modules/wrangler/node_modules/@esbuild/win32-ia32": { 2996 + "version": "0.25.2", 2997 + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.2.tgz", 2998 + "integrity": "sha512-WRJgsz9un0nqZJ4MfhabxaD9Ft8KioqU3JMinOTvobbX6MOSUigSBlogP8QB3uxpJDsFS6yN+3FDBdqE5lg9kg==", 2999 + "cpu": [ 3000 + "ia32" 3001 + ], 3002 + "dev": true, 3003 + "license": "MIT", 3004 + "optional": true, 3005 + "os": [ 3006 + "win32" 3007 + ], 3008 + "engines": { 3009 + "node": ">=18" 3010 + } 3011 + }, 3012 + "node_modules/wrangler/node_modules/@esbuild/win32-x64": { 3013 + "version": "0.25.2", 3014 + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.2.tgz", 3015 + "integrity": "sha512-kM3HKb16VIXZyIeVrM1ygYmZBKybX8N4p754bw390wGO3Tf2j4L2/WYL+4suWujpgf6GBYs3jv7TyUivdd05JA==", 3016 + "cpu": [ 3017 + "x64" 3018 + ], 3019 + "dev": true, 3020 + "license": "MIT", 3021 + "optional": true, 3022 + "os": [ 3023 + "win32" 3024 + ], 3025 + "engines": { 3026 + "node": ">=18" 3027 + } 3028 + }, 3029 + "node_modules/wrangler/node_modules/esbuild": { 3030 + "version": "0.25.2", 3031 + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.2.tgz", 3032 + "integrity": "sha512-16854zccKPnC+toMywC+uKNeYSv+/eXkevRAfwRD/G9Cleq66m8XFIrigkbvauLLlCfDL45Q2cWegSg53gGBnQ==", 3033 + "dev": true, 3034 + "hasInstallScript": true, 3035 + "license": "MIT", 3036 + "bin": { 3037 + "esbuild": "bin/esbuild" 3038 + }, 3039 + "engines": { 3040 + "node": ">=18" 3041 + }, 3042 + "optionalDependencies": { 3043 + "@esbuild/aix-ppc64": "0.25.2", 3044 + "@esbuild/android-arm": "0.25.2", 3045 + "@esbuild/android-arm64": "0.25.2", 3046 + "@esbuild/android-x64": "0.25.2", 3047 + "@esbuild/darwin-arm64": "0.25.2", 3048 + "@esbuild/darwin-x64": "0.25.2", 3049 + "@esbuild/freebsd-arm64": "0.25.2", 3050 + "@esbuild/freebsd-x64": "0.25.2", 3051 + "@esbuild/linux-arm": "0.25.2", 3052 + "@esbuild/linux-arm64": "0.25.2", 3053 + "@esbuild/linux-ia32": "0.25.2", 3054 + "@esbuild/linux-loong64": "0.25.2", 3055 + "@esbuild/linux-mips64el": "0.25.2", 3056 + 
"@esbuild/linux-ppc64": "0.25.2", 3057 + "@esbuild/linux-riscv64": "0.25.2", 3058 + "@esbuild/linux-s390x": "0.25.2", 3059 + "@esbuild/linux-x64": "0.25.2", 3060 + "@esbuild/netbsd-arm64": "0.25.2", 3061 + "@esbuild/netbsd-x64": "0.25.2", 3062 + "@esbuild/openbsd-arm64": "0.25.2", 3063 + "@esbuild/openbsd-x64": "0.25.2", 3064 + "@esbuild/sunos-x64": "0.25.2", 3065 + "@esbuild/win32-arm64": "0.25.2", 3066 + "@esbuild/win32-ia32": "0.25.2", 3067 + "@esbuild/win32-x64": "0.25.2" 3068 + } 3069 + }, 3070 + "node_modules/ws": { 3071 + "version": "8.18.0", 3072 + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", 3073 + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", 3074 + "dev": true, 3075 + "license": "MIT", 3076 + "engines": { 3077 + "node": ">=10.0.0" 3078 + }, 3079 + "peerDependencies": { 3080 + "bufferutil": "^4.0.1", 3081 + "utf-8-validate": ">=5.0.2" 3082 + }, 3083 + "peerDependenciesMeta": { 3084 + "bufferutil": { 3085 + "optional": true 3086 + }, 3087 + "utf-8-validate": { 3088 + "optional": true 3089 + } 3090 + } 3091 + }, 3092 + "node_modules/youch": { 3093 + "version": "3.3.4", 3094 + "resolved": "https://registry.npmjs.org/youch/-/youch-3.3.4.tgz", 3095 + "integrity": "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg==", 3096 + "dev": true, 3097 + "license": "MIT", 3098 + "dependencies": { 3099 + "cookie": "^0.7.1", 3100 + "mustache": "^4.2.0", 3101 + "stacktracey": "^2.1.8" 3102 + } 3103 + }, 3104 + "node_modules/zod": { 3105 + "version": "3.24.3", 3106 + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.3.tgz", 3107 + "integrity": "sha512-HhY1oqzWCQWuUqvBFnsyrtZRhyPeR7SUGv+C4+MsisMuVfSPx8HpwWqH8tRahSlt6M3PiFAcoeFhZAqIXTxoSg==", 3108 + "dev": true, 3109 + "license": "MIT", 3110 + "funding": { 3111 + "url": "https://github.com/sponsors/colinhacks" 3112 + } 3113 + } 3114 + } 3024 3115 }
+18 -14
avatar/package.json
··· 1 1 { 2 - "name": "avatar", 3 - "version": "0.0.0", 4 - "private": true, 5 - "scripts": { 6 - "deploy": "wrangler deploy", 7 - "dev": "wrangler dev", 8 - "start": "wrangler dev", 9 - "test": "vitest" 10 - }, 11 - "devDependencies": { 12 - "@cloudflare/vitest-pool-workers": "^0.8.19", 13 - "vitest": "~3.0.7", 14 - "wrangler": "^4.14.1" 15 - } 2 + "name": "avatar", 3 + "version": "0.0.0", 4 + "private": true, 5 + "type": "module", 6 + "scripts": { 7 + "deploy": "wrangler deploy", 8 + "dev": "wrangler dev", 9 + "start": "wrangler dev", 10 + "test": "vitest" 11 + }, 12 + "dependencies": { 13 + "@atcute/identity-resolver": "^1.2.2" 14 + }, 15 + "devDependencies": { 16 + "@cloudflare/vitest-pool-workers": "^0.8.19", 17 + "vitest": "~3.0.7", 18 + "wrangler": "^4.14.1" 19 + } 16 20 }
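Note on avatar/package.json: besides re-indentation, the hunk adds "type": "module" and promotes @atcute/identity-resolver to a runtime dependency. The module flag is what makes the top-level ESM imports in src/index.js valid in the Worker entry point. A minimal sketch, assuming the export names used in the index.js diff below:

    // install once: npm install @atcute/identity-resolver
    import { CompositeHandleResolver, WellKnownHandleResolver } from "@atcute/identity-resolver";
    // with "type": "module" set, wrangler bundles this entry point as ESM.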
+156 -9
avatar/src/index.js
··· 1 + import { 2 + LocalActorResolver, 3 + CompositeHandleResolver, 4 + DohJsonHandleResolver, 5 + WellKnownHandleResolver, 6 + CompositeDidDocumentResolver, 7 + PlcDidDocumentResolver, 8 + WebDidDocumentResolver, 9 + } from "@atcute/identity-resolver"; 10 + 11 + // Initialize resolvers for Cloudflare Workers 12 + const handleResolver = new CompositeHandleResolver({ 13 + strategy: "race", 14 + methods: { 15 + dns: new DohJsonHandleResolver({ 16 + dohUrl: "https://cloudflare-dns.com/dns-query", 17 + }), 18 + http: new WellKnownHandleResolver(), 19 + }, 20 + }); 21 + 22 + const didDocumentResolver = new CompositeDidDocumentResolver({ 23 + methods: { 24 + plc: new PlcDidDocumentResolver(), 25 + web: new WebDidDocumentResolver(), 26 + }, 27 + }); 28 + 29 + const actorResolver = new LocalActorResolver({ 30 + handleResolver, 31 + didDocumentResolver, 32 + }); 33 + 1 34 export default { 2 35 async fetch(request, env) { 3 36 // Helper function to generate a color from a string ··· 14 47 return color; 15 48 }; 16 49 50 + // Helper function to fetch Tangled profile from PDS 51 + const getTangledAvatarFromPDS = async (actor) => { 52 + try { 53 + // Resolve the identity 54 + const identity = await actorResolver.resolve(actor); 55 + if (!identity) { 56 + console.log({ 57 + level: "debug", 58 + message: "failed to resolve identity", 59 + actor: actor, 60 + }); 61 + return null; 62 + } 63 + 64 + const did = identity.did; 65 + const pdsEndpoint = identity.pds.replace(/\/$/, ""); // Remove trailing slash 66 + 67 + if (!pdsEndpoint) { 68 + console.log({ 69 + level: "debug", 70 + message: "no PDS endpoint found", 71 + actor: actor, 72 + did: did, 73 + }); 74 + return null; 75 + } 76 + 77 + const profileUrl = `${pdsEndpoint}/xrpc/com.atproto.repo.getRecord?repo=${did}&collection=sh.tangled.actor.profile&rkey=self`; 78 + 79 + // Fetch the Tangled profile record from PDS 80 + const profileResponse = await fetch(profileUrl); 81 + 82 + if (!profileResponse.ok) { 83 + console.log({ 84 + level: "debug", 85 + message: "no Tangled profile found on PDS", 86 + actor: actor, 87 + status: profileResponse.status, 88 + }); 89 + return null; 90 + } 91 + 92 + const profileData = await profileResponse.json(); 93 + const avatarBlob = profileData?.value?.avatar; 94 + 95 + if (!avatarBlob) { 96 + console.log({ 97 + level: "debug", 98 + message: "Tangled profile has no avatar", 99 + actor: actor, 100 + }); 101 + return null; 102 + } 103 + 104 + // Extract CID from blob reference object 105 + // The ref might be an object with $link property or a string 106 + let avatarCID; 107 + if (typeof avatarBlob.ref === "string") { 108 + avatarCID = avatarBlob.ref; 109 + } else if (avatarBlob.ref?.$link) { 110 + avatarCID = avatarBlob.ref.$link; 111 + } else if (typeof avatarBlob === "string") { 112 + avatarCID = avatarBlob; 113 + } 114 + 115 + if (!avatarCID || typeof avatarCID !== "string") { 116 + console.log({ 117 + level: "warn", 118 + message: "could not extract valid CID from avatar blob", 119 + actor: actor, 120 + avatarBlob: avatarBlob, 121 + avatarBlobRef: avatarBlob.ref, 122 + }); 123 + return null; 124 + } 125 + 126 + // Construct blob URL (pdsEndpoint already has trailing slash removed) 127 + const blobUrl = `${pdsEndpoint}/xrpc/com.atproto.sync.getBlob?did=${did}&cid=${avatarCID}`; 128 + 129 + return blobUrl; 130 + } catch (e) { 131 + console.log({ 132 + level: "warn", 133 + message: "error fetching Tangled avatar from PDS", 134 + actor: actor, 135 + error: e.message, 136 + }); 137 + return null; 138 + } 139 + }; 140 + 17 141 
const url = new URL(request.url); 18 142 const { pathname, searchParams } = url; 19 143 20 144 if (!pathname || pathname === "/") { 21 - return new Response(`This is Tangled's avatar service. It fetches your pretty avatar from Bluesky and caches it on Cloudflare. 22 - You can't use this directly unfortunately since all requests are signed and may only originate from the appview.`); 145 + return new Response( 146 + `This is Tangled's avatar service. It fetches your pretty avatar from your PDS, Bluesky, or generates a placeholder. 147 + You can't use this directly unfortunately since all requests are signed and may only originate from the appview.`, 148 + ); 23 149 } 24 150 25 151 const size = searchParams.get("size"); ··· 68 194 } 69 195 70 196 try { 71 - const profileResponse = await fetch( 72 - `https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${actor}`, 73 - ); 74 - const profile = await profileResponse.json(); 75 - const avatar = profile.avatar; 197 + let avatarUrl = null; 198 + 199 + // Try to get Tangled avatar from user's PDS first 200 + avatarUrl = await getTangledAvatarFromPDS(actor); 201 + 202 + // If no Tangled avatar, fall back to Bluesky 203 + if (!avatarUrl) { 204 + console.log({ 205 + level: "debug", 206 + message: "no Tangled avatar, falling back to Bluesky", 207 + actor: actor, 208 + }); 209 + 210 + const profileResponse = await fetch( 211 + `https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${actor}`, 212 + ); 76 213 77 - let avatarUrl = profile.avatar; 214 + if (profileResponse.ok) { 215 + const profile = await profileResponse.json(); 216 + avatarUrl = profile.avatar; 217 + } 218 + } 78 219 79 220 if (!avatarUrl) { 80 221 // Generate a random color based on the actor string 222 + console.log({ 223 + level: "debug", 224 + message: "no avatar found, generating placeholder", 225 + actor: actor, 226 + }); 227 + 81 228 const bgColor = stringToColor(actor); 82 229 const size = resizeToTiny ? 32 : 128; 83 230 const svg = `<svg width="${size}" height="${size}" viewBox="0 0 ${size} ${size}" xmlns="http://www.w3.org/2000/svg"><rect width="${size}" height="${size}" fill="${bgColor}"/></svg>`; ··· 93 240 return response; 94 241 } 95 242 96 - // Resize if requested 243 + // Fetch and optionally resize the avatar 97 244 let avatarResponse; 98 245 if (resizeToTiny) { 99 246 avatarResponse = await fetch(avatarUrl, {
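The worker above resolves the actor, reads the `sh.tangled.actor.profile` record from the owner's PDS, and turns the avatar blob's CID into a `com.atproto.sync.getBlob` URL before falling back to Bluesky or a generated placeholder. A minimal Go sketch of that same lookup, assuming the PDS endpoint and DID have already been resolved; the struct shape and helper name here are illustrative, not part of the diff:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// profileRecord models only the fields this sketch needs; the full record
// shape lives in api/tangled/actorprofile.go.
type profileRecord struct {
	Value struct {
		Avatar *struct {
			Ref struct {
				Link string `json:"$link"`
			} `json:"ref"`
		} `json:"avatar"`
	} `json:"value"`
}

// avatarBlobURL fetches the Tangled profile record and builds a getBlob URL
// for its avatar. pds is the bare endpoint (no trailing slash), did the repo DID.
func avatarBlobURL(pds, did string) (string, error) {
	q := url.Values{}
	q.Set("repo", did)
	q.Set("collection", "sh.tangled.actor.profile")
	q.Set("rkey", "self")

	resp, err := http.Get(pds + "/xrpc/com.atproto.repo.getRecord?" + q.Encode())
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("no Tangled profile record: %s", resp.Status)
	}

	var rec profileRecord
	if err := json.NewDecoder(resp.Body).Decode(&rec); err != nil {
		return "", err
	}
	if rec.Value.Avatar == nil || rec.Value.Avatar.Ref.Link == "" {
		return "", fmt.Errorf("profile has no avatar")
	}

	return fmt.Sprintf("%s/xrpc/com.atproto.sync.getBlob?did=%s&cid=%s",
		pds, did, rec.Value.Avatar.Ref.Link), nil
}
```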
+13 -13
avatar/wrangler.jsonc
··· 1 1 { 2 - "$schema": "node_modules/wrangler/config-schema.json", 3 - "name": "avatar", 4 - "main": "src/index.js", 5 - "compatibility_date": "2025-05-03", 6 - "observability": { 7 - "enabled": true, 8 - }, 9 - "routes": [ 10 - { 11 - "pattern": "avatar.tangled.sh", 12 - "custom_domain": true, 13 - }, 14 - ], 2 + "$schema": "node_modules/wrangler/config-schema.json", 3 + "name": "avatar", 4 + "main": "src/index.js", 5 + "compatibility_date": "2025-05-03", 6 + "observability": { 7 + "enabled": true, 8 + }, 9 + "routes": [ 10 + { 11 + "pattern": "avatar.tangled.sh", 12 + "custom_domain": true, 13 + }, 14 + ], 15 15 }
-6
cmd/cborgen/cborgen.go
··· 55 55 tangled.Spindle{}, 56 56 tangled.SpindleMember{}, 57 57 tangled.String{}, 58 - tangled.CiEvent{}, 59 - tangled.CiEvent_PullRequest{}, 60 - tangled.CiEvent_Push{}, 61 - tangled.CiEvent_Manual{}, 62 - tangled.CiPipeline{}, 63 - tangled.CiWorkflowRun{}, 64 58 ); err != nil { 65 59 panic(err) 66 60 }
-11
contrib/certs/root.crt
··· 1 - -----BEGIN CERTIFICATE----- 2 - MIIBozCCAUmgAwIBAgIQRnYoKs3BuihlLFeydgURVzAKBggqhkjOPQQDAjAwMS4w 3 - LAYDVQQDEyVDYWRkeSBMb2NhbCBBdXRob3JpdHkgLSAyMDI2IEVDQyBSb290MB4X 4 - DTI2MDEwODEzNTk1MloXDTM1MTExNzEzNTk1MlowMDEuMCwGA1UEAxMlQ2FkZHkg 5 - TG9jYWwgQXV0aG9yaXR5IC0gMjAyNiBFQ0MgUm9vdDBZMBMGByqGSM49AgEGCCqG 6 - SM49AwEHA0IABCQlYShhxLaX8/ZP7rcBtD5xL4u3wYMe77JS/lRFjjpAUGmJPxUE 7 - ctsNvukG1hU4MeLMSqAEIqFWjs8dQBxLjGSjRTBDMA4GA1UdDwEB/wQEAwIBBjAS 8 - BgNVHRMBAf8ECDAGAQH/AgEBMB0GA1UdDgQWBBQ7Mt/6izTOOXCSWDS6HrwrqMDB 9 - vzAKBggqhkjOPQQDAgNIADBFAiEA9QAYIuHR5qsGJ1JMZnuAAQpEwaqewhUICsKO 10 - e2fWj4ACICPgj9Kh9++8FH5eVyDI1AD/BLwmMmiaqs1ojZT7QJqb 11 - -----END CERTIFICATE-----
-31
contrib/example.env
··· 1 - # NOTE: put actual DIDs here 2 - alice_did=did:plc:alice-did 3 - tangled_did=did:plc:tangled-did 4 - 5 - #core 6 - export TANGLED_DEV=true 7 - export TANGLED_APPVIEW_HOST=127.0.0.1:3000 8 - # plc 9 - export TANGLED_PLC_URL=https://plc.tngl.boltless.dev 10 - # jetstream 11 - export TANGLED_JETSTREAM_ENDPOINT=wss://jetstream.tngl.boltless.dev/subscribe 12 - # label 13 - export TANGLED_LABEL_GFI=at://${tangled_did}/sh.tangled.label.definition/good-first-issue 14 - export TANGLED_LABEL_DEFAULTS=$TANGLED_LABEL_GFI 15 - export TANGLED_LABEL_DEFAULTS=$TANGLED_LABEL_DEFAULTS,at://${tangled_did}/sh.tangled.label.definition/assignee 16 - export TANGLED_LABEL_DEFAULTS=$TANGLED_LABEL_DEFAULTS,at://${tangled_did}/sh.tangled.label.definition/documentation 17 - export TANGLED_LABEL_DEFAULTS=$TANGLED_LABEL_DEFAULTS,at://${tangled_did}/sh.tangled.label.definition/duplicate 18 - export TANGLED_LABEL_DEFAULTS=$TANGLED_LABEL_DEFAULTS,at://${tangled_did}/sh.tangled.label.definition/wontfix 19 - 20 - # vm settings 21 - export TANGLED_VM_PLC_URL=https://plc.tngl.boltless.dev 22 - export TANGLED_VM_JETSTREAM_ENDPOINT=wss://jetstream.tngl.boltless.dev/subscribe 23 - export TANGLED_VM_KNOT_HOST=knot.tngl.boltless.dev 24 - export TANGLED_VM_KNOT_OWNER=$alice_did 25 - export TANGLED_VM_SPINDLE_HOST=spindle.tngl.boltless.dev 26 - export TANGLED_VM_SPINDLE_OWNER=$alice_did 27 - 28 - if [ -n "${TANGLED_RESEND_API_KEY:-}" ] && [ -n "${TANGLED_RESEND_SENT_FROM:-}" ]; then 29 - export TANGLED_VM_PDS_EMAIL_SMTP_URL=smtps://resend:$TANGLED_RESEND_API_KEY@smtp.resend.com:465/ 30 - export TANGLED_VM_PDS_EMAIL_FROM_ADDRESS=$TANGLED_RESEND_SENT_FROM 31 - fi
-12
contrib/pds.env
··· 1 - LOG_ENABLED=true 2 - 3 - PDS_JWT_SECRET=8cae8bffcc73d9932819650791e4e89a 4 - PDS_ADMIN_PASSWORD=d6a902588cd93bee1af83f924f60cfd3 5 - PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX=2e92e336a50a618458e1097d94a1db86ec3fd8829d7735020cbae80625c761d7 6 - 7 - PDS_DATA_DIRECTORY=/pds 8 - PDS_BLOBSTORE_DISK_LOCATION=/pds/blocks 9 - 10 - PDS_DID_PLC_URL=http://localhost:8080 11 - PDS_HOSTNAME=pds.tngl.boltless.dev 12 - PDS_PORT=3000
-25
contrib/readme.md
··· 1 - # how to setup local appview dev environment 2 - 3 - Appview requires several microservices from knot and spindle to entire atproto infra. This test environment is implemented under nixos vm. 4 - 5 - 1. copy `contrib/example.env` to `.env`, fill it and source it 6 - 2. run vm 7 - ```bash 8 - nix run --impure .#vm 9 - ``` 10 - 3. trust the generated cert from host machine 11 - ```bash 12 - # for macos 13 - sudo security add-trusted-cert -d -r trustRoot \ 14 - -k /Library/Keychains/System.keychain \ 15 - ./nix/vm-data/caddy/.local/share/caddy/pki/authorities/local/root.crt 16 - ``` 17 - 4. create test accounts with valid emails (use [`create-test-account.sh`](./scripts/create-test-account.sh)) 18 - 5. create default labels (use [`setup-const-records`](./scripts/setup-const-records.sh)) 19 - 6. restart vm with correct owner-did 20 - 21 - for git-https, you should change your local git config: 22 - ``` 23 - [http "https://knot.tngl.boltless.dev"] 24 - sslCAPath = /Users/boltless/repo/tangled/nix/vm-data/caddy/.local/share/caddy/pki/authorities/local/ 25 - ```
-68
contrib/scripts/create-test-account.sh
··· 1 - #!/bin/bash 2 - set -o errexit 3 - set -o nounset 4 - set -o pipefail 5 - 6 - source "$(dirname "$0")/../pds.env" 7 - 8 - # PDS_HOSTNAME= 9 - # PDS_ADMIN_PASSWORD= 10 - 11 - # curl a URL and fail if the request fails. 12 - function curl_cmd_get { 13 - curl --fail --silent --show-error "$@" 14 - } 15 - 16 - # curl a URL and fail if the request fails. 17 - function curl_cmd_post { 18 - curl --fail --silent --show-error --request POST --header "Content-Type: application/json" "$@" 19 - } 20 - 21 - # curl a URL but do not fail if the request fails. 22 - function curl_cmd_post_nofail { 23 - curl --silent --show-error --request POST --header "Content-Type: application/json" "$@" 24 - } 25 - 26 - USERNAME="${1:-}" 27 - 28 - if [[ "${USERNAME}" == "" ]]; then 29 - read -p "Enter a username: " USERNAME 30 - fi 31 - 32 - if [[ "${USERNAME}" == "" ]]; then 33 - echo "ERROR: missing USERNAME parameter." >/dev/stderr 34 - echo "Usage: $0 ${SUBCOMMAND} <USERNAME>" >/dev/stderr 35 - exit 1 36 - fi 37 - 38 - EMAIL=${USERNAME}@${PDS_HOSTNAME} 39 - 40 - PASSWORD="password" 41 - INVITE_CODE="$(curl_cmd_post \ 42 - --user "admin:${PDS_ADMIN_PASSWORD}" \ 43 - --data '{"useCount": 1}' \ 44 - "https://${PDS_HOSTNAME}/xrpc/com.atproto.server.createInviteCode" | jq --raw-output '.code' 45 - )" 46 - RESULT="$(curl_cmd_post_nofail \ 47 - --data "{\"email\":\"${EMAIL}\", \"handle\":\"${USERNAME}.${PDS_HOSTNAME}\", \"password\":\"${PASSWORD}\", \"inviteCode\":\"${INVITE_CODE}\"}" \ 48 - "https://${PDS_HOSTNAME}/xrpc/com.atproto.server.createAccount" 49 - )" 50 - 51 - DID="$(echo $RESULT | jq --raw-output '.did')" 52 - if [[ "${DID}" != did:* ]]; then 53 - ERR="$(echo ${RESULT} | jq --raw-output '.message')" 54 - echo "ERROR: ${ERR}" >/dev/stderr 55 - echo "Usage: $0 <EMAIL> <HANDLE>" >/dev/stderr 56 - exit 1 57 - fi 58 - 59 - echo 60 - echo "Account created successfully!" 61 - echo "-----------------------------" 62 - echo "Handle : ${USERNAME}.${PDS_HOSTNAME}" 63 - echo "DID : ${DID}" 64 - echo "Password : ${PASSWORD}" 65 - echo "-----------------------------" 66 - echo "This is a test account with an insecure password." 67 - echo "Make sure it's only used for development." 68 - echo
-106
contrib/scripts/setup-const-records.sh
··· 1 - #!/bin/bash 2 - set -o errexit 3 - set -o nounset 4 - set -o pipefail 5 - 6 - source "$(dirname "$0")/../pds.env" 7 - 8 - # PDS_HOSTNAME= 9 - 10 - # curl a URL and fail if the request fails. 11 - function curl_cmd_get { 12 - curl --fail --silent --show-error "$@" 13 - } 14 - 15 - # curl a URL and fail if the request fails. 16 - function curl_cmd_post { 17 - curl --fail --silent --show-error --request POST --header "Content-Type: application/json" "$@" 18 - } 19 - 20 - # curl a URL but do not fail if the request fails. 21 - function curl_cmd_post_nofail { 22 - curl --silent --show-error --request POST --header "Content-Type: application/json" "$@" 23 - } 24 - 25 - USERNAME="${1:-}" 26 - 27 - if [[ "${USERNAME}" == "" ]]; then 28 - read -p "Enter a username: " USERNAME 29 - fi 30 - 31 - if [[ "${USERNAME}" == "" ]]; then 32 - echo "ERROR: missing USERNAME parameter." >/dev/stderr 33 - echo "Usage: $0 ${SUBCOMMAND} <USERNAME>" >/dev/stderr 34 - exit 1 35 - fi 36 - 37 - SESS_RESULT="$(curl_cmd_post \ 38 - --data "$(cat <<EOF 39 - { 40 - "identifier": "$USERNAME", 41 - "password": "password" 42 - } 43 - EOF 44 - )" \ 45 - https://pds.tngl.boltless.dev/xrpc/com.atproto.server.createSession 46 - )" 47 - 48 - echo $SESS_RESULT | jq 49 - 50 - DID="$(echo $SESS_RESULT | jq --raw-output '.did')" 51 - ACCESS_JWT="$(echo $SESS_RESULT | jq --raw-output '.accessJwt')" 52 - 53 - function add_label_def { 54 - local color=$1 55 - local name=$2 56 - echo $color 57 - echo $name 58 - local json_payload=$(cat <<EOF 59 - { 60 - "repo": "$DID", 61 - "collection": "sh.tangled.label.definition", 62 - "rkey": "$name", 63 - "record": { 64 - "name": "$name", 65 - "color": "$color", 66 - "scope": ["sh.tangled.repo.issue"], 67 - "multiple": false, 68 - "createdAt": "2025-09-22T11:14:35+01:00", 69 - "valueType": {"type": "null", "format": "any"} 70 - } 71 - } 72 - EOF 73 - ) 74 - echo $json_payload 75 - echo $json_payload | jq 76 - RESULT="$(curl_cmd_post \ 77 - --data "$json_payload" \ 78 - -H "Authorization: Bearer ${ACCESS_JWT}" \ 79 - "https://${PDS_HOSTNAME}/xrpc/com.atproto.repo.createRecord")" 80 - echo $RESULT | jq 81 - } 82 - 83 - add_label_def '#64748b' 'wontfix' 84 - add_label_def '#8B5CF6' 'good-first-issue' 85 - add_label_def '#ef4444' 'duplicate' 86 - add_label_def '#06b6d4' 'documentation' 87 - json_payload=$(cat <<EOF 88 - { 89 - "repo": "$DID", 90 - "collection": "sh.tangled.label.definition", 91 - "rkey": "assignee", 92 - "record": { 93 - "name": "assignee", 94 - "color": "#10B981", 95 - "scope": ["sh.tangled.repo.issue", "sh.tangled.repo.pull"], 96 - "multiple": false, 97 - "createdAt": "2025-09-22T11:14:35+01:00", 98 - "valueType": {"type": "string", "format": "did"} 99 - } 100 - } 101 - EOF 102 - ) 103 - curl_cmd_post \ 104 - --data "$json_payload" \ 105 - -H "Authorization: Bearer ${ACCESS_JWT}" \ 106 - "https://${PDS_HOSTNAME}/xrpc/com.atproto.repo.createRecord"
+86 -3
docs/DOCS.md
··· 375 375 KNOT_SERVER_LISTEN_ADDR=127.0.0.1:5555 376 376 ``` 377 377 378 - If you run a Linux distribution that uses systemd, you can use the provided 379 - service file to run the server. Copy 380 - [`knotserver.service`](/systemd/knotserver.service) 378 + If you run a Linux distribution that uses systemd, you can 379 + use the provided service file to run the server. Copy 380 + [`knotserver.service`](https://tangled.org/tangled.org/core/blob/master/systemd/knotserver.service) 381 381 to `/etc/systemd/system/`. Then, run: 382 382 383 383 ``` ··· 501 501 502 502 Note that you should add a newline at the end if setting a non-empty message 503 503 since the knot won't do this for you. 504 + 505 + ## Troubleshooting 506 + 507 + If you run your own knot, you may run into some of these 508 + common issues. You can always join the 509 + [IRC](https://web.libera.chat/#tangled) or 510 + [Discord](https://chat.tangled.org/) if this section does 511 + not help. 512 + 513 + ### Unable to push 514 + 515 + If you are unable to push to your knot or repository: 516 + 517 + 1. First, ensure that you have added your SSH public key to 518 + your account 519 + 2. Check to see that your knot has synced the key by running 520 + `knot keys` 521 + 3. Check to see if git is supplying the correct private key 522 + when pushing: `GIT_SSH_COMMAND="ssh -v" git push ...` 523 + 4. Check to see if `sshd` on the knot is rejecting the push 524 + for some reason: `journalctl -xeu ssh` (or `sshd`, 525 + depending on your machine). These logs are unavailable if 526 + using Docker. 527 + 5. Check to see if the knot itself is rejecting the push. 528 + Depending on your setup, the logs might be in one of the 529 + following paths: 530 + * `/tmp/knotguard.log` 531 + * `/home/git/log` 532 + * `/home/git/guard.log` 504 533 505 534 # Spindles 506 535 ··· 1561 1590 Refer to the [jujutsu 1562 1591 documentation](https://jj-vcs.github.io/jj/latest/config/#commit-trailers) 1563 1592 for more information. 1593 + 1594 + # Troubleshooting guide 1595 + 1596 + ## Login issues 1597 + 1598 + Owing to the distributed nature of OAuth on AT Protocol, you 1599 + may run into issues with logging in. If you run a 1600 + self-hosted PDS: 1601 + 1602 + - You may need to ensure that your PDS is time-synced using 1603 + NTP: 1604 + * Enable the `ntpd` service 1605 + * Run `ntpd -qg` to synchronize your clock 1606 + - You may need to increase the default request timeout: 1607 + `NODE_OPTIONS="--network-family-autoselection-attempt-timeout=500"` 1608 + 1609 + ## Empty punchcard 1610 + 1611 + For Tangled to register commits that you make across the 1612 + network, you need to set up one of the following: 1613 + 1614 + - The committer email should be a verified email associated 1615 + with your account. You can add and verify emails on the 1616 + settings page. 1617 + - Or, the committer email should be set to your account's 1618 + DID: `git config user.email "did:plc:foobar"`. You can find 1619 + your account's DID on the settings page. 1620 + 1621 + ## Commit is not marked as verified 1622 + 1623 + Presently, Tangled only supports SSH commit signatures. 
1624 + 1625 + To sign commits using an SSH key with git: 1626 + 1627 + ``` 1628 + git config --global gpg.format ssh 1629 + git config --global user.signingkey ~/.ssh/tangled-key 1630 + ``` 1631 + 1632 + To sign commits using an SSH key with jj, add this to your 1633 + config: 1634 + 1635 + ``` 1636 + [signing] 1637 + behavior = "own" 1638 + backend = "ssh" 1639 + key = "~/.ssh/tangled-key" 1640 + ``` 1641 + 1642 + ## Self-hosted knot issues 1643 + 1644 + If you need help troubleshooting a self-hosted knot, check 1645 + out the [knot troubleshooting 1646 + guide](/knot-self-hosting-guide.html#troubleshooting).
+17
flake.lock
··· 106 106 "url": "https://github.com/IBM/plex/releases/download/@ibm/plex-mono@1.1.0/ibm-plex-mono.zip" 107 107 } 108 108 }, 109 + "indigo": { 110 + "flake": false, 111 + "locked": { 112 + "lastModified": 1753693716, 113 + "narHash": "sha256-DMIKnCJRODQXEHUxA+7mLzRALmnZhkkbHlFT2rCQYrE=", 114 + "owner": "oppiliappan", 115 + "repo": "indigo", 116 + "rev": "5f170569da9360f57add450a278d73538092d8ca", 117 + "type": "github" 118 + }, 119 + "original": { 120 + "owner": "oppiliappan", 121 + "repo": "indigo", 122 + "type": "github" 123 + } 124 + }, 109 125 "inter-fonts-src": { 110 126 "flake": false, 111 127 "locked": { ··· 156 172 "htmx-src": "htmx-src", 157 173 "htmx-ws-src": "htmx-ws-src", 158 174 "ibm-plex-mono-src": "ibm-plex-mono-src", 175 + "indigo": "indigo", 159 176 "inter-fonts-src": "inter-fonts-src", 160 177 "lucide-src": "lucide-src", 161 178 "nixpkgs": "nixpkgs",
+12 -39
flake.nix
··· 11 11 url = "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz"; 12 12 flake = false; 13 13 }; 14 + indigo = { 15 + url = "github:oppiliappan/indigo"; 16 + flake = false; 17 + }; 14 18 htmx-src = { 15 19 url = "https://unpkg.com/htmx.org@2.0.4/dist/htmx.min.js"; 16 20 flake = false; ··· 47 51 self, 48 52 nixpkgs, 49 53 gomod2nix, 54 + indigo, 50 55 htmx-src, 51 56 htmx-ws-src, 52 57 lucide-src, ··· 77 82 sqlite-lib = self.callPackage ./nix/pkgs/sqlite-lib.nix { 78 83 inherit sqlite-lib-src; 79 84 }; 80 - lexgen = self.callPackage ./nix/pkgs/lexgen.nix {}; 85 + lexgen = self.callPackage ./nix/pkgs/lexgen.nix {inherit indigo;}; 86 + goat = self.callPackage ./nix/pkgs/goat.nix {inherit indigo;}; 81 87 appview-static-files = self.callPackage ./nix/pkgs/appview-static-files.nix { 82 88 inherit htmx-src htmx-ws-src lucide-src inter-fonts-src ibm-plex-mono-src actor-typeahead-src; 83 89 }; ··· 89 95 knot-unwrapped = self.callPackage ./nix/pkgs/knot-unwrapped.nix {}; 90 96 knot = self.callPackage ./nix/pkgs/knot.nix {}; 91 97 dolly = self.callPackage ./nix/pkgs/dolly.nix {}; 92 - did-method-plc = self.callPackage ./nix/pkgs/did-method-plc.nix {}; 93 - bluesky-jetstream = self.callPackage ./nix/pkgs/bluesky-jetstream.nix {}; 94 - bluesky-relay = self.callPackage ./nix/pkgs/bluesky-relay.nix {}; 95 - tap = self.callPackage ./nix/pkgs/tap.nix {}; 96 98 }); 97 99 in { 98 100 overlays.default = final: prev: { 99 - inherit (mkPackageSet final) lexgen sqlite-lib spindle knot-unwrapped knot appview docs dolly did-method-plc bluesky-jetstream bluesky-relay tap; 101 + inherit (mkPackageSet final) lexgen goat sqlite-lib spindle knot-unwrapped knot appview docs dolly; 100 102 }; 101 103 102 104 packages = forAllSystems (system: let ··· 110 112 appview 111 113 appview-static-files 112 114 lexgen 115 + goat 113 116 spindle 114 117 knot 115 118 knot-unwrapped 116 119 sqlite-lib 117 120 docs 118 121 dolly 119 - did-method-plc 120 - bluesky-jetstream 121 - bluesky-relay 122 - tap 123 122 ; 124 123 125 124 pkgsStatic-appview = staticPackages.appview; ··· 182 181 pkgs.nixos-shell 183 182 pkgs.redis 184 183 pkgs.coreutils # for those of us who are on systems that use busybox (alpine) 185 - pkgs.atproto-goat 186 184 packages'.lexgen 187 185 packages'.treefmt-wrapper 188 186 ]; ··· 191 189 # no preserve is needed because watch-tailwind will want to be able to overwrite 192 190 cp -fr --no-preserve=ownership ${packages'.appview-static-files}/* appview/pages/static 193 191 export TANGLED_OAUTH_CLIENT_KID="$(date +%s)" 194 - export TANGLED_OAUTH_CLIENT_SECRET="$(${pkgs.atproto-goat}/bin/goat key generate -t P-256 | grep -A1 "Secret Key" | tail -n1 | awk '{print $1}')" 192 + export TANGLED_OAUTH_CLIENT_SECRET="$(${packages'.goat}/bin/goat key generate -t P-256 | grep -A1 "Secret Key" | tail -n1 | awk '{print $1}')" 195 193 ''; 196 194 env.CGO_ENABLED = 1; 197 195 }; ··· 250 248 rootDir=$(jj --ignore-working-copy root || git rev-parse --show-toplevel) || (echo "error: can't find repo root?"; exit 1) 251 249 cd "$rootDir" 252 250 253 - mkdir -p nix/vm-data/{caddy,knot,repos,spindle,spindle-logs} 251 + mkdir -p nix/vm-data/{knot,repos,spindle,spindle-logs} 254 252 255 253 export TANGLED_VM_DATA_DIR="$rootDir/nix/vm-data" 256 254 exec ${pkgs.lib.getExe ··· 285 283 286 284 rm -f api/tangled/* 287 285 lexgen --build-file lexicon-build-config.json lexicons 288 - sed -i.bak 's/\tlexutil.RegisterType/\/\/\tlexutil.RegisterType/' api/tangled/* 286 + sed -i.bak 's/\tutil/\/\/\tutil/' api/tangled/* 289 287 # 
lexgen generates incomplete Marshaler/Unmarshaler for union types 290 288 find api/tangled/*.go -not -name "cbor_gen.go" -exec \ 291 289 sed -i '/^func.*\(MarshalCBOR\|UnmarshalCBOR\)/,/^}/ s/^/\/\/ /' {} + ··· 325 323 imports = [./nix/modules/spindle.nix]; 326 324 327 325 services.tangled.spindle.package = lib.mkDefault self.packages.${pkgs.stdenv.hostPlatform.system}.spindle; 328 - services.tangled.spindle.tap-package = lib.mkDefault self.packages.${pkgs.system}.tap; 329 - }; 330 - nixosModules.did-method-plc = { 331 - lib, 332 - pkgs, 333 - ... 334 - }: { 335 - imports = [./nix/modules/did-method-plc.nix]; 336 - services.did-method-plc.package = lib.mkDefault self.packages.${pkgs.system}.did-method-plc; 337 - }; 338 - nixosModules.bluesky-relay = { 339 - lib, 340 - pkgs, 341 - ... 342 - }: { 343 - imports = [./nix/modules/bluesky-relay.nix]; 344 - services.bluesky-relay.package = lib.mkDefault self.packages.${pkgs.system}.bluesky-relay; 345 - }; 346 - nixosModules.bluesky-jetstream = { 347 - lib, 348 - pkgs, 349 - ... 350 - }: { 351 - imports = [./nix/modules/bluesky-jetstream.nix]; 352 - services.bluesky-jetstream.package = lib.mkDefault self.packages.${pkgs.system}.bluesky-jetstream; 353 326 }; 354 327 }; 355 328 }
+4 -5
go.mod
··· 5 5 require ( 6 6 github.com/Blank-Xu/sql-adapter v1.1.1 7 7 github.com/alecthomas/assert/v2 v2.11.0 8 - github.com/alecthomas/chroma/v2 v2.15.0 8 + github.com/alecthomas/chroma/v2 v2.23.1 9 9 github.com/avast/retry-go/v4 v4.6.1 10 10 github.com/blevesearch/bleve/v2 v2.5.3 11 11 github.com/bluekeyes/go-gitdiff v0.8.1 ··· 29 29 github.com/gorilla/feeds v1.2.0 30 30 github.com/gorilla/sessions v1.4.0 31 31 github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 32 - github.com/hashicorp/go-version v1.8.0 33 32 github.com/hiddeco/sshsig v0.2.0 34 33 github.com/hpcloud/tail v1.0.0 35 34 github.com/ipfs/go-cid v0.5.0 ··· 50 49 github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc 51 50 gitlab.com/staticnoise/goldmark-callout v0.0.0-20240609120641-6366b799e4ab 52 51 golang.org/x/crypto v0.40.0 53 - golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b 54 52 golang.org/x/image v0.31.0 55 53 golang.org/x/net v0.42.0 56 54 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da ··· 62 60 github.com/Microsoft/go-winio v0.6.2 // indirect 63 61 github.com/ProtonMail/go-crypto v1.3.0 // indirect 64 62 github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect 65 - github.com/alecthomas/repr v0.4.0 // indirect 63 + github.com/alecthomas/repr v0.5.2 // indirect 66 64 github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect 67 65 github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect 68 66 github.com/aymerick/douceur v0.2.0 // indirect ··· 204 202 go.uber.org/atomic v1.11.0 // indirect 205 203 go.uber.org/multierr v1.11.0 // indirect 206 204 go.uber.org/zap v1.27.0 // indirect 205 + golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect 207 206 golang.org/x/sync v0.17.0 // indirect 208 207 golang.org/x/sys v0.34.0 // indirect 209 208 golang.org/x/text v0.29.0 // indirect ··· 225 224 226 225 replace github.com/bluekeyes/go-gitdiff => tangled.sh/oppi.li/go-gitdiff v0.8.2 227 226 228 - replace github.com/alecthomas/chroma/v2 => github.com/oppiliappan/chroma/v2 v2.19.0 227 + replace github.com/alecthomas/chroma/v2 => github.com/oppiliappan/chroma/v2 v2.24.2 229 228 230 229 // from bluesky-social/indigo 231 230 replace github.com/gocql/gocql => github.com/scylladb/gocql v1.14.4
+4 -5
go.sum
··· 13 13 github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0= 14 14 github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= 15 15 github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= 16 - github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= 17 16 github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= 17 + github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs= 18 + github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= 18 19 github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= 19 20 github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= 20 21 github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= ··· 264 265 github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= 265 266 github.com/hashicorp/go-sockaddr v1.0.7 h1:G+pTkSO01HpR5qCxg7lxfsFEZaG+C0VssTy/9dbT+Fw= 266 267 github.com/hashicorp/go-sockaddr v1.0.7/go.mod h1:FZQbEYa1pxkQ7WLpyXJ6cbjpT8q0YgQaK/JakXqGyWw= 267 - github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4= 268 - github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= 269 268 github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= 270 269 github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= 271 270 github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= ··· 414 413 github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= 415 414 github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b h1:FfH+VrHHk6Lxt9HdVS0PXzSXFyS2NbZKXv33FYPol0A= 416 415 github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b/go.mod h1:AC62GU6hc0BrNm+9RK9VSiwa/EUe1bkIeFORAMcHvJU= 417 - github.com/oppiliappan/chroma/v2 v2.19.0 h1:PN7/pb+6JRKCva30NPTtRJMlrOyzgpPpIroNzy4ekHU= 418 - github.com/oppiliappan/chroma/v2 v2.19.0/go.mod h1:RVX6AvYm4VfYe/zsk7mjHueLDZor3aWCNE14TFlepBk= 416 + github.com/oppiliappan/chroma/v2 v2.24.2 h1:lHB9tWQxDoHa6sYEDdFep8SX6FPMmAF+ocGUffFwujE= 417 + github.com/oppiliappan/chroma/v2 v2.24.2/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o= 419 418 github.com/oppiliappan/go-git/v5 v5.17.0 h1:CuJnpcIDxr0oiNaSHMconovSWnowHznVDG+AhjGuSEo= 420 419 github.com/oppiliappan/go-git/v5 v5.17.0/go.mod h1:q/FE8C3SPMoRN7LoH9vRFiBzidAOBWJPS1CqVS8DN+w= 421 420 github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
+15 -5
input.css
··· 92 92 label { 93 93 @apply block text-gray-900 text-sm font-bold py-2 uppercase dark:text-gray-100; 94 94 } 95 - input { 96 - @apply border border-gray-400 block rounded bg-gray-50 focus:ring-black p-3 dark:bg-gray-800 dark:border-gray-600 dark:text-white dark:focus:ring-gray-400; 97 - } 98 - textarea { 99 - @apply border border-gray-400 block rounded bg-gray-50 focus:ring-black p-3 dark:bg-gray-800 dark:border-gray-600 dark:text-white dark:focus:ring-gray-400 font-mono; 95 + input, textarea { 96 + @apply 97 + block rounded p-3 98 + bg-gray-50 dark:bg-gray-800 dark:text-white 99 + border border-gray-300 dark:border-gray-600 100 + focus:outline-none focus:ring-1 focus:ring-gray-400 dark:focus:ring-gray-500; 100 101 } 101 102 details summary::-webkit-details-marker { 102 103 display: none; ··· 172 173 173 174 .prose .heading .anchor:hover { 174 175 @apply opacity-70; 176 + } 177 + 178 + .prose h1:target, 179 + .prose h2:target, 180 + .prose h3:target, 181 + .prose h4:target, 182 + .prose h5:target, 183 + .prose h6:target { 184 + @apply bg-yellow-200/30 dark:bg-yellow-600/30; 175 185 } 176 186 177 187 .prose a.footnote-backref {
+1 -2
jetstream/jetstream.go
··· 159 159 j.cancelMu.Unlock() 160 160 161 161 if err := j.client.ConnectAndRead(connCtx, cursor); err != nil { 162 - l.Error("error reading jetstream, retry in 3s", "error", err) 162 + l.Error("error reading jetstream", "error", err) 163 163 cancel() 164 - time.Sleep(3 * time.Second) 165 164 continue 166 165 } 167 166
+46 -3
knotserver/git/branch.go
··· 12 12 "tangled.org/core/types" 13 13 ) 14 14 15 - func (g *GitRepo) Branches() ([]types.Branch, error) { 15 + type BranchesOptions struct { 16 + Limit int 17 + Offset int 18 + } 19 + 20 + func (g *GitRepo) Branches(opts *BranchesOptions) ([]types.Branch, error) { 21 + if opts == nil { 22 + opts = &BranchesOptions{} 23 + } 24 + 16 25 fields := []string{ 17 26 "refname:short", 18 27 "objectname", ··· 33 42 if i != 0 { 34 43 outFormat.WriteString(fieldSeparator) 35 44 } 36 - outFormat.WriteString(fmt.Sprintf("%%(%s)", f)) 45 + fmt.Fprintf(&outFormat, "%%(%s)", f) 37 46 } 38 47 outFormat.WriteString("") 39 48 outFormat.WriteString(recordSeparator) 40 49 41 - output, err := g.forEachRef(outFormat.String(), "refs/heads") 50 + args := []string{outFormat.String(), "--sort=-creatordate"} 51 + 52 + // only add the count if the limit is a non-zero value, 53 + // if it is zero, get as many tags as we can 54 + if opts.Limit > 0 { 55 + args = append(args, fmt.Sprintf("--count=%d", opts.Offset+opts.Limit)) 56 + } 57 + 58 + args = append(args, "refs/heads") 59 + 60 + output, err := g.forEachRef(args...) 42 61 if err != nil { 43 62 return nil, fmt.Errorf("failed to get branches: %w", err) 44 63 } ··· 48 67 return nil, nil 49 68 } 50 69 70 + startIdx := opts.Offset 71 + if startIdx >= len(records) { 72 + return nil, nil 73 + } 74 + 75 + endIdx := len(records) 76 + if opts.Limit > 0 { 77 + endIdx = min(startIdx+opts.Limit, len(records)) 78 + } 79 + 80 + records = records[startIdx:endIdx] 51 81 branches := make([]types.Branch, 0, len(records)) 52 82 53 83 // ignore errors here ··· 109 139 110 140 slices.Reverse(branches) 111 141 return branches, nil 142 + } 143 + 144 + func (g *GitRepo) Branch(name string) (*plumbing.Reference, error) { 145 + ref, err := g.r.Reference(plumbing.NewBranchReferenceName(name), false) 146 + if err != nil { 147 + return nil, fmt.Errorf("branch: %w", err) 148 + } 149 + 150 + if !ref.Name().IsBranch() { 151 + return nil, fmt.Errorf("branch: %s is not a branch", ref.Name()) 152 + } 153 + 154 + return ref, nil 112 155 } 113 156 114 157 func (g *GitRepo) DeleteBranch(branch string) error {
+355
knotserver/git/branch_test.go
··· 1 + package git 2 + 3 + import ( 4 + "path/filepath" 5 + "slices" 6 + "testing" 7 + 8 + gogit "github.com/go-git/go-git/v5" 9 + "github.com/go-git/go-git/v5/plumbing" 10 + "github.com/stretchr/testify/assert" 11 + "github.com/stretchr/testify/require" 12 + "github.com/stretchr/testify/suite" 13 + 14 + "tangled.org/core/sets" 15 + ) 16 + 17 + type BranchSuite struct { 18 + suite.Suite 19 + *RepoSuite 20 + } 21 + 22 + func TestBranchSuite(t *testing.T) { 23 + t.Parallel() 24 + suite.Run(t, new(BranchSuite)) 25 + } 26 + 27 + func (s *BranchSuite) SetupTest() { 28 + s.RepoSuite = NewRepoSuite(s.T()) 29 + } 30 + 31 + func (s *BranchSuite) TearDownTest() { 32 + s.RepoSuite.cleanup() 33 + } 34 + 35 + func (s *BranchSuite) setupRepoWithBranches() { 36 + s.init() 37 + 38 + // get the initial commit on master 39 + head, err := s.repo.r.Head() 40 + require.NoError(s.T(), err) 41 + initialCommit := head.Hash() 42 + 43 + // create multiple branches with commits 44 + // branch-1 45 + s.createBranch("branch-1", initialCommit) 46 + s.checkoutBranch("branch-1") 47 + _ = s.commitFile("file1.txt", "content 1", "Add file1 on branch-1") 48 + 49 + // branch-2 50 + s.createBranch("branch-2", initialCommit) 51 + s.checkoutBranch("branch-2") 52 + _ = s.commitFile("file2.txt", "content 2", "Add file2 on branch-2") 53 + 54 + // branch-3 55 + s.createBranch("branch-3", initialCommit) 56 + s.checkoutBranch("branch-3") 57 + _ = s.commitFile("file3.txt", "content 3", "Add file3 on branch-3") 58 + 59 + // branch-4 60 + s.createBranch("branch-4", initialCommit) 61 + s.checkoutBranch("branch-4") 62 + s.commitFile("file4.txt", "content 4", "Add file4 on branch-4") 63 + 64 + // back to master and make a commit 65 + s.checkoutBranch("master") 66 + s.commitFile("master-file.txt", "master content", "Add file on master") 67 + 68 + // verify we have multiple branches 69 + refs, err := s.repo.r.References() 70 + require.NoError(s.T(), err) 71 + 72 + branchCount := 0 73 + err = refs.ForEach(func(ref *plumbing.Reference) error { 74 + if ref.Name().IsBranch() { 75 + branchCount++ 76 + } 77 + return nil 78 + }) 79 + require.NoError(s.T(), err) 80 + 81 + // we should have 5 branches: master, branch-1, branch-2, branch-3, branch-4 82 + assert.Equal(s.T(), 5, branchCount, "expected 5 branches") 83 + } 84 + 85 + func (s *BranchSuite) TestBranches_All() { 86 + s.setupRepoWithBranches() 87 + 88 + branches, err := s.repo.Branches(&BranchesOptions{}) 89 + require.NoError(s.T(), err) 90 + 91 + assert.Len(s.T(), branches, 5, "expected 5 branches") 92 + 93 + expectedBranches := sets.Collect(slices.Values([]string{ 94 + "master", 95 + "branch-1", 96 + "branch-2", 97 + "branch-3", 98 + "branch-4", 99 + })) 100 + 101 + for _, branch := range branches { 102 + assert.True(s.T(), expectedBranches.Contains(branch.Reference.Name), 103 + "unexpected branch: %s", branch.Reference.Name) 104 + assert.NotEmpty(s.T(), branch.Reference.Hash, "branch hash should not be empty") 105 + assert.NotNil(s.T(), branch.Commit, "branch commit should not be nil") 106 + } 107 + } 108 + 109 + func (s *BranchSuite) TestBranches_WithLimit() { 110 + s.setupRepoWithBranches() 111 + 112 + tests := []struct { 113 + name string 114 + limit int 115 + expectedCount int 116 + }{ 117 + { 118 + name: "limit 1", 119 + limit: 1, 120 + expectedCount: 1, 121 + }, 122 + { 123 + name: "limit 2", 124 + limit: 2, 125 + expectedCount: 2, 126 + }, 127 + { 128 + name: "limit 3", 129 + limit: 3, 130 + expectedCount: 3, 131 + }, 132 + { 133 + name: "limit 10 (more than available)", 134 + limit: 
10, 135 + expectedCount: 5, 136 + }, 137 + } 138 + 139 + for _, tt := range tests { 140 + s.Run(tt.name, func() { 141 + branches, err := s.repo.Branches(&BranchesOptions{ 142 + Limit: tt.limit, 143 + }) 144 + require.NoError(s.T(), err) 145 + assert.Len(s.T(), branches, tt.expectedCount, "expected %d branches", tt.expectedCount) 146 + }) 147 + } 148 + } 149 + 150 + func (s *BranchSuite) TestBranches_WithOffset() { 151 + s.setupRepoWithBranches() 152 + 153 + tests := []struct { 154 + name string 155 + offset int 156 + expectedCount int 157 + }{ 158 + { 159 + name: "offset 0", 160 + offset: 0, 161 + expectedCount: 5, 162 + }, 163 + { 164 + name: "offset 1", 165 + offset: 1, 166 + expectedCount: 4, 167 + }, 168 + { 169 + name: "offset 2", 170 + offset: 2, 171 + expectedCount: 3, 172 + }, 173 + { 174 + name: "offset 4", 175 + offset: 4, 176 + expectedCount: 1, 177 + }, 178 + { 179 + name: "offset 5 (all skipped)", 180 + offset: 5, 181 + expectedCount: 0, 182 + }, 183 + { 184 + name: "offset 10 (more than available)", 185 + offset: 10, 186 + expectedCount: 0, 187 + }, 188 + } 189 + 190 + for _, tt := range tests { 191 + s.Run(tt.name, func() { 192 + branches, err := s.repo.Branches(&BranchesOptions{ 193 + Offset: tt.offset, 194 + }) 195 + require.NoError(s.T(), err) 196 + assert.Len(s.T(), branches, tt.expectedCount, "expected %d branches", tt.expectedCount) 197 + }) 198 + } 199 + } 200 + 201 + func (s *BranchSuite) TestBranches_WithLimitAndOffset() { 202 + s.setupRepoWithBranches() 203 + 204 + tests := []struct { 205 + name string 206 + limit int 207 + offset int 208 + expectedCount int 209 + }{ 210 + { 211 + name: "limit 2, offset 0", 212 + limit: 2, 213 + offset: 0, 214 + expectedCount: 2, 215 + }, 216 + { 217 + name: "limit 2, offset 1", 218 + limit: 2, 219 + offset: 1, 220 + expectedCount: 2, 221 + }, 222 + { 223 + name: "limit 2, offset 3", 224 + limit: 2, 225 + offset: 3, 226 + expectedCount: 2, 227 + }, 228 + { 229 + name: "limit 2, offset 4", 230 + limit: 2, 231 + offset: 4, 232 + expectedCount: 1, 233 + }, 234 + { 235 + name: "limit 3, offset 2", 236 + limit: 3, 237 + offset: 2, 238 + expectedCount: 3, 239 + }, 240 + { 241 + name: "limit 10, offset 3", 242 + limit: 10, 243 + offset: 3, 244 + expectedCount: 2, 245 + }, 246 + } 247 + 248 + for _, tt := range tests { 249 + s.Run(tt.name, func() { 250 + branches, err := s.repo.Branches(&BranchesOptions{ 251 + Limit: tt.limit, 252 + Offset: tt.offset, 253 + }) 254 + require.NoError(s.T(), err) 255 + assert.Len(s.T(), branches, tt.expectedCount, "expected %d branches", tt.expectedCount) 256 + }) 257 + } 258 + } 259 + 260 + func (s *BranchSuite) TestBranches_EmptyRepo() { 261 + repoPath := filepath.Join(s.tempDir, "empty-repo") 262 + 263 + _, err := gogit.PlainInit(repoPath, false) 264 + require.NoError(s.T(), err) 265 + 266 + gitRepo, err := PlainOpen(repoPath) 267 + require.NoError(s.T(), err) 268 + 269 + branches, err := gitRepo.Branches(&BranchesOptions{}) 270 + require.NoError(s.T(), err) 271 + 272 + if branches != nil { 273 + assert.Empty(s.T(), branches, "expected no branches in empty repo") 274 + } 275 + } 276 + 277 + func (s *BranchSuite) TestBranches_Pagination() { 278 + s.setupRepoWithBranches() 279 + 280 + allBranches, err := s.repo.Branches(&BranchesOptions{}) 281 + require.NoError(s.T(), err) 282 + assert.Len(s.T(), allBranches, 5, "expected 5 branches") 283 + 284 + pageSize := 2 285 + var paginatedBranches []string 286 + 287 + for offset := 0; offset < len(allBranches); offset += pageSize { 288 + branches, err := 
s.repo.Branches(&BranchesOptions{ 289 + Limit: pageSize, 290 + Offset: offset, 291 + }) 292 + require.NoError(s.T(), err) 293 + for _, branch := range branches { 294 + paginatedBranches = append(paginatedBranches, branch.Reference.Name) 295 + } 296 + } 297 + 298 + assert.Len(s.T(), paginatedBranches, len(allBranches), "pagination should return all branches") 299 + 300 + // create sets to verify all branches are present 301 + allBranchNames := sets.New[string]() 302 + for _, branch := range allBranches { 303 + allBranchNames.Insert(branch.Reference.Name) 304 + } 305 + 306 + paginatedBranchNames := sets.New[string]() 307 + for _, name := range paginatedBranches { 308 + paginatedBranchNames.Insert(name) 309 + } 310 + 311 + assert.EqualValues(s.T(), allBranchNames, paginatedBranchNames, 312 + "pagination should return the same set of branches") 313 + } 314 + 315 + func (s *BranchSuite) TestBranches_VerifyBranchFields() { 316 + s.setupRepoWithBranches() 317 + 318 + branches, err := s.repo.Branches(&BranchesOptions{}) 319 + require.NoError(s.T(), err) 320 + 321 + found := false 322 + for i := range branches { 323 + if branches[i].Reference.Name == "master" { 324 + found = true 325 + assert.Equal(s.T(), "master", branches[i].Reference.Name) 326 + assert.NotEmpty(s.T(), branches[i].Reference.Hash) 327 + assert.NotNil(s.T(), branches[i].Commit) 328 + assert.NotEmpty(s.T(), branches[i].Commit.Author.Name) 329 + assert.NotEmpty(s.T(), branches[i].Commit.Author.Email) 330 + assert.False(s.T(), branches[i].Commit.Hash.IsZero()) 331 + break 332 + } 333 + } 334 + 335 + assert.True(s.T(), found, "master branch not found") 336 + } 337 + 338 + func (s *BranchSuite) TestBranches_NilOptions() { 339 + s.setupRepoWithBranches() 340 + 341 + branches, err := s.repo.Branches(nil) 342 + require.NoError(s.T(), err) 343 + assert.Len(s.T(), branches, 5, "nil options should return all branches") 344 + } 345 + 346 + func (s *BranchSuite) TestBranches_ZeroLimitAndOffset() { 347 + s.setupRepoWithBranches() 348 + 349 + branches, err := s.repo.Branches(&BranchesOptions{ 350 + Limit: 0, 351 + Offset: 0, 352 + }) 353 + require.NoError(s.T(), err) 354 + assert.Len(s.T(), branches, 5, "zero limit should return all branches") 355 + }
+1 -14
knotserver/git/git.go
··· 122 122 func (g *GitRepo) TotalCommits() (int, error) { 123 123 output, err := g.revList( 124 124 g.h.String(), 125 - fmt.Sprintf("--count"), 125 + "--count", 126 126 ) 127 127 if err != nil { 128 128 return 0, fmt.Errorf("failed to run rev-list: %w", err) ··· 250 250 251 251 // path is not a submodule 252 252 return nil, ErrNotSubmodule 253 - } 254 - 255 - func (g *GitRepo) Branch(name string) (*plumbing.Reference, error) { 256 - ref, err := g.r.Reference(plumbing.NewBranchReferenceName(name), false) 257 - if err != nil { 258 - return nil, fmt.Errorf("branch: %w", err) 259 - } 260 - 261 - if !ref.Name().IsBranch() { 262 - return nil, fmt.Errorf("branch: %s is not a branch", ref.Name()) 263 - } 264 - 265 - return ref, nil 266 253 } 267 254 268 255 func (g *GitRepo) SetDefaultBranch(branch string) error {
+94 -31
knotserver/git/last_commit.go
··· 6 6 "crypto/sha256" 7 7 "fmt" 8 8 "io" 9 + "iter" 9 10 "os/exec" 10 11 "path" 12 + "strconv" 11 13 "strings" 12 14 "time" 13 15 14 16 "github.com/dgraph-io/ristretto" 15 17 "github.com/go-git/go-git/v5/plumbing" 16 - "github.com/go-git/go-git/v5/plumbing/object" 18 + "tangled.org/core/sets" 19 + "tangled.org/core/types" 17 20 ) 18 21 19 22 var ( ··· 72 75 type commit struct { 73 76 hash plumbing.Hash 74 77 when time.Time 75 - files []string 78 + files sets.Set[string] 76 79 message string 77 80 } 78 81 82 + func newCommit() commit { 83 + return commit{ 84 + files: sets.New[string](), 85 + } 86 + } 87 + 88 + type lastCommitDir struct { 89 + dir string 90 + entries []string 91 + } 92 + 93 + func (l lastCommitDir) children() iter.Seq[string] { 94 + return func(yield func(string) bool) { 95 + for _, child := range l.entries { 96 + if !yield(path.Join(l.dir, child)) { 97 + return 98 + } 99 + } 100 + } 101 + } 102 + 79 103 func cacheKey(g *GitRepo, path string) string { 80 104 sep := byte(':') 81 105 hash := sha256.Sum256(fmt.Append([]byte{}, g.path, sep, g.h.String(), sep, path)) 82 106 return fmt.Sprintf("%x", hash) 83 107 } 84 108 85 - func (g *GitRepo) calculateCommitTimeIn(ctx context.Context, subtree *object.Tree, parent string, timeout time.Duration) (map[string]commit, error) { 109 + func (g *GitRepo) lastCommitDirIn(ctx context.Context, parent lastCommitDir, timeout time.Duration) (map[string]commit, error) { 86 110 ctx, cancel := context.WithTimeout(ctx, timeout) 87 111 defer cancel() 88 - return g.calculateCommitTime(ctx, subtree, parent) 112 + return g.lastCommitDir(ctx, parent) 89 113 } 90 114 91 - func (g *GitRepo) calculateCommitTime(ctx context.Context, subtree *object.Tree, parent string) (map[string]commit, error) { 92 - filesToDo := make(map[string]struct{}) 115 + func (g *GitRepo) lastCommitDir(ctx context.Context, parent lastCommitDir) (map[string]commit, error) { 116 + filesToDo := sets.Collect(parent.children()) 93 117 filesDone := make(map[string]commit) 94 - for _, e := range subtree.Entries { 95 - fpath := path.Clean(path.Join(parent, e.Name)) 96 - filesToDo[fpath] = struct{}{} 97 - } 98 118 99 - for _, e := range subtree.Entries { 100 - f := path.Clean(path.Join(parent, e.Name)) 101 - cacheKey := cacheKey(g, f) 119 + for p := range filesToDo.All() { 120 + cacheKey := cacheKey(g, p) 102 121 if cached, ok := commitCache.Get(cacheKey); ok { 103 - filesDone[f] = cached.(commit) 104 - delete(filesToDo, f) 122 + filesDone[p] = cached.(commit) 123 + filesToDo.Remove(p) 105 124 } else { 106 - filesToDo[f] = struct{}{} 125 + filesToDo.Insert(p) 107 126 } 108 127 } 109 128 110 - if len(filesToDo) == 0 { 129 + if filesToDo.IsEmpty() { 111 130 return filesDone, nil 112 131 } 113 132 ··· 115 134 defer cancel() 116 135 117 136 pathSpec := "." 
118 - if parent != "" { 119 - pathSpec = parent 137 + if parent.dir != "" { 138 + pathSpec = parent.dir 139 + } 140 + if filesToDo.Len() == 1 { 141 + // this is an optimization for the scenario where we want to calculate 142 + // the last commit for just one path, we can directly set the pathspec to that path 143 + for s := range filesToDo.All() { 144 + pathSpec = s 145 + } 120 146 } 121 - output, err := g.streamingGitLog(ctx, "--pretty=format:%H,%ad,%s", "--date=iso", "--name-only", "--", pathSpec) 147 + 148 + output, err := g.streamingGitLog(ctx, "--pretty=format:%H,%ad,%s", "--date=unix", "--name-only", "--", pathSpec) 122 149 if err != nil { 123 150 return nil, err 124 151 } 125 152 defer output.Close() // Ensure the git process is properly cleaned up 126 153 127 154 reader := bufio.NewReader(output) 128 - var current commit 155 + current := newCommit() 129 156 for { 130 157 line, err := reader.ReadString('\n') 131 158 if err != nil && err != io.EOF { ··· 136 163 if line == "" { 137 164 if !current.hash.IsZero() { 138 165 // we have a fully parsed commit 139 - for _, f := range current.files { 140 - if _, ok := filesToDo[f]; ok { 166 + for f := range current.files.All() { 167 + if filesToDo.Contains(f) { 141 168 filesDone[f] = current 142 - delete(filesToDo, f) 169 + filesToDo.Remove(f) 143 170 commitCache.Set(cacheKey(g, f), current, 0) 144 171 } 145 172 } 146 173 147 - if len(filesToDo) == 0 { 148 - cancel() 174 + if filesToDo.IsEmpty() { 149 175 break 150 176 } 151 - current = commit{} 177 + current = newCommit() 152 178 } 153 179 } else if current.hash.IsZero() { 154 180 parts := strings.SplitN(line, ",", 3) 155 181 if len(parts) == 3 { 156 182 current.hash = plumbing.NewHash(parts[0]) 157 - current.when, _ = time.Parse("2006-01-02 15:04:05 -0700", parts[1]) 183 + epochTime, _ := strconv.ParseInt(parts[1], 10, 64) 184 + current.when = time.Unix(epochTime, 0) 158 185 current.message = parts[2] 159 186 } 160 187 } else { 161 188 // all ancestors along this path should also be included 162 189 file := path.Clean(line) 163 - ancestors := ancestors(file) 164 - current.files = append(current.files, file) 165 - current.files = append(current.files, ancestors...) 190 + current.files.Insert(file) 191 + for _, a := range ancestors(file) { 192 + current.files.Insert(a) 193 + } 166 194 } 167 195 168 196 if err == io.EOF { ··· 171 199 } 172 200 173 201 return filesDone, nil 202 + } 203 + 204 + // LastCommitFile returns the last commit information for a specific file path 205 + func (g *GitRepo) LastCommitFile(ctx context.Context, filePath string) (*types.LastCommitInfo, error) { 206 + parent, child := path.Split(filePath) 207 + parent = path.Clean(parent) 208 + if parent == "." { 209 + parent = "" 210 + } 211 + 212 + lastCommitDir := lastCommitDir{ 213 + dir: parent, 214 + entries: []string{child}, 215 + } 216 + 217 + times, err := g.lastCommitDirIn(ctx, lastCommitDir, 2*time.Second) 218 + if err != nil { 219 + return nil, fmt.Errorf("calculate commit time: %w", err) 220 + } 221 + 222 + // extract the only element of the map, the commit info of the current path 223 + var commitInfo *commit 224 + for _, c := range times { 225 + commitInfo = &c 226 + } 227 + 228 + if commitInfo == nil { 229 + return nil, fmt.Errorf("no commit found for path: %s", filePath) 230 + } 231 + 232 + return &types.LastCommitInfo{ 233 + Hash: commitInfo.hash, 234 + Message: commitInfo.message, 235 + When: commitInfo.when, 236 + }, nil 174 237 } 175 238 176 239 func ancestors(p string) []string {
+30 -30
knotserver/git/merge.go
··· 107 107 return fmt.Sprintf("merge failed: %s", e.Message) 108 108 } 109 109 110 - func (g *GitRepo) createTempFileWithPatch(patchData string) (string, error) { 110 + func createTemp(data string) (string, error) { 111 111 tmpFile, err := os.CreateTemp("", "git-patch-*.patch") 112 112 if err != nil { 113 113 return "", fmt.Errorf("failed to create temporary patch file: %w", err) 114 114 } 115 115 116 - if _, err := tmpFile.Write([]byte(patchData)); err != nil { 116 + if _, err := tmpFile.Write([]byte(data)); err != nil { 117 117 tmpFile.Close() 118 118 os.Remove(tmpFile.Name()) 119 119 return "", fmt.Errorf("failed to write patch data to temporary file: %w", err) ··· 127 127 return tmpFile.Name(), nil 128 128 } 129 129 130 - func (g *GitRepo) cloneRepository(targetBranch string) (string, error) { 130 + func (g *GitRepo) cloneTemp(targetBranch string) (string, error) { 131 131 tmpDir, err := os.MkdirTemp("", "git-clone-") 132 132 if err != nil { 133 133 return "", fmt.Errorf("failed to create temporary directory: %w", err) ··· 147 147 return tmpDir, nil 148 148 } 149 149 150 - func (g *GitRepo) checkPatch(tmpDir, patchFile string) error { 151 - var stderr bytes.Buffer 152 - 153 - cmd := exec.Command("git", "-C", tmpDir, "apply", "--check", "-v", patchFile) 154 - cmd.Stderr = &stderr 155 - 156 - if err := cmd.Run(); err != nil { 157 - conflicts := parseGitApplyErrors(stderr.String()) 158 - return &ErrMerge{ 159 - Message: "patch cannot be applied cleanly", 160 - Conflicts: conflicts, 161 - HasConflict: len(conflicts) > 0, 162 - OtherError: err, 163 - } 164 - } 165 - return nil 166 - } 167 - 168 150 func (g *GitRepo) applyPatch(patchData, patchFile string, opts MergeOptions) error { 169 151 var stderr bytes.Buffer 170 152 var cmd *exec.Cmd ··· 173 155 exec.Command("git", "-C", g.path, "config", "user.name", opts.CommitterName).Run() 174 156 exec.Command("git", "-C", g.path, "config", "user.email", opts.CommitterEmail).Run() 175 157 exec.Command("git", "-C", g.path, "config", "advice.mergeConflict", "false").Run() 158 + exec.Command("git", "-C", g.path, "config", "advice.amWorkDir", "false").Run() 176 159 177 160 // if patch is a format-patch, apply using 'git am' 178 161 if opts.FormatPatch { ··· 213 196 cmd.Stderr = &stderr 214 197 215 198 if err := cmd.Run(); err != nil { 216 - return fmt.Errorf("patch application failed: %s", stderr.String()) 199 + conflicts := parseGitApplyErrors(stderr.String()) 200 + return &ErrMerge{ 201 + Message: "patch cannot be applied cleanly", 202 + Conflicts: conflicts, 203 + HasConflict: len(conflicts) > 0, 204 + OtherError: err, 205 + } 217 206 } 218 207 219 208 return nil ··· 241 230 } 242 231 243 232 func (g *GitRepo) applySingleMailbox(singlePatch types.FormatPatch) (plumbing.Hash, error) { 244 - tmpPatch, err := g.createTempFileWithPatch(singlePatch.Raw) 233 + tmpPatch, err := createTemp(singlePatch.Raw) 245 234 if err != nil { 246 235 return plumbing.ZeroHash, fmt.Errorf("failed to create temporary patch file for singluar mailbox patch: %w", err) 247 236 } ··· 257 246 log.Println("head before apply", head.Hash().String()) 258 247 259 248 if err := cmd.Run(); err != nil { 260 - return plumbing.ZeroHash, fmt.Errorf("patch application failed: %s", stderr.String()) 249 + conflicts := parseGitApplyErrors(stderr.String()) 250 + return plumbing.ZeroHash, &ErrMerge{ 251 + Message: "patch cannot be applied cleanly", 252 + Conflicts: conflicts, 253 + HasConflict: len(conflicts) > 0, 254 + OtherError: err, 255 + } 261 256 } 262 257 263 258 if err := g.Refresh(); 
err != nil { ··· 324 319 return newHash, nil 325 320 } 326 321 327 - func (g *GitRepo) MergeCheck(patchData string, targetBranch string) error { 322 + func (g *GitRepo) MergeCheckWithOptions(patchData string, targetBranch string, mo MergeOptions) error { 328 323 if val, ok := mergeCheckCache.Get(g, patchData, targetBranch); ok { 329 324 return val 330 325 } 331 326 332 - patchFile, err := g.createTempFileWithPatch(patchData) 327 + patchFile, err := createTemp(patchData) 333 328 if err != nil { 334 329 return &ErrMerge{ 335 330 Message: err.Error(), ··· 338 333 } 339 334 defer os.Remove(patchFile) 340 335 341 - tmpDir, err := g.cloneRepository(targetBranch) 336 + tmpDir, err := g.cloneTemp(targetBranch) 342 337 if err != nil { 343 338 return &ErrMerge{ 344 339 Message: err.Error(), ··· 347 342 } 348 343 defer os.RemoveAll(tmpDir) 349 344 350 - result := g.checkPatch(tmpDir, patchFile) 345 + tmpRepo, err := PlainOpen(tmpDir) 346 + if err != nil { 347 + return err 348 + } 349 + 350 + result := tmpRepo.applyPatch(patchData, patchFile, mo) 351 351 mergeCheckCache.Set(g, patchData, targetBranch, result) 352 352 return result 353 353 } 354 354 355 355 func (g *GitRepo) MergeWithOptions(patchData string, targetBranch string, opts MergeOptions) error { 356 - patchFile, err := g.createTempFileWithPatch(patchData) 356 + patchFile, err := createTemp(patchData) 357 357 if err != nil { 358 358 return &ErrMerge{ 359 359 Message: err.Error(), ··· 362 362 } 363 363 defer os.Remove(patchFile) 364 364 365 - tmpDir, err := g.cloneRepository(targetBranch) 365 + tmpDir, err := g.cloneTemp(targetBranch) 366 366 if err != nil { 367 367 return &ErrMerge{ 368 368 Message: err.Error(),
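`MergeCheck` becomes `MergeCheckWithOptions`, which dry-runs the same `applyPatch` path in a temporary clone (so format-patch mailboxes are checked with `git am`, plain diffs with `git apply`) and caches the result per patch and target branch. A hedged caller-side sketch; the commit metadata and the conflict handling below are illustrative, not taken from the diff:

```go
package example

import (
	"errors"
	"fmt"

	"tangled.org/core/knotserver/git"
)

// tryMerge dry-runs patchData against targetBranch and only merges if the
// check passes; repo and the option values are placeholders.
func tryMerge(repo *git.GitRepo, patchData, targetBranch string) error {
	opts := git.MergeOptions{
		CommitMessage:  "merge patch",
		CommitterName:  "knot",
		CommitterEmail: "knot@example.com",
		FormatPatch:    true,
	}

	if err := repo.MergeCheckWithOptions(patchData, targetBranch, opts); err != nil {
		var me *git.ErrMerge
		if errors.As(err, &me) && me.HasConflict {
			// surface the parsed conflicts to the caller before giving up
			for _, c := range me.Conflicts {
				fmt.Println("conflict:", c)
			}
		}
		return err
	}

	// the dry run passed; perform the real merge with the same options
	return repo.MergeWithOptions(patchData, targetBranch, opts)
}
```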
+706
knotserver/git/merge_test.go
··· 1 + package git 2 + 3 + import ( 4 + "os" 5 + "path/filepath" 6 + "strings" 7 + "testing" 8 + 9 + "github.com/go-git/go-git/v5" 10 + "github.com/go-git/go-git/v5/config" 11 + "github.com/go-git/go-git/v5/plumbing" 12 + "github.com/go-git/go-git/v5/plumbing/object" 13 + "github.com/stretchr/testify/assert" 14 + "github.com/stretchr/testify/require" 15 + ) 16 + 17 + type Helper struct { 18 + t *testing.T 19 + tempDir string 20 + repo *GitRepo 21 + } 22 + 23 + func helper(t *testing.T) *Helper { 24 + tempDir, err := os.MkdirTemp("", "git-merge-test-*") 25 + require.NoError(t, err) 26 + 27 + return &Helper{ 28 + t: t, 29 + tempDir: tempDir, 30 + } 31 + } 32 + 33 + func (h *Helper) cleanup() { 34 + if h.tempDir != "" { 35 + os.RemoveAll(h.tempDir) 36 + } 37 + } 38 + 39 + // initRepo initializes a git repository with an initial commit 40 + func (h *Helper) initRepo() *GitRepo { 41 + repoPath := filepath.Join(h.tempDir, "test-repo") 42 + 43 + // initialize repository 44 + r, err := git.PlainInit(repoPath, false) 45 + require.NoError(h.t, err) 46 + 47 + // configure git user 48 + cfg, err := r.Config() 49 + require.NoError(h.t, err) 50 + cfg.User.Name = "Test User" 51 + cfg.User.Email = "test@example.com" 52 + err = r.SetConfig(cfg) 53 + require.NoError(h.t, err) 54 + 55 + // create initial commit with a file 56 + w, err := r.Worktree() 57 + require.NoError(h.t, err) 58 + 59 + // create initial file 60 + initialFile := filepath.Join(repoPath, "README.md") 61 + err = os.WriteFile(initialFile, []byte("# Test Repository\n\nInitial content.\n"), 0644) 62 + require.NoError(h.t, err) 63 + 64 + _, err = w.Add("README.md") 65 + require.NoError(h.t, err) 66 + 67 + _, err = w.Commit("Initial commit", &git.CommitOptions{ 68 + Author: &object.Signature{ 69 + Name: "Test User", 70 + Email: "test@example.com", 71 + }, 72 + }) 73 + require.NoError(h.t, err) 74 + 75 + gitRepo, err := PlainOpen(repoPath) 76 + require.NoError(h.t, err) 77 + 78 + h.repo = gitRepo 79 + return gitRepo 80 + } 81 + 82 + // addFile creates a file in the repository 83 + func (h *Helper) addFile(filename, content string) { 84 + filePath := filepath.Join(h.repo.path, filename) 85 + dir := filepath.Dir(filePath) 86 + 87 + err := os.MkdirAll(dir, 0755) 88 + require.NoError(h.t, err) 89 + 90 + err = os.WriteFile(filePath, []byte(content), 0644) 91 + require.NoError(h.t, err) 92 + } 93 + 94 + // commitFile adds and commits a file 95 + func (h *Helper) commitFile(filename, content, message string) plumbing.Hash { 96 + h.addFile(filename, content) 97 + 98 + w, err := h.repo.r.Worktree() 99 + require.NoError(h.t, err) 100 + 101 + _, err = w.Add(filename) 102 + require.NoError(h.t, err) 103 + 104 + hash, err := w.Commit(message, &git.CommitOptions{ 105 + Author: &object.Signature{ 106 + Name: "Test User", 107 + Email: "test@example.com", 108 + }, 109 + }) 110 + require.NoError(h.t, err) 111 + 112 + return hash 113 + } 114 + 115 + // readFile reads a file from the repository 116 + func (h *Helper) readFile(filename string) string { 117 + content, err := os.ReadFile(filepath.Join(h.repo.path, filename)) 118 + require.NoError(h.t, err) 119 + return string(content) 120 + } 121 + 122 + // fileExists checks if a file exists in the repository 123 + func (h *Helper) fileExists(filename string) bool { 124 + _, err := os.Stat(filepath.Join(h.repo.path, filename)) 125 + return err == nil 126 + } 127 + 128 + func TestApplyPatch_Success(t *testing.T) { 129 + h := helper(t) 130 + defer h.cleanup() 131 + 132 + repo := h.initRepo() 133 + 134 + // modify 
README.md 135 + patch := `diff --git a/README.md b/README.md 136 + index 1234567..abcdefg 100644 137 + --- a/README.md 138 + +++ b/README.md 139 + @@ -1,3 +1,3 @@ 140 + # Test Repository 141 + 142 + -Initial content. 143 + +Modified content. 144 + ` 145 + 146 + patchFile, err := createTemp(patch) 147 + require.NoError(t, err) 148 + defer os.Remove(patchFile) 149 + 150 + opts := MergeOptions{ 151 + CommitMessage: "Apply test patch", 152 + CommitterName: "Test Committer", 153 + CommitterEmail: "committer@example.com", 154 + FormatPatch: false, 155 + } 156 + 157 + err = repo.applyPatch(patch, patchFile, opts) 158 + assert.NoError(t, err) 159 + 160 + // verify the file was modified 161 + content := h.readFile("README.md") 162 + assert.Contains(t, content, "Modified content.") 163 + } 164 + 165 + func TestApplyPatch_AddNewFile(t *testing.T) { 166 + h := helper(t) 167 + defer h.cleanup() 168 + 169 + repo := h.initRepo() 170 + 171 + // add a new file 172 + patch := `diff --git a/newfile.txt b/newfile.txt 173 + new file mode 100644 174 + index 0000000..ce01362 175 + --- /dev/null 176 + +++ b/newfile.txt 177 + @@ -0,0 +1 @@ 178 + +hello 179 + ` 180 + 181 + patchFile, err := createTemp(patch) 182 + require.NoError(t, err) 183 + defer os.Remove(patchFile) 184 + 185 + opts := MergeOptions{ 186 + CommitMessage: "Add new file", 187 + CommitterName: "Test Committer", 188 + CommitterEmail: "committer@example.com", 189 + FormatPatch: false, 190 + } 191 + 192 + err = repo.applyPatch(patch, patchFile, opts) 193 + assert.NoError(t, err) 194 + 195 + assert.True(t, h.fileExists("newfile.txt")) 196 + content := h.readFile("newfile.txt") 197 + assert.Equal(t, "hello\n", content) 198 + } 199 + 200 + func TestApplyPatch_DeleteFile(t *testing.T) { 201 + h := helper(t) 202 + defer h.cleanup() 203 + 204 + repo := h.initRepo() 205 + 206 + // add a file 207 + h.commitFile("deleteme.txt", "content to delete\n", "Add file to delete") 208 + 209 + // delete the file 210 + patch := `diff --git a/deleteme.txt b/deleteme.txt 211 + deleted file mode 100644 212 + index 1234567..0000000 213 + --- a/deleteme.txt 214 + +++ /dev/null 215 + @@ -1 +0,0 @@ 216 + -content to delete 217 + ` 218 + 219 + patchFile, err := createTemp(patch) 220 + require.NoError(t, err) 221 + defer os.Remove(patchFile) 222 + 223 + opts := MergeOptions{ 224 + CommitMessage: "Delete file", 225 + CommitterName: "Test Committer", 226 + CommitterEmail: "committer@example.com", 227 + FormatPatch: false, 228 + } 229 + 230 + err = repo.applyPatch(patch, patchFile, opts) 231 + assert.NoError(t, err) 232 + 233 + assert.False(t, h.fileExists("deleteme.txt")) 234 + } 235 + 236 + func TestApplyPatch_WithAuthor(t *testing.T) { 237 + h := helper(t) 238 + defer h.cleanup() 239 + 240 + repo := h.initRepo() 241 + 242 + patch := `diff --git a/README.md b/README.md 243 + index 1234567..abcdefg 100644 244 + --- a/README.md 245 + +++ b/README.md 246 + @@ -1,3 +1,4 @@ 247 + # Test Repository 248 + 249 + Initial content. 250 + +New line. 
251 + ` 252 + 253 + patchFile, err := createTemp(patch) 254 + require.NoError(t, err) 255 + defer os.Remove(patchFile) 256 + 257 + opts := MergeOptions{ 258 + CommitMessage: "Patch with author", 259 + AuthorName: "Patch Author", 260 + AuthorEmail: "author@example.com", 261 + CommitterName: "Test Committer", 262 + CommitterEmail: "committer@example.com", 263 + FormatPatch: false, 264 + } 265 + 266 + err = repo.applyPatch(patch, patchFile, opts) 267 + assert.NoError(t, err) 268 + 269 + head, err := repo.r.Head() 270 + require.NoError(t, err) 271 + 272 + commit, err := repo.r.CommitObject(head.Hash()) 273 + require.NoError(t, err) 274 + 275 + assert.Equal(t, "Patch Author", commit.Author.Name) 276 + assert.Equal(t, "author@example.com", commit.Author.Email) 277 + } 278 + 279 + func TestApplyPatch_MissingFile(t *testing.T) { 280 + h := helper(t) 281 + defer h.cleanup() 282 + 283 + repo := h.initRepo() 284 + 285 + // patch that modifies a non-existent file 286 + patch := `diff --git a/nonexistent.txt b/nonexistent.txt 287 + index 1234567..abcdefg 100644 288 + --- a/nonexistent.txt 289 + +++ b/nonexistent.txt 290 + @@ -1 +1 @@ 291 + -old content 292 + +new content 293 + ` 294 + 295 + patchFile, err := createTemp(patch) 296 + require.NoError(t, err) 297 + defer os.Remove(patchFile) 298 + 299 + opts := MergeOptions{ 300 + CommitMessage: "Should fail", 301 + CommitterName: "Test Committer", 302 + CommitterEmail: "committer@example.com", 303 + FormatPatch: false, 304 + } 305 + 306 + err = repo.applyPatch(patch, patchFile, opts) 307 + assert.Error(t, err) 308 + assert.Contains(t, err.Error(), "patch application failed") 309 + } 310 + 311 + func TestApplyPatch_Conflict(t *testing.T) { 312 + h := helper(t) 313 + defer h.cleanup() 314 + 315 + repo := h.initRepo() 316 + 317 + // modify the file to create a conflict 318 + h.commitFile("README.md", "# Test Repository\n\nDifferent content.\n", "Modify README") 319 + 320 + // patch that expects different content 321 + patch := `diff --git a/README.md b/README.md 322 + index 1234567..abcdefg 100644 323 + --- a/README.md 324 + +++ b/README.md 325 + @@ -1,3 +1,3 @@ 326 + # Test Repository 327 + 328 + -Initial content. 329 + +Modified content. 
330 + ` 331 + 332 + patchFile, err := createTemp(patch) 333 + require.NoError(t, err) 334 + defer os.Remove(patchFile) 335 + 336 + opts := MergeOptions{ 337 + CommitMessage: "Should conflict", 338 + CommitterName: "Test Committer", 339 + CommitterEmail: "committer@example.com", 340 + FormatPatch: false, 341 + } 342 + 343 + err = repo.applyPatch(patch, patchFile, opts) 344 + assert.Error(t, err) 345 + } 346 + 347 + func TestApplyPatch_MissingDirectory(t *testing.T) { 348 + h := helper(t) 349 + defer h.cleanup() 350 + 351 + repo := h.initRepo() 352 + 353 + // patch that adds a file in a non-existent directory 354 + patch := `diff --git a/subdir/newfile.txt b/subdir/newfile.txt 355 + new file mode 100644 356 + index 0000000..ce01362 357 + --- /dev/null 358 + +++ b/subdir/newfile.txt 359 + @@ -0,0 +1 @@ 360 + +content 361 + ` 362 + 363 + patchFile, err := createTemp(patch) 364 + require.NoError(t, err) 365 + defer os.Remove(patchFile) 366 + 367 + opts := MergeOptions{ 368 + CommitMessage: "Add file in subdir", 369 + CommitterName: "Test Committer", 370 + CommitterEmail: "committer@example.com", 371 + FormatPatch: false, 372 + } 373 + 374 + // git apply should create the directory automatically 375 + err = repo.applyPatch(patch, patchFile, opts) 376 + assert.NoError(t, err) 377 + 378 + // Verify the file and directory were created 379 + assert.True(t, h.fileExists("subdir/newfile.txt")) 380 + } 381 + 382 + func TestApplyMailbox_Single(t *testing.T) { 383 + h := helper(t) 384 + defer h.cleanup() 385 + 386 + repo := h.initRepo() 387 + 388 + // format-patch mailbox format 389 + patch := `From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 390 + From: Patch Author <author@example.com> 391 + Date: Mon, 1 Jan 2024 12:00:00 +0000 392 + Subject: [PATCH] Add new feature 393 + 394 + This is a test patch. 
395 + --- 396 + newfile.txt | 1 + 397 + 1 file changed, 1 insertion(+) 398 + create mode 100644 newfile.txt 399 + 400 + diff --git a/newfile.txt b/newfile.txt 401 + new file mode 100644 402 + index 0000000..ce01362 403 + --- /dev/null 404 + +++ b/newfile.txt 405 + @@ -0,0 +1 @@ 406 + +hello 407 + -- 408 + 2.40.0 409 + ` 410 + 411 + err := repo.applyMailbox(patch) 412 + assert.NoError(t, err) 413 + 414 + assert.True(t, h.fileExists("newfile.txt")) 415 + content := h.readFile("newfile.txt") 416 + assert.Equal(t, "hello\n", content) 417 + } 418 + 419 + func TestApplyMailbox_Multiple(t *testing.T) { 420 + h := helper(t) 421 + defer h.cleanup() 422 + 423 + repo := h.initRepo() 424 + 425 + // multiple patches in mailbox format 426 + patch := `From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 427 + From: Patch Author <author@example.com> 428 + Date: Mon, 1 Jan 2024 12:00:00 +0000 429 + Subject: [PATCH 1/2] Add first file 430 + 431 + --- 432 + file1.txt | 1 + 433 + 1 file changed, 1 insertion(+) 434 + create mode 100644 file1.txt 435 + 436 + diff --git a/file1.txt b/file1.txt 437 + new file mode 100644 438 + index 0000000..ce01362 439 + --- /dev/null 440 + +++ b/file1.txt 441 + @@ -0,0 +1 @@ 442 + +first 443 + -- 444 + 2.40.0 445 + 446 + From 1111111111111111111111111111111111111111 Mon Sep 17 00:00:00 2001 447 + From: Patch Author <author@example.com> 448 + Date: Mon, 1 Jan 2024 12:01:00 +0000 449 + Subject: [PATCH 2/2] Add second file 450 + 451 + --- 452 + file2.txt | 1 + 453 + 1 file changed, 1 insertion(+) 454 + create mode 100644 file2.txt 455 + 456 + diff --git a/file2.txt b/file2.txt 457 + new file mode 100644 458 + index 0000000..ce01362 459 + --- /dev/null 460 + +++ b/file2.txt 461 + @@ -0,0 +1 @@ 462 + +second 463 + -- 464 + 2.40.0 465 + ` 466 + 467 + err := repo.applyMailbox(patch) 468 + assert.NoError(t, err) 469 + 470 + assert.True(t, h.fileExists("file1.txt")) 471 + assert.True(t, h.fileExists("file2.txt")) 472 + 473 + content1 := h.readFile("file1.txt") 474 + assert.Equal(t, "first\n", content1) 475 + 476 + content2 := h.readFile("file2.txt") 477 + assert.Equal(t, "second\n", content2) 478 + } 479 + 480 + func TestApplyMailbox_Conflict(t *testing.T) { 481 + h := helper(t) 482 + defer h.cleanup() 483 + 484 + repo := h.initRepo() 485 + 486 + h.commitFile("README.md", "# Test Repository\n\nConflicting content.\n", "Create conflict") 487 + 488 + patch := `From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 489 + From: Patch Author <author@example.com> 490 + Date: Mon, 1 Jan 2024 12:00:00 +0000 491 + Subject: [PATCH] Modify README 492 + 493 + --- 494 + README.md | 2 +- 495 + 1 file changed, 1 insertion(+), 1 deletion(-) 496 + 497 + diff --git a/README.md b/README.md 498 + index 1234567..abcdefg 100644 499 + --- a/README.md 500 + +++ b/README.md 501 + @@ -1,3 +1,3 @@ 502 + # Test Repository 503 + 504 + -Initial content. 505 + +Different content. 
506 + -- 507 + 2.40.0 508 + ` 509 + 510 + err := repo.applyMailbox(patch) 511 + assert.Error(t, err) 512 + 513 + var mergeErr *ErrMerge 514 + assert.ErrorAs(t, err, &mergeErr) 515 + } 516 + 517 + func TestParseGitApplyErrors(t *testing.T) { 518 + tests := []struct { 519 + name string 520 + errorOutput string 521 + expectedCount int 522 + expectedReason string 523 + }{ 524 + { 525 + name: "file already exists", 526 + errorOutput: `error: path/to/file.txt: already exists in working directory`, 527 + expectedCount: 1, 528 + expectedReason: "file already exists", 529 + }, 530 + { 531 + name: "file does not exist", 532 + errorOutput: `error: path/to/file.txt: does not exist in working tree`, 533 + expectedCount: 1, 534 + expectedReason: "file does not exist", 535 + }, 536 + { 537 + name: "patch does not apply", 538 + errorOutput: `error: patch failed: file.txt:10 539 + error: file.txt: patch does not apply`, 540 + expectedCount: 1, 541 + expectedReason: "patch does not apply", 542 + }, 543 + { 544 + name: "multiple conflicts", 545 + errorOutput: `error: patch failed: file1.txt:5 546 + error: file1.txt:5: some error 547 + error: patch failed: file2.txt:10 548 + error: file2.txt:10: another error`, 549 + expectedCount: 2, 550 + }, 551 + } 552 + 553 + for _, tt := range tests { 554 + t.Run(tt.name, func(t *testing.T) { 555 + conflicts := parseGitApplyErrors(tt.errorOutput) 556 + assert.Len(t, conflicts, tt.expectedCount) 557 + 558 + if tt.expectedReason != "" && len(conflicts) > 0 { 559 + assert.Equal(t, tt.expectedReason, conflicts[0].Reason) 560 + } 561 + }) 562 + } 563 + } 564 + 565 + func TestErrMerge_Error(t *testing.T) { 566 + tests := []struct { 567 + name string 568 + err ErrMerge 569 + expectedMsg string 570 + }{ 571 + { 572 + name: "with conflicts", 573 + err: ErrMerge{ 574 + Message: "test merge failed", 575 + HasConflict: true, 576 + Conflicts: []ConflictInfo{ 577 + {Filename: "file1.txt", Reason: "conflict 1"}, 578 + {Filename: "file2.txt", Reason: "conflict 2"}, 579 + }, 580 + }, 581 + expectedMsg: "merge failed due to conflicts: test merge failed (2 conflicts)", 582 + }, 583 + { 584 + name: "with other error", 585 + err: ErrMerge{ 586 + Message: "command failed", 587 + OtherError: assert.AnError, 588 + }, 589 + expectedMsg: "merge failed: command failed:", 590 + }, 591 + { 592 + name: "message only", 593 + err: ErrMerge{ 594 + Message: "simple failure", 595 + }, 596 + expectedMsg: "merge failed: simple failure", 597 + }, 598 + } 599 + 600 + for _, tt := range tests { 601 + t.Run(tt.name, func(t *testing.T) { 602 + errMsg := tt.err.Error() 603 + assert.Contains(t, errMsg, tt.expectedMsg) 604 + }) 605 + } 606 + } 607 + 608 + func TestMergeWithOptions_Integration(t *testing.T) { 609 + h := helper(t) 610 + defer h.cleanup() 611 + 612 + // create a repository first with initial content 613 + workRepoPath := filepath.Join(h.tempDir, "work-repo") 614 + workRepo, err := git.PlainInit(workRepoPath, false) 615 + require.NoError(t, err) 616 + 617 + // configure git user 618 + cfg, err := workRepo.Config() 619 + require.NoError(t, err) 620 + cfg.User.Name = "Test User" 621 + cfg.User.Email = "test@example.com" 622 + err = workRepo.SetConfig(cfg) 623 + require.NoError(t, err) 624 + 625 + // Create initial commit 626 + w, err := workRepo.Worktree() 627 + require.NoError(t, err) 628 + 629 + err = os.WriteFile(filepath.Join(workRepoPath, "README.md"), []byte("# Initial\n"), 0644) 630 + require.NoError(t, err) 631 + 632 + _, err = w.Add("README.md") 633 + require.NoError(t, err) 634 + 635 + _, err = 
w.Commit("Initial commit", &git.CommitOptions{ 636 + Author: &object.Signature{ 637 + Name: "Test User", 638 + Email: "test@example.com", 639 + }, 640 + }) 641 + require.NoError(t, err) 642 + 643 + // create a bare repository (like production) 644 + bareRepoPath := filepath.Join(h.tempDir, "bare-repo") 645 + err = InitBare(bareRepoPath, "main") 646 + require.NoError(t, err) 647 + 648 + // add bare repo as remote and push to it 649 + _, err = workRepo.CreateRemote(&config.RemoteConfig{ 650 + Name: "origin", 651 + URLs: []string{"file://" + bareRepoPath}, 652 + }) 653 + require.NoError(t, err) 654 + 655 + err = workRepo.Push(&git.PushOptions{ 656 + RemoteName: "origin", 657 + RefSpecs: []config.RefSpec{"refs/heads/master:refs/heads/main"}, 658 + }) 659 + require.NoError(t, err) 660 + 661 + // now merge a patch into the bare repo 662 + gitRepo, err := PlainOpen(bareRepoPath) 663 + require.NoError(t, err) 664 + 665 + patch := `diff --git a/feature.txt b/feature.txt 666 + new file mode 100644 667 + index 0000000..5e1c309 668 + --- /dev/null 669 + +++ b/feature.txt 670 + @@ -0,0 +1 @@ 671 + +Hello World 672 + ` 673 + 674 + opts := MergeOptions{ 675 + CommitMessage: "Add feature", 676 + CommitterName: "Test Committer", 677 + CommitterEmail: "committer@example.com", 678 + FormatPatch: false, 679 + } 680 + 681 + err = gitRepo.MergeWithOptions(patch, "main", opts) 682 + assert.NoError(t, err) 683 + 684 + // Clone again and verify the changes were merged 685 + verifyRepoPath := filepath.Join(h.tempDir, "verify-repo") 686 + verifyRepo, err := git.PlainClone(verifyRepoPath, false, &git.CloneOptions{ 687 + URL: "file://" + bareRepoPath, 688 + }) 689 + require.NoError(t, err) 690 + 691 + // check that feature.txt exists 692 + featureFile := filepath.Join(verifyRepoPath, "feature.txt") 693 + assert.FileExists(t, featureFile) 694 + 695 + content, err := os.ReadFile(featureFile) 696 + require.NoError(t, err) 697 + assert.Equal(t, "Hello World\n", string(content)) 698 + 699 + // verify commit message 700 + head, err := verifyRepo.Head() 701 + require.NoError(t, err) 702 + 703 + commit, err := verifyRepo.CommitObject(head.Hash()) 704 + require.NoError(t, err) 705 + assert.Equal(t, "Add feature", strings.TrimSpace(commit.Message)) 706 + }
+1 -1
knotserver/git/post_receive.go
··· 95 95 // git rev-list <newsha> ^other-branches --not ^this-branch 96 96 args = append(args, line.NewSha.String()) 97 97 98 - branches, _ := g.Branches() 98 + branches, _ := g.Branches(nil) 99 99 for _, b := range branches { 100 100 if !strings.Contains(line.Ref, b.Name) { 101 101 args = append(args, fmt.Sprintf("^%s", b.Name))
+38 -3
knotserver/git/tag.go
··· 10 10 "github.com/go-git/go-git/v5/plumbing/object" 11 11 ) 12 12 13 - func (g *GitRepo) Tags() ([]object.Tag, error) { 13 + type TagsOptions struct { 14 + Limit int 15 + Offset int 16 + Pattern string 17 + } 18 + 19 + func (g *GitRepo) Tags(opts *TagsOptions) ([]object.Tag, error) { 20 + if opts == nil { 21 + opts = &TagsOptions{} 22 + } 23 + 24 + if opts.Pattern == "" { 25 + opts.Pattern = "refs/tags" 26 + } 27 + 14 28 fields := []string{ 15 29 "refname:short", 16 30 "objectname", ··· 29 43 if i != 0 { 30 44 outFormat.WriteString(fieldSeparator) 31 45 } 32 - outFormat.WriteString(fmt.Sprintf("%%(%s)", f)) 46 + fmt.Fprintf(&outFormat, "%%(%s)", f) 33 47 } 34 48 outFormat.WriteString("") 35 49 outFormat.WriteString(recordSeparator) 36 50 37 - output, err := g.forEachRef(outFormat.String(), "--sort=-creatordate", "refs/tags") 51 + args := []string{outFormat.String(), "--sort=-creatordate"} 52 + 53 + // only add the count if the limit is a non-zero value, 54 + // if it is zero, get as many tags as we can 55 + if opts.Limit > 0 { 56 + args = append(args, fmt.Sprintf("--count=%d", opts.Offset+opts.Limit)) 57 + } 58 + 59 + args = append(args, opts.Pattern) 60 + 61 + output, err := g.forEachRef(args...) 38 62 if err != nil { 39 63 return nil, fmt.Errorf("failed to get tags: %w", err) 40 64 } ··· 44 68 return nil, nil 45 69 } 46 70 71 + startIdx := opts.Offset 72 + if startIdx >= len(records) { 73 + return nil, nil 74 + } 75 + 76 + endIdx := len(records) 77 + if opts.Limit > 0 { 78 + endIdx = min(startIdx+opts.Limit, len(records)) 79 + } 80 + 81 + records = records[startIdx:endIdx] 47 82 tags := make([]object.Tag, 0, len(records)) 48 83 49 84 for _, line := range records {
+365
knotserver/git/tag_test.go
··· 1 + package git 2 + 3 + import ( 4 + "path/filepath" 5 + "testing" 6 + "time" 7 + 8 + gogit "github.com/go-git/go-git/v5" 9 + "github.com/go-git/go-git/v5/plumbing" 10 + "github.com/go-git/go-git/v5/plumbing/object" 11 + "github.com/stretchr/testify/assert" 12 + "github.com/stretchr/testify/require" 13 + "github.com/stretchr/testify/suite" 14 + ) 15 + 16 + type TagSuite struct { 17 + suite.Suite 18 + *RepoSuite 19 + } 20 + 21 + func TestTagSuite(t *testing.T) { 22 + t.Parallel() 23 + suite.Run(t, new(TagSuite)) 24 + } 25 + 26 + func (s *TagSuite) SetupTest() { 27 + s.RepoSuite = NewRepoSuite(s.T()) 28 + } 29 + 30 + func (s *TagSuite) TearDownTest() { 31 + s.RepoSuite.cleanup() 32 + } 33 + 34 + func (s *TagSuite) setupRepoWithTags() { 35 + s.init() 36 + 37 + // create commits for tagging 38 + commit1 := s.commitFile("file1.txt", "content 1", "Add file1") 39 + commit2 := s.commitFile("file2.txt", "content 2", "Add file2") 40 + commit3 := s.commitFile("file3.txt", "content 3", "Add file3") 41 + commit4 := s.commitFile("file4.txt", "content 4", "Add file4") 42 + commit5 := s.commitFile("file5.txt", "content 5", "Add file5") 43 + 44 + // create annotated tags 45 + s.createAnnotatedTag( 46 + "v1.0.0", 47 + commit1, 48 + "Tagger One", 49 + "tagger1@example.com", 50 + "Release version 1.0.0\n\nThis is the first stable release.", 51 + s.baseTime.Add(1*time.Hour), 52 + ) 53 + 54 + s.createAnnotatedTag( 55 + "v1.1.0", 56 + commit2, 57 + "Tagger Two", 58 + "tagger2@example.com", 59 + "Release version 1.1.0", 60 + s.baseTime.Add(2*time.Hour), 61 + ) 62 + 63 + // create lightweight tags 64 + s.createLightweightTag("v2.0.0", commit3) 65 + s.createLightweightTag("v2.1.0", commit4) 66 + 67 + // create another annotated tag 68 + s.createAnnotatedTag( 69 + "v3.0.0", 70 + commit5, 71 + "Tagger Three", 72 + "tagger3@example.com", 73 + "Major version 3.0.0\n\nBreaking changes included.", 74 + s.baseTime.Add(3*time.Hour), 75 + ) 76 + } 77 + 78 + func (s *TagSuite) TestTags_All() { 79 + s.setupRepoWithTags() 80 + 81 + tags, err := s.repo.Tags(nil) 82 + require.NoError(s.T(), err) 83 + 84 + // we created 5 tags total (3 annotated, 2 lightweight) 85 + assert.Len(s.T(), tags, 5, "expected 5 tags") 86 + 87 + // verify tags are sorted by creation date (newest first) 88 + expectedAnnotated := map[string]bool{ 89 + "v1.0.0": true, 90 + "v1.1.0": true, 91 + "v3.0.0": true, 92 + } 93 + 94 + expectedLightweight := map[string]bool{ 95 + "v2.0.0": true, 96 + "v2.1.0": true, 97 + } 98 + 99 + for _, tag := range tags { 100 + if expectedAnnotated[tag.Name] { 101 + // annotated tags should have tagger info 102 + assert.NotEmpty(s.T(), tag.Tagger.Name, "annotated tag %s should have tagger name", tag.Name) 103 + assert.NotEmpty(s.T(), tag.Message, "annotated tag %s should have message", tag.Name) 104 + } else if expectedLightweight[tag.Name] { 105 + // lightweight tags won't have tagger info or message (they'll have empty values) 106 + } else { 107 + s.T().Errorf("unexpected tag name: %s", tag.Name) 108 + } 109 + } 110 + } 111 + 112 + func (s *TagSuite) TestTags_WithLimit() { 113 + s.setupRepoWithTags() 114 + 115 + tests := []struct { 116 + name string 117 + limit int 118 + expectedCount int 119 + }{ 120 + { 121 + name: "limit 1", 122 + limit: 1, 123 + expectedCount: 1, 124 + }, 125 + { 126 + name: "limit 2", 127 + limit: 2, 128 + expectedCount: 2, 129 + }, 130 + { 131 + name: "limit 3", 132 + limit: 3, 133 + expectedCount: 3, 134 + }, 135 + { 136 + name: "limit 10 (more than available)", 137 + limit: 10, 138 + expectedCount: 
5, 139 + }, 140 + } 141 + 142 + for _, tt := range tests { 143 + s.Run(tt.name, func() { 144 + tags, err := s.repo.Tags(&TagsOptions{ 145 + Limit: tt.limit, 146 + }) 147 + require.NoError(s.T(), err) 148 + assert.Len(s.T(), tags, tt.expectedCount, "expected %d tags", tt.expectedCount) 149 + }) 150 + } 151 + } 152 + 153 + func (s *TagSuite) TestTags_WithOffset() { 154 + s.setupRepoWithTags() 155 + 156 + tests := []struct { 157 + name string 158 + offset int 159 + expectedCount int 160 + }{ 161 + { 162 + name: "offset 0", 163 + offset: 0, 164 + expectedCount: 5, 165 + }, 166 + { 167 + name: "offset 1", 168 + offset: 1, 169 + expectedCount: 4, 170 + }, 171 + { 172 + name: "offset 2", 173 + offset: 2, 174 + expectedCount: 3, 175 + }, 176 + { 177 + name: "offset 4", 178 + offset: 4, 179 + expectedCount: 1, 180 + }, 181 + { 182 + name: "offset 5 (all skipped)", 183 + offset: 5, 184 + expectedCount: 0, 185 + }, 186 + { 187 + name: "offset 10 (more than available)", 188 + offset: 10, 189 + expectedCount: 0, 190 + }, 191 + } 192 + 193 + for _, tt := range tests { 194 + s.Run(tt.name, func() { 195 + tags, err := s.repo.Tags(&TagsOptions{ 196 + Offset: tt.offset, 197 + }) 198 + require.NoError(s.T(), err) 199 + assert.Len(s.T(), tags, tt.expectedCount, "expected %d tags", tt.expectedCount) 200 + }) 201 + } 202 + } 203 + 204 + func (s *TagSuite) TestTags_WithLimitAndOffset() { 205 + s.setupRepoWithTags() 206 + 207 + tests := []struct { 208 + name string 209 + limit int 210 + offset int 211 + expectedCount int 212 + }{ 213 + { 214 + name: "limit 2, offset 0", 215 + limit: 2, 216 + offset: 0, 217 + expectedCount: 2, 218 + }, 219 + { 220 + name: "limit 2, offset 1", 221 + limit: 2, 222 + offset: 1, 223 + expectedCount: 2, 224 + }, 225 + { 226 + name: "limit 2, offset 3", 227 + limit: 2, 228 + offset: 3, 229 + expectedCount: 2, 230 + }, 231 + { 232 + name: "limit 2, offset 4", 233 + limit: 2, 234 + offset: 4, 235 + expectedCount: 1, 236 + }, 237 + { 238 + name: "limit 3, offset 2", 239 + limit: 3, 240 + offset: 2, 241 + expectedCount: 3, 242 + }, 243 + { 244 + name: "limit 10, offset 3", 245 + limit: 10, 246 + offset: 3, 247 + expectedCount: 2, 248 + }, 249 + } 250 + 251 + for _, tt := range tests { 252 + s.Run(tt.name, func() { 253 + tags, err := s.repo.Tags(&TagsOptions{ 254 + Limit: tt.limit, 255 + Offset: tt.offset, 256 + }) 257 + require.NoError(s.T(), err) 258 + assert.Len(s.T(), tags, tt.expectedCount, "expected %d tags", tt.expectedCount) 259 + }) 260 + } 261 + } 262 + 263 + func (s *TagSuite) TestTags_EmptyRepo() { 264 + repoPath := filepath.Join(s.tempDir, "empty-repo") 265 + 266 + _, err := gogit.PlainInit(repoPath, false) 267 + require.NoError(s.T(), err) 268 + 269 + gitRepo, err := PlainOpen(repoPath) 270 + require.NoError(s.T(), err) 271 + 272 + tags, err := gitRepo.Tags(nil) 273 + require.NoError(s.T(), err) 274 + 275 + if tags != nil { 276 + assert.Empty(s.T(), tags, "expected no tags in empty repo") 277 + } 278 + } 279 + 280 + func (s *TagSuite) TestTags_Pagination() { 281 + s.setupRepoWithTags() 282 + 283 + allTags, err := s.repo.Tags(nil) 284 + require.NoError(s.T(), err) 285 + assert.Len(s.T(), allTags, 5, "expected 5 tags") 286 + 287 + pageSize := 2 288 + var paginatedTags []object.Tag 289 + 290 + for offset := 0; offset < len(allTags); offset += pageSize { 291 + tags, err := s.repo.Tags(&TagsOptions{ 292 + Limit: pageSize, 293 + Offset: offset, 294 + }) 295 + require.NoError(s.T(), err) 296 + paginatedTags = append(paginatedTags, tags...) 
297 + } 298 + 299 + assert.Len(s.T(), paginatedTags, len(allTags), "pagination should return all tags") 300 + 301 + for i := range allTags { 302 + assert.Equal(s.T(), allTags[i].Name, paginatedTags[i].Name, 303 + "tag at index %d differs", i) 304 + } 305 + } 306 + 307 + func (s *TagSuite) TestTags_VerifyAnnotatedTagFields() { 308 + s.setupRepoWithTags() 309 + 310 + tags, err := s.repo.Tags(nil) 311 + require.NoError(s.T(), err) 312 + 313 + var v1Tag *object.Tag 314 + for i := range tags { 315 + if tags[i].Name == "v1.0.0" { 316 + v1Tag = &tags[i] 317 + break 318 + } 319 + } 320 + 321 + require.NotNil(s.T(), v1Tag, "v1.0.0 tag not found") 322 + 323 + assert.Equal(s.T(), "Tagger One", v1Tag.Tagger.Name, "tagger name should match") 324 + assert.Equal(s.T(), "tagger1@example.com", v1Tag.Tagger.Email, "tagger email should match") 325 + 326 + assert.Equal(s.T(), "Release version 1.0.0\n\nThis is the first stable release.", 327 + v1Tag.Message, "tag message should match") 328 + 329 + assert.Equal(s.T(), plumbing.TagObject, v1Tag.TargetType, 330 + "target type should be CommitObject") 331 + 332 + assert.False(s.T(), v1Tag.Hash.IsZero(), "tag hash should be set") 333 + 334 + assert.False(s.T(), v1Tag.Target.IsZero(), "target hash should be set") 335 + } 336 + 337 + func (s *TagSuite) TestTags_NilOptions() { 338 + s.setupRepoWithTags() 339 + 340 + tags, err := s.repo.Tags(nil) 341 + require.NoError(s.T(), err) 342 + assert.Len(s.T(), tags, 5, "nil options should return all tags") 343 + } 344 + 345 + func (s *TagSuite) TestTags_ZeroLimitAndOffset() { 346 + s.setupRepoWithTags() 347 + 348 + tags, err := s.repo.Tags(&TagsOptions{ 349 + Limit: 0, 350 + Offset: 0, 351 + }) 352 + require.NoError(s.T(), err) 353 + assert.Len(s.T(), tags, 5, "zero limit should return all tags") 354 + } 355 + 356 + func (s *TagSuite) TestTags_Pattern() { 357 + s.setupRepoWithTags() 358 + 359 + v1tag, err := s.repo.Tags(&TagsOptions{ 360 + Pattern: "refs/tags/v1.0.0", 361 + }) 362 + 363 + require.NoError(s.T(), err) 364 + assert.Len(s.T(), v1tag, 1, "expected 1 tag") 365 + }
+141
knotserver/git/test_common.go
··· 1 + package git 2 + 3 + import ( 4 + "os" 5 + "path/filepath" 6 + "testing" 7 + "time" 8 + 9 + gogit "github.com/go-git/go-git/v5" 10 + "github.com/go-git/go-git/v5/plumbing" 11 + "github.com/go-git/go-git/v5/plumbing/object" 12 + "github.com/stretchr/testify/require" 13 + ) 14 + 15 + type RepoSuite struct { 16 + t *testing.T 17 + tempDir string 18 + repo *GitRepo 19 + baseTime time.Time 20 + } 21 + 22 + func NewRepoSuite(t *testing.T) *RepoSuite { 23 + tempDir, err := os.MkdirTemp("", "git-test-*") 24 + require.NoError(t, err) 25 + 26 + return &RepoSuite{ 27 + t: t, 28 + tempDir: tempDir, 29 + baseTime: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC), 30 + } 31 + } 32 + 33 + func (h *RepoSuite) cleanup() { 34 + if h.tempDir != "" { 35 + os.RemoveAll(h.tempDir) 36 + } 37 + } 38 + 39 + func (h *RepoSuite) init() *GitRepo { 40 + repoPath := filepath.Join(h.tempDir, "test-repo") 41 + 42 + // initialize repository 43 + r, err := gogit.PlainInit(repoPath, false) 44 + require.NoError(h.t, err) 45 + 46 + // configure git user 47 + cfg, err := r.Config() 48 + require.NoError(h.t, err) 49 + cfg.User.Name = "Test User" 50 + cfg.User.Email = "test@example.com" 51 + err = r.SetConfig(cfg) 52 + require.NoError(h.t, err) 53 + 54 + // create initial commit with a file 55 + w, err := r.Worktree() 56 + require.NoError(h.t, err) 57 + 58 + // create initial file 59 + initialFile := filepath.Join(repoPath, "README.md") 60 + err = os.WriteFile(initialFile, []byte("# Test Repository\n\nInitial content.\n"), 0644) 61 + require.NoError(h.t, err) 62 + 63 + _, err = w.Add("README.md") 64 + require.NoError(h.t, err) 65 + 66 + _, err = w.Commit("Initial commit", &gogit.CommitOptions{ 67 + Author: &object.Signature{ 68 + Name: "Test User", 69 + Email: "test@example.com", 70 + When: h.baseTime, 71 + }, 72 + }) 73 + require.NoError(h.t, err) 74 + 75 + gitRepo, err := PlainOpen(repoPath) 76 + require.NoError(h.t, err) 77 + 78 + h.repo = gitRepo 79 + return gitRepo 80 + } 81 + 82 + func (h *RepoSuite) commitFile(filename, content, message string) plumbing.Hash { 83 + filePath := filepath.Join(h.repo.path, filename) 84 + dir := filepath.Dir(filePath) 85 + 86 + err := os.MkdirAll(dir, 0755) 87 + require.NoError(h.t, err) 88 + 89 + err = os.WriteFile(filePath, []byte(content), 0644) 90 + require.NoError(h.t, err) 91 + 92 + w, err := h.repo.r.Worktree() 93 + require.NoError(h.t, err) 94 + 95 + _, err = w.Add(filename) 96 + require.NoError(h.t, err) 97 + 98 + hash, err := w.Commit(message, &gogit.CommitOptions{ 99 + Author: &object.Signature{ 100 + Name: "Test User", 101 + Email: "test@example.com", 102 + }, 103 + }) 104 + require.NoError(h.t, err) 105 + 106 + return hash 107 + } 108 + 109 + func (h *RepoSuite) createAnnotatedTag(name string, commit plumbing.Hash, taggerName, taggerEmail, message string, when time.Time) { 110 + _, err := h.repo.r.CreateTag(name, commit, &gogit.CreateTagOptions{ 111 + Tagger: &object.Signature{ 112 + Name: taggerName, 113 + Email: taggerEmail, 114 + When: when, 115 + }, 116 + Message: message, 117 + }) 118 + require.NoError(h.t, err) 119 + } 120 + 121 + func (h *RepoSuite) createLightweightTag(name string, commit plumbing.Hash) { 122 + ref := plumbing.NewReferenceFromStrings("refs/tags/"+name, commit.String()) 123 + err := h.repo.r.Storer.SetReference(ref) 124 + require.NoError(h.t, err) 125 + } 126 + 127 + func (h *RepoSuite) createBranch(name string, commit plumbing.Hash) { 128 + ref := plumbing.NewReferenceFromStrings("refs/heads/"+name, commit.String()) 129 + err := 
h.repo.r.Storer.SetReference(ref) 130 + require.NoError(h.t, err) 131 + } 132 + 133 + func (h *RepoSuite) checkoutBranch(name string) { 134 + w, err := h.repo.r.Worktree() 135 + require.NoError(h.t, err) 136 + 137 + err = w.Checkout(&gogit.CheckoutOptions{ 138 + Branch: plumbing.NewBranchReferenceName(name), 139 + }) 140 + require.NoError(h.t, err) 141 + }
+11 -1
knotserver/git/tree.go
··· 48 48 func (g *GitRepo) makeNiceTree(ctx context.Context, subtree *object.Tree, parent string) []types.NiceTree { 49 49 nts := []types.NiceTree{} 50 50 51 - times, err := g.calculateCommitTimeIn(ctx, subtree, parent, 2*time.Second) 51 + entries := make([]string, 0, len(subtree.Entries)) 52 + for _, e := range subtree.Entries { 53 + entries = append(entries, e.Name) 54 + } 55 + 56 + lastCommitDir := lastCommitDir{ 57 + dir: parent, 58 + entries: entries, 59 + } 60 + 61 + times, err := g.lastCommitDirIn(ctx, lastCommitDir, 2*time.Second) 52 62 if err != nil { 53 63 return nts 54 64 }
+136
knotserver/ingester.go
··· 7 7 "io" 8 8 "net/http" 9 9 "net/url" 10 + "path/filepath" 10 11 "strings" 11 12 12 13 comatproto "github.com/bluesky-social/indigo/api/atproto" ··· 16 17 securejoin "github.com/cyphar/filepath-securejoin" 17 18 "tangled.org/core/api/tangled" 18 19 "tangled.org/core/knotserver/db" 20 + "tangled.org/core/knotserver/git" 19 21 "tangled.org/core/log" 20 22 "tangled.org/core/rbac" 23 + "tangled.org/core/workflow" 21 24 ) 22 25 23 26 func (h *Knot) processPublicKey(ctx context.Context, event *models.Event) error { ··· 82 85 return nil 83 86 } 84 87 88 + func (h *Knot) processPull(ctx context.Context, event *models.Event) error { 89 + raw := json.RawMessage(event.Commit.Record) 90 + did := event.Did 91 + 92 + var record tangled.RepoPull 93 + if err := json.Unmarshal(raw, &record); err != nil { 94 + return fmt.Errorf("failed to unmarshal record: %w", err) 95 + } 96 + 97 + l := log.FromContext(ctx) 98 + l = l.With("handler", "processPull") 99 + l = l.With("did", did) 100 + 101 + if record.Target == nil { 102 + return fmt.Errorf("ignoring pull record: target repo is nil") 103 + } 104 + 105 + l = l.With("target_repo", record.Target.Repo) 106 + l = l.With("target_branch", record.Target.Branch) 107 + 108 + if record.Source == nil { 109 + return fmt.Errorf("ignoring pull record: not a branch-based pull request") 110 + } 111 + 112 + if record.Source.Repo != nil { 113 + return fmt.Errorf("ignoring pull record: fork based pull") 114 + } 115 + 116 + repoAt, err := syntax.ParseATURI(record.Target.Repo) 117 + if err != nil { 118 + return fmt.Errorf("failed to parse ATURI: %w", err) 119 + } 120 + 121 + // resolve this aturi to extract the repo record 122 + ident, err := h.resolver.ResolveIdent(ctx, repoAt.Authority().String()) 123 + if err != nil || ident.Handle.IsInvalidHandle() { 124 + return fmt.Errorf("failed to resolve handle: %w", err) 125 + } 126 + 127 + xrpcc := xrpc.Client{ 128 + Host: ident.PDSEndpoint(), 129 + } 130 + 131 + resp, err := comatproto.RepoGetRecord(ctx, &xrpcc, "", tangled.RepoNSID, repoAt.Authority().String(), repoAt.RecordKey().String()) 132 + if err != nil { 133 + return fmt.Errorf("failed to resolver repo: %w", err) 134 + } 135 + 136 + repo := resp.Value.Val.(*tangled.Repo) 137 + 138 + if repo.Knot != h.c.Server.Hostname { 139 + return fmt.Errorf("rejected pull record: not this knot, %s != %s", repo.Knot, h.c.Server.Hostname) 140 + } 141 + 142 + didSlashRepo, err := securejoin.SecureJoin(ident.DID.String(), repo.Name) 143 + if err != nil { 144 + return fmt.Errorf("failed to construct relative repo path: %w", err) 145 + } 146 + 147 + repoPath, err := securejoin.SecureJoin(h.c.Repo.ScanPath, didSlashRepo) 148 + if err != nil { 149 + return fmt.Errorf("failed to construct absolute repo path: %w", err) 150 + } 151 + 152 + gr, err := git.Open(repoPath, record.Source.Sha) 153 + if err != nil { 154 + return fmt.Errorf("failed to open git repository: %w", err) 155 + } 156 + 157 + workflowDir, err := gr.FileTree(ctx, workflow.WorkflowDir) 158 + if err != nil { 159 + return fmt.Errorf("failed to open workflow directory: %w", err) 160 + } 161 + 162 + var pipeline workflow.RawPipeline 163 + for _, e := range workflowDir { 164 + if !e.IsFile() { 165 + continue 166 + } 167 + 168 + fpath := filepath.Join(workflow.WorkflowDir, e.Name) 169 + contents, err := gr.RawContent(fpath) 170 + if err != nil { 171 + continue 172 + } 173 + 174 + pipeline = append(pipeline, workflow.RawWorkflow{ 175 + Name: e.Name, 176 + Contents: contents, 177 + }) 178 + } 179 + 180 + trigger := 
tangled.Pipeline_PullRequestTriggerData{ 181 + Action: "create", 182 + SourceBranch: record.Source.Branch, 183 + SourceSha: record.Source.Sha, 184 + TargetBranch: record.Target.Branch, 185 + } 186 + 187 + compiler := workflow.Compiler{ 188 + Trigger: tangled.Pipeline_TriggerMetadata{ 189 + Kind: string(workflow.TriggerKindPullRequest), 190 + PullRequest: &trigger, 191 + Repo: &tangled.Pipeline_TriggerRepo{ 192 + Did: ident.DID.String(), 193 + Knot: repo.Knot, 194 + Repo: repo.Name, 195 + }, 196 + }, 197 + } 198 + 199 + cp := compiler.Compile(compiler.Parse(pipeline)) 200 + eventJson, err := json.Marshal(cp) 201 + if err != nil { 202 + return fmt.Errorf("failed to marshal pipeline event: %w", err) 203 + } 204 + 205 + // do not run empty pipelines 206 + if cp.Workflows == nil { 207 + return nil 208 + } 209 + 210 + ev := db.Event{ 211 + Rkey: TID(), 212 + Nsid: tangled.PipelineNSID, 213 + EventJson: string(eventJson), 214 + } 215 + 216 + return h.db.InsertEvent(ev, h.n) 217 + } 218 + 85 219 // duplicated from add collaborator 86 220 func (h *Knot) processCollaborator(ctx context.Context, event *models.Event) error { 87 221 raw := json.RawMessage(event.Commit.Record) ··· 204 338 err = h.processPublicKey(ctx, event) 205 339 case tangled.KnotMemberNSID: 206 340 err = h.processKnotMember(ctx, event) 341 + case tangled.RepoPullNSID: 342 + err = h.processPull(ctx, event) 207 343 case tangled.RepoCollaboratorNSID: 208 344 err = h.processCollaborator(ctx, event) 209 345 }
+109 -1
knotserver/internal.go
··· 23 23 "tangled.org/core/log" 24 24 "tangled.org/core/notifier" 25 25 "tangled.org/core/rbac" 26 + "tangled.org/core/workflow" 26 27 ) 27 28 28 29 type InternalHandle struct { ··· 175 176 } 176 177 177 178 for _, line := range lines { 178 - // TODO: pass pushOptions to refUpdate 179 179 err := h.insertRefUpdate(line, gitUserDid, repoDid, repoName) 180 180 if err != nil { 181 181 l.Error("failed to insert op", "err", err, "line", line, "did", gitUserDid, "repo", gitRelativeDir) ··· 185 185 err = h.emitCompareLink(&resp.Messages, line, repoDid, repoName) 186 186 if err != nil { 187 187 l.Error("failed to reply with compare link", "err", err, "line", line, "did", gitUserDid, "repo", gitRelativeDir) 188 + // non-fatal 189 + } 190 + 191 + err = h.triggerPipeline(&resp.Messages, line, gitUserDid, repoDid, repoName, pushOptions) 192 + if err != nil { 193 + l.Error("failed to trigger pipeline", "err", err, "line", line, "did", gitUserDid, "repo", gitRelativeDir) 188 194 // non-fatal 189 195 } 190 196 } ··· 235 241 } 236 242 237 243 return errors.Join(errs, h.db.InsertEvent(event, h.n)) 244 + } 245 + 246 + func (h *InternalHandle) triggerPipeline( 247 + clientMsgs *[]string, 248 + line git.PostReceiveLine, 249 + gitUserDid string, 250 + repoDid string, 251 + repoName string, 252 + pushOptions PushOptions, 253 + ) error { 254 + if pushOptions.skipCi { 255 + return nil 256 + } 257 + 258 + didSlashRepo, err := securejoin.SecureJoin(repoDid, repoName) 259 + if err != nil { 260 + return err 261 + } 262 + 263 + repoPath, err := securejoin.SecureJoin(h.c.Repo.ScanPath, didSlashRepo) 264 + if err != nil { 265 + return err 266 + } 267 + 268 + gr, err := git.Open(repoPath, line.Ref) 269 + if err != nil { 270 + return err 271 + } 272 + 273 + workflowDir, err := gr.FileTree(context.Background(), workflow.WorkflowDir) 274 + if err != nil { 275 + return err 276 + } 277 + 278 + var pipeline workflow.RawPipeline 279 + for _, e := range workflowDir { 280 + if !e.IsFile() { 281 + continue 282 + } 283 + 284 + fpath := filepath.Join(workflow.WorkflowDir, e.Name) 285 + contents, err := gr.RawContent(fpath) 286 + if err != nil { 287 + continue 288 + } 289 + 290 + pipeline = append(pipeline, workflow.RawWorkflow{ 291 + Name: e.Name, 292 + Contents: contents, 293 + }) 294 + } 295 + 296 + trigger := tangled.Pipeline_PushTriggerData{ 297 + Ref: line.Ref, 298 + OldSha: line.OldSha.String(), 299 + NewSha: line.NewSha.String(), 300 + } 301 + 302 + compiler := workflow.Compiler{ 303 + Trigger: tangled.Pipeline_TriggerMetadata{ 304 + Kind: string(workflow.TriggerKindPush), 305 + Push: &trigger, 306 + Repo: &tangled.Pipeline_TriggerRepo{ 307 + Did: repoDid, 308 + Knot: h.c.Server.Hostname, 309 + Repo: repoName, 310 + }, 311 + }, 312 + } 313 + 314 + cp := compiler.Compile(compiler.Parse(pipeline)) 315 + eventJson, err := json.Marshal(cp) 316 + if err != nil { 317 + return err 318 + } 319 + 320 + for _, e := range compiler.Diagnostics.Errors { 321 + *clientMsgs = append(*clientMsgs, e.String()) 322 + } 323 + 324 + if pushOptions.verboseCi { 325 + if compiler.Diagnostics.IsEmpty() { 326 + *clientMsgs = append(*clientMsgs, "success: pipeline compiled with no diagnostics") 327 + } 328 + 329 + for _, w := range compiler.Diagnostics.Warnings { 330 + *clientMsgs = append(*clientMsgs, w.String()) 331 + } 332 + } 333 + 334 + // do not run empty pipelines 335 + if cp.Workflows == nil { 336 + return nil 337 + } 338 + 339 + event := db.Event{ 340 + Rkey: TID(), 341 + Nsid: tangled.PipelineNSID, 342 + EventJson: string(eventJson), 343 + } 
344 + 345 + return h.db.InsertEvent(event, h.n) 238 346 } 239 347 240 348 func (h *InternalHandle) emitCompareLink(
+25
knotserver/router.go
··· 5 5 "fmt" 6 6 "log/slog" 7 7 "net/http" 8 + "strings" 8 9 9 10 "github.com/go-chi/chi/v5" 10 11 "tangled.org/core/idresolver" ··· 79 80 }) 80 81 81 82 r.Route("/{did}", func(r chi.Router) { 83 + r.Use(h.resolveDidRedirect) 82 84 r.Route("/{name}", func(r chi.Router) { 83 85 // routes for git operations 84 86 r.Get("/info/refs", h.InfoRefs) ··· 114 116 } 115 117 116 118 return xrpc.Router() 119 + } 120 + 121 + func (h *Knot) resolveDidRedirect(next http.Handler) http.Handler { 122 + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { 123 + didOrHandle := chi.URLParam(r, "did") 124 + if strings.HasPrefix(didOrHandle, "did:") { 125 + next.ServeHTTP(w, r) 126 + return 127 + } 128 + 129 + trimmed := strings.TrimPrefix(didOrHandle, "@") 130 + id, err := h.resolver.ResolveIdent(r.Context(), trimmed) 131 + if err != nil { 132 + // invalid did or handle 133 + h.l.Error("failed to resolve did/handle", "handle", trimmed, "err", err) 134 + http.Error(w, fmt.Sprintf("failed to resolve did/handle: %s", trimmed), http.StatusInternalServerError) 135 + return 136 + } 137 + 138 + suffix := strings.TrimPrefix(r.URL.Path, "/"+didOrHandle) 139 + newPath := fmt.Sprintf("/%s/%s?%s", id.DID.String(), suffix, r.URL.RawQuery) 140 + http.Redirect(w, r, newPath, http.StatusTemporaryRedirect) 141 + }) 117 142 } 118 143 119 144 func (h *Knot) configureOwner() error {
+1
knotserver/server.go
··· 79 79 jc, err := jetstream.NewJetstreamClient(c.Server.JetstreamEndpoint, "knotserver", []string{ 80 80 tangled.PublicKeyNSID, 81 81 tangled.KnotMemberNSID, 82 + tangled.RepoPullNSID, 82 83 tangled.RepoCollaboratorNSID, 83 84 }, nil, log.SubLogger(logger, "jetstream"), db, true, c.Server.LogDids) 84 85 if err != nil {
+7 -1
knotserver/xrpc/merge_check.go
··· 9 9 securejoin "github.com/cyphar/filepath-securejoin" 10 10 "tangled.org/core/api/tangled" 11 11 "tangled.org/core/knotserver/git" 12 + "tangled.org/core/patchutil" 12 13 xrpcerr "tangled.org/core/xrpc/errors" 13 14 ) 14 15 ··· 51 52 return 52 53 } 53 54 54 - err = gr.MergeCheck(data.Patch, data.Branch) 55 + mo := git.MergeOptions{} 56 + mo.CommitterName = x.Config.Git.UserName 57 + mo.CommitterEmail = x.Config.Git.UserEmail 58 + mo.FormatPatch = patchutil.IsFormatPatch(data.Patch) 59 + 60 + err = gr.MergeCheckWithOptions(data.Patch, data.Branch, mo) 55 61 56 62 response := tangled.RepoMergeCheck_Output{ 57 63 Is_conflicted: false,
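For callers outside this handler, the same options drive a dry-run conflict check. A hedged sketch, assuming MergeCheckWithOptions surfaces the same *ErrMerge the merge tests assert on; the repository path, branch name, and patch contents are placeholders:

```go
package main

import (
	"errors"
	"fmt"
	"log"

	"tangled.org/core/knotserver/git"
	"tangled.org/core/patchutil"
)

func main() {
	gr, err := git.PlainOpen("/path/to/repo") // placeholder path
	if err != nil {
		log.Fatal(err)
	}

	patch := "..." // a raw diff or a format-patch mailbox, obtained elsewhere

	mo := git.MergeOptions{
		CommitterName:  "Example Committer",
		CommitterEmail: "committer@example.com",
		FormatPatch:    patchutil.IsFormatPatch(patch),
	}

	if err := gr.MergeCheckWithOptions(patch, "main", mo); err != nil {
		var mergeErr *git.ErrMerge
		if errors.As(err, &mergeErr) && mergeErr.HasConflict {
			// each ConflictInfo carries the filename and the reason git apply reported
			for _, c := range mergeErr.Conflicts {
				fmt.Printf("conflict in %s: %s\n", c.Filename, c.Reason)
			}
			return
		}
		log.Fatal(err)
	}

	fmt.Println("patch applies cleanly")
}
```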
+23
knotserver/xrpc/repo_blob.go
··· 1 1 package xrpc 2 2 3 3 import ( 4 + "context" 4 5 "crypto/sha256" 5 6 "encoding/base64" 6 7 "fmt" ··· 8 9 "path/filepath" 9 10 "slices" 10 11 "strings" 12 + "time" 11 13 12 14 "tangled.org/core/api/tangled" 13 15 "tangled.org/core/knotserver/git" ··· 140 142 141 143 if mimeType != "" { 142 144 response.MimeType = &mimeType 145 + } 146 + 147 + ctx, cancel := context.WithTimeout(r.Context(), 2*time.Second) 148 + defer cancel() 149 + 150 + lastCommit, err := gr.LastCommitFile(ctx, treePath) 151 + if err == nil && lastCommit != nil { 152 + response.LastCommit = &tangled.RepoBlob_LastCommit{ 153 + Hash: lastCommit.Hash.String(), 154 + Message: lastCommit.Message, 155 + When: lastCommit.When.Format(time.RFC3339), 156 + } 157 + 158 + // try to get author information 159 + commit, err := gr.Commit(lastCommit.Hash) 160 + if err == nil { 161 + response.LastCommit.Author = &tangled.RepoBlob_Signature{ 162 + Name: commit.Author.Name, 163 + Email: commit.Author.Email, 164 + } 165 + } 143 166 } 144 167 145 168 writeJson(w, response)
+14 -21
knotserver/xrpc/repo_branches.go
··· 17 17 return 18 18 } 19 19 20 - cursor := r.URL.Query().Get("cursor") 20 + // default 21 + limit := 50 22 + offset := 0 21 23 22 - // limit := 50 // default 23 - // if limitStr := r.URL.Query().Get("limit"); limitStr != "" { 24 - // if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 { 25 - // limit = l 26 - // } 27 - // } 24 + if l, err := strconv.Atoi(r.URL.Query().Get("limit")); err == nil && l > 0 && l <= 100 { 25 + limit = l 26 + } 28 27 29 - limit := 500 28 + if o, err := strconv.Atoi(r.URL.Query().Get("cursor")); err == nil && o > 0 { 29 + offset = o 30 + } 30 31 31 32 gr, err := git.PlainOpen(repoPath) 32 33 if err != nil { ··· 34 35 return 35 36 } 36 37 37 - branches, _ := gr.Branches() 38 - 39 - offset := 0 40 - if cursor != "" { 41 - if o, err := strconv.Atoi(cursor); err == nil && o >= 0 && o < len(branches) { 42 - offset = o 43 - } 44 - } 45 - 46 - end := min(offset+limit, len(branches)) 47 - 48 - paginatedBranches := branches[offset:end] 38 + branches, _ := gr.Branches(&git.BranchesOptions{ 39 + Limit: limit, 40 + Offset: offset, 41 + }) 49 42 50 43 // Create response using existing types.RepoBranchesResponse 51 44 response := types.RepoBranchesResponse{ 52 - Branches: paginatedBranches, 45 + Branches: branches, 53 46 } 54 47 55 48 writeJson(w, response)
+85
knotserver/xrpc/repo_tag.go
··· 1 + package xrpc 2 + 3 + import ( 4 + "fmt" 5 + "net/http" 6 + 7 + "github.com/go-git/go-git/v5/plumbing" 8 + "github.com/go-git/go-git/v5/plumbing/object" 9 + 10 + "tangled.org/core/knotserver/git" 11 + "tangled.org/core/types" 12 + xrpcerr "tangled.org/core/xrpc/errors" 13 + ) 14 + 15 + func (x *Xrpc) RepoTag(w http.ResponseWriter, r *http.Request) { 16 + repo := r.URL.Query().Get("repo") 17 + repoPath, err := x.parseRepoParam(repo) 18 + if err != nil { 19 + writeError(w, err.(xrpcerr.XrpcError), http.StatusBadRequest) 20 + return 21 + } 22 + 23 + tagName := r.URL.Query().Get("tag") 24 + if tagName == "" { 25 + writeError(w, xrpcerr.NewXrpcError( 26 + xrpcerr.WithTag("InvalidRequest"), 27 + xrpcerr.WithMessage("missing tag parameter"), 28 + ), http.StatusBadRequest) 29 + return 30 + } 31 + 32 + gr, err := git.PlainOpen(repoPath) 33 + if err != nil { 34 + x.Logger.Error("failed to open", "error", err) 35 + writeError(w, xrpcerr.RepoNotFoundError, http.StatusNoContent) 36 + return 37 + } 38 + 39 + // if this is not already formatted as refs/tags/v0.1.0, then format it 40 + if !plumbing.ReferenceName(tagName).IsTag() { 41 + tagName = plumbing.NewTagReferenceName(tagName).String() 42 + } 43 + 44 + tags, err := gr.Tags(&git.TagsOptions{ 45 + Pattern: tagName, 46 + }) 47 + if err != nil { 48 + x.Logger.Warn("getting tags", "error", err.Error()) 49 + writeError(w, xrpcerr.NewXrpcError(xrpcerr.WithTag("TagNotFound"), xrpcerr.WithMessage(err.Error())), http.StatusBadRequest) 50 + return 51 + } 52 + 53 + if len(tags) != 1 { 54 + writeError(w, xrpcerr.NewXrpcError( 55 + xrpcerr.WithTag("TagNotFound"), 56 + xrpcerr.WithMessage(fmt.Sprintf("expected 1 tag to be returned, got %d tags", len(tags))), 57 + ), http.StatusBadRequest) 58 + return 59 + } 60 + 61 + tag := tags[0] 62 + 63 + var target *object.Tag 64 + if tag.Target != plumbing.ZeroHash { 65 + target = &tag 66 + } 67 + tr := types.TagReference{ 68 + Tag: target, 69 + } 70 + 71 + tr.Reference = types.Reference{ 72 + Name: tag.Name, 73 + Hash: tag.Hash.String(), 74 + } 75 + 76 + if tag.Message != "" { 77 + tr.Message = tag.Message 78 + } 79 + 80 + response := types.RepoTagResponse{ 81 + Tag: &tr, 82 + } 83 + 84 + writeJson(w, response) 85 + }
+15 -22
knotserver/xrpc/repo_tags.go
··· 20 20 return 21 21 } 22 22 23 - cursor := r.URL.Query().Get("cursor") 23 + // default 24 + limit := 50 25 + offset := 0 24 26 25 - limit := 50 // default 26 - if limitStr := r.URL.Query().Get("limit"); limitStr != "" { 27 - if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 { 28 - limit = l 29 - } 27 + if l, err := strconv.Atoi(r.URL.Query().Get("limit")); err == nil && l > 0 && l <= 100 { 28 + limit = l 29 + } 30 + 31 + if o, err := strconv.Atoi(r.URL.Query().Get("cursor")); err == nil && o > 0 { 32 + offset = o 30 33 } 31 34 32 35 gr, err := git.PlainOpen(repoPath) ··· 36 39 return 37 40 } 38 41 39 - tags, err := gr.Tags() 42 + tags, err := gr.Tags(&git.TagsOptions{ 43 + Limit: limit, 44 + Offset: offset, 45 + }) 46 + 40 47 if err != nil { 41 48 x.Logger.Warn("getting tags", "error", err.Error()) 42 49 tags = []object.Tag{} ··· 64 71 rtags = append(rtags, &tr) 65 72 } 66 73 67 - // apply pagination manually 68 - offset := 0 69 - if cursor != "" { 70 - if o, err := strconv.Atoi(cursor); err == nil && o >= 0 && o < len(rtags) { 71 - offset = o 72 - } 73 - } 74 - 75 - // calculate end index 76 - end := min(offset+limit, len(rtags)) 77 - 78 - paginatedTags := rtags[offset:end] 79 - 80 - // Create response using existing types.RepoTagsResponse 81 74 response := types.RepoTagsResponse{ 82 - Tags: paginatedTags, 75 + Tags: rtags, 83 76 } 84 77 85 78 writeJson(w, response)
+35
knotserver/xrpc/repo_tree.go
··· 9 9 "tangled.org/core/api/tangled" 10 10 "tangled.org/core/appview/pages/markup" 11 11 "tangled.org/core/knotserver/git" 12 + "tangled.org/core/types" 12 13 xrpcerr "tangled.org/core/xrpc/errors" 13 14 ) 14 15 ··· 105 106 Filename: readmeFileName, 106 107 Contents: readmeContents, 107 108 }, 109 + } 110 + 111 + // calculate lastCommit for the directory as a whole 112 + var lastCommitTree *types.LastCommitInfo 113 + for _, e := range files { 114 + if e.LastCommit == nil { 115 + continue 116 + } 117 + 118 + if lastCommitTree == nil { 119 + lastCommitTree = e.LastCommit 120 + continue 121 + } 122 + 123 + if lastCommitTree.When.After(e.LastCommit.When) { 124 + lastCommitTree = e.LastCommit 125 + } 126 + } 127 + 128 + if lastCommitTree != nil { 129 + response.LastCommit = &tangled.RepoTree_LastCommit{ 130 + Hash: lastCommitTree.Hash.String(), 131 + Message: lastCommitTree.Message, 132 + When: lastCommitTree.When.Format(time.RFC3339), 133 + } 134 + 135 + // try to get author information 136 + commit, err := gr.Commit(lastCommitTree.Hash) 137 + if err == nil { 138 + response.LastCommit.Author = &tangled.RepoTree_Signature{ 139 + Name: commit.Author.Name, 140 + Email: commit.Author.Email, 141 + } 142 + } 108 143 } 109 144 110 145 writeJson(w, response)
+1
knotserver/xrpc/xrpc.go
··· 59 59 r.Get("/"+tangled.RepoLogNSID, x.RepoLog) 60 60 r.Get("/"+tangled.RepoBranchesNSID, x.RepoBranches) 61 61 r.Get("/"+tangled.RepoTagsNSID, x.RepoTags) 62 + r.Get("/"+tangled.RepoTagNSID, x.RepoTag) 62 63 r.Get("/"+tangled.RepoBlobNSID, x.RepoBlob) 63 64 r.Get("/"+tangled.RepoDiffNSID, x.RepoDiff) 64 65 r.Get("/"+tangled.RepoCompareNSID, x.RepoCompare)
+9 -4
lexicons/actor/profile.json
··· 8 8 "key": "literal:self", 9 9 "record": { 10 10 "type": "object", 11 - "required": [ 12 - "bluesky" 13 - ], 11 + "required": ["bluesky"], 14 12 "properties": { 13 + "avatar": { 14 + "type": "blob", 15 + "description": "Small image to be displayed next to posts from account. AKA, 'profile picture'", 16 + "accept": ["image/png", "image/jpeg"], 17 + "maxSize": 1000000 18 + }, 15 19 "description": { 16 20 "type": "string", 17 21 "description": "Free-form profile description text.", ··· 41 45 "open-pull-request-count", 42 46 "open-issue-count", 43 47 "closed-issue-count", 44 - "repository-count" 48 + "repository-count", 49 + "star-count" 45 50 ] 46 51 } 47 52 },
-57
lexicons/ci/event.json
··· 1 - { 2 - "lexicon": 1, 3 - "id": "sh.tangled.ci.event", 4 - "needsCbor": true, 5 - "needsType": true, 6 - "defs": { 7 - "main": { 8 - "type": "object", 9 - "required": [ 10 - "meta" 11 - ], 12 - "properties": { 13 - "meta": { 14 - "type": "union", 15 - "refs": [ 16 - "#pullRequest", 17 - "#push", 18 - "#manual" 19 - ] 20 - } 21 - } 22 - }, 23 - "pullRequest": { 24 - "type": "object", 25 - "required": [], 26 - "properties": {} 27 - }, 28 - "push": { 29 - "type": "object", 30 - "required": [ 31 - "ref", 32 - "oldSha", 33 - "newSha" 34 - ], 35 - "properties": { 36 - "ref": { 37 - "type": "string" 38 - }, 39 - "oldSha": { 40 - "type": "string", 41 - "minLength": 40, 42 - "maxLength": 40 43 - }, 44 - "newSha": { 45 - "type": "string", 46 - "minLength": 40, 47 - "maxLength": 40 48 - } 49 - } 50 - }, 51 - "manual": { 52 - "type": "object", 53 - "required": [], 54 - "properties": {} 55 - } 56 - } 57 - }
-30
lexicons/ci/pipeline.json
··· 1 - { 2 - "lexicon": 1, 3 - "id": "sh.tangled.ci.pipeline", 4 - "defs": { 5 - "main": { 6 - "type": "record", 7 - "key": "tid", 8 - "record": { 9 - "type": "object", 10 - "required": [ 11 - "event", 12 - "workflowRuns" 13 - ], 14 - "properties": { 15 - "event": { 16 - "type": "ref", 17 - "ref": "sh.tangled.ci.event" 18 - }, 19 - "workflowRuns": { 20 - "type": "array", 21 - "items": { 22 - "type": "string", 23 - "format": "at-uri" 24 - } 25 - } 26 - } 27 - } 28 - } 29 - } 30 - }
-30
lexicons/ci/workflow/run.json
··· 1 - { 2 - "lexicon": 1, 3 - "id": "sh.tangled.ci.workflow.run", 4 - "defs": { 5 - "main": { 6 - "type": "record", 7 - "key": "tid", 8 - "record": { 9 - "type": "object", 10 - "required": [ 11 - "name", 12 - "adapter", 13 - "status" 14 - ], 15 - "properties": { 16 - "name": { 17 - "type": "string" 18 - }, 19 - "adapter": { 20 - "type": "string" 21 - }, 22 - "status": { 23 - "type": "ref", 24 - "ref": "sh.tangled.ci.workflow.status" 25 - } 26 - } 27 - } 28 - } 29 - } 30 - }
-18
lexicons/ci/workflow/status.json
··· 1 - { 2 - "lexicon": 1, 3 - "id": "sh.tangled.ci.workflow.status", 4 - "defs": { 5 - "main": { 6 - "type": "string", 7 - "description": "status of the workflow run", 8 - "enum": [ 9 - "pending", 10 - "running", 11 - "failed", 12 - "timeout", 13 - "cancelled", 14 - "success" 15 - ] 16 - } 17 - } 18 - }
-4
lexicons/repo/blob.json
··· 115 115 "type": "string", 116 116 "description": "Commit hash" 117 117 }, 118 - "shortHash": { 119 - "type": "string", 120 - "description": "Short commit hash" 121 - }, 122 118 "message": { 123 119 "type": "string", 124 120 "description": "Commit message"
+43
lexicons/repo/tag.json
··· 1 + { 2 + "lexicon": 1, 3 + "id": "sh.tangled.repo.tag", 4 + "defs": { 5 + "main": { 6 + "type": "query", 7 + "parameters": { 8 + "type": "params", 9 + "required": [ 10 + "repo", 11 + "tag" 12 + ], 13 + "properties": { 14 + "repo": { 15 + "type": "string", 16 + "description": "Repository identifier in format 'did:plc:.../repoName'" 17 + }, 18 + "tag": { 19 + "type": "string", 20 + "description": "Name of tag, such as v1.3.0" 21 + } 22 + } 23 + }, 24 + "output": { 25 + "encoding": "*/*" 26 + }, 27 + "errors": [ 28 + { 29 + "name": "RepoNotFound", 30 + "description": "Repository not found or access denied" 31 + }, 32 + { 33 + "name": "TagNotFound", 34 + "description": "Tag not found" 35 + }, 36 + { 37 + "name": "InvalidRequest", 38 + "description": "Invalid request parameters" 39 + } 40 + ] 41 + } 42 + } 43 + }
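A sketch of calling the new query from a plain Go client, assuming the knot serves it at the standard /xrpc/<nsid> path; the host and repository identifier are placeholders, while the NSID comes from the lexicon id above:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
)

func main() {
	// placeholder knot host
	endpoint := "https://knot.example.com/xrpc/sh.tangled.repo.tag"

	q := url.Values{}
	q.Set("repo", "did:plc:example/repo-name")
	q.Set("tag", "v1.3.0")

	resp, err := http.Get(endpoint + "?" + q.Encode())
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Status, string(body))
}
```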
+53 -5
lexicons/repo/tree.json
··· 6 6 "type": "query", 7 7 "parameters": { 8 8 "type": "params", 9 - "required": ["repo", "ref"], 9 + "required": [ 10 + "repo", 11 + "ref" 12 + ], 10 13 "properties": { 11 14 "repo": { 12 15 "type": "string", ··· 27 30 "encoding": "application/json", 28 31 "schema": { 29 32 "type": "object", 30 - "required": ["ref", "files"], 33 + "required": [ 34 + "ref", 35 + "files" 36 + ], 31 37 "properties": { 32 38 "ref": { 33 39 "type": "string", ··· 45 51 "type": "ref", 46 52 "ref": "#readme", 47 53 "description": "Readme for this file tree" 54 + }, 55 + "lastCommit": { 56 + "type": "ref", 57 + "ref": "#lastCommit" 48 58 }, 49 59 "files": { 50 60 "type": "array", ··· 77 87 }, 78 88 "readme": { 79 89 "type": "object", 80 - "required": ["filename", "contents"], 90 + "required": [ 91 + "filename", 92 + "contents" 93 + ], 81 94 "properties": { 82 95 "filename": { 83 96 "type": "string", ··· 91 104 }, 92 105 "treeEntry": { 93 106 "type": "object", 94 - "required": ["name", "mode", "size"], 107 + "required": [ 108 + "name", 109 + "mode", 110 + "size" 111 + ], 95 112 "properties": { 96 113 "name": { 97 114 "type": "string", ··· 113 130 }, 114 131 "lastCommit": { 115 132 "type": "object", 116 - "required": ["hash", "message", "when"], 133 + "required": [ 134 + "hash", 135 + "message", 136 + "when" 137 + ], 117 138 "properties": { 118 139 "hash": { 119 140 "type": "string", ··· 123 144 "type": "string", 124 145 "description": "Commit message" 125 146 }, 147 + "author": { 148 + "type": "ref", 149 + "ref": "#signature" 150 + }, 126 151 "when": { 127 152 "type": "string", 128 153 "format": "datetime", 129 154 "description": "Commit timestamp" 155 + } 156 + } 157 + }, 158 + "signature": { 159 + "type": "object", 160 + "required": [ 161 + "name", 162 + "email", 163 + "when" 164 + ], 165 + "properties": { 166 + "name": { 167 + "type": "string", 168 + "description": "Author name" 169 + }, 170 + "email": { 171 + "type": "string", 172 + "description": "Author email" 173 + }, 174 + "when": { 175 + "type": "string", 176 + "format": "datetime", 177 + "description": "Author timestamp" 130 178 } 131 179 } 132 180 }
+4 -7
nix/gomod2nix.toml
··· 20 20 version = "v2.11.0" 21 21 hash = "sha256-tDJCDKZ0R4qNA7hgMKWrpDyogt1802LCJDBCExxdqaU=" 22 22 [mod."github.com/alecthomas/chroma/v2"] 23 - version = "v2.19.0" 24 - hash = "sha256-dxsu43a+PvHg2jYR0Tfys6a8x6IVR+9oCGAh+fvL3SM=" 23 + version = "v2.24.2" 24 + hash = "sha256-Xz4DLZpn98rwaLmNNztK3PJu9MVxDLSrhJI82ZzyFZo=" 25 25 replaced = "github.com/oppiliappan/chroma/v2" 26 26 [mod."github.com/alecthomas/repr"] 27 - version = "v0.4.0" 28 - hash = "sha256-CyAzMSTfLGHDtfGXi91y7XMVpPUDNOKjsznb+osl9dU=" 27 + version = "v0.5.2" 28 + hash = "sha256-PfIeyHh7xTbDN0g2otuDyUOQqbgS4KftVC1JKZ+6sdM=" 29 29 [mod."github.com/anmitsu/go-shlex"] 30 30 version = "v0.0.0-20200514113438-38f4b401e2be" 31 31 hash = "sha256-L3Ak4X2z7WXq7vMKuiHCOJ29nlpajUQ08Sfb9T0yP54=" ··· 304 304 [mod."github.com/hashicorp/go-sockaddr"] 305 305 version = "v1.0.7" 306 306 hash = "sha256-p6eDOrGzN1jMmT/F/f/VJMq0cKNFhUcEuVVwTE6vSrs=" 307 - [mod."github.com/hashicorp/go-version"] 308 - version = "v1.8.0" 309 - hash = "sha256-KXtqERmYrWdpqPCViWcHbe6jnuH7k16bvBIcuJuevj8=" 310 307 [mod."github.com/hashicorp/golang-lru"] 311 308 version = "v1.0.2" 312 309 hash = "sha256-yy+5botc6T5wXgOe2mfNXJP3wr+MkVlUZ2JBkmmrA48="
+2 -2
nix/modules/appview.nix
··· 41 41 42 42 appviewHost = mkOption { 43 43 type = types.str; 44 - default = "https://tangled.org"; 45 - example = "https://example.com"; 44 + default = "tangled.org"; 45 + example = "example.com"; 46 46 description = "Public host URL for the appview instance"; 47 47 }; 48 48
-64
nix/modules/bluesky-jetstream.nix
··· 1 - { 2 - config, 3 - pkgs, 4 - lib, 5 - ... 6 - }: let 7 - cfg = config.services.bluesky-jetstream; 8 - in 9 - with lib; { 10 - options.services.bluesky-jetstream = { 11 - enable = mkEnableOption "jetstream server"; 12 - package = mkPackageOption pkgs "bluesky-jetstream" {}; 13 - 14 - # dataDir = mkOption { 15 - # type = types.str; 16 - # default = "/var/lib/jetstream"; 17 - # description = "directory to store data (pebbleDB)"; 18 - # }; 19 - livenessTtl = mkOption { 20 - type = types.int; 21 - default = 15; 22 - description = "time to restart when no event detected (seconds)"; 23 - }; 24 - websocketUrl = mkOption { 25 - type = types.str; 26 - default = "wss://bsky.network/xrpc/com.atproto.sync.subscribeRepos"; 27 - description = "full websocket path to the ATProto SubscribeRepos XRPC endpoint"; 28 - }; 29 - }; 30 - config = mkIf cfg.enable { 31 - systemd.services.bluesky-jetstream = { 32 - description = "bluesky jetstream"; 33 - after = ["network.target" "pds.service"]; 34 - wantedBy = ["multi-user.target"]; 35 - 36 - serviceConfig = { 37 - User = "jetstream"; 38 - Group = "jetstream"; 39 - StateDirectory = "jetstream"; 40 - StateDirectoryMode = "0755"; 41 - # preStart = '' 42 - # mkdir -p "${cfg.dataDir}" 43 - # chown -R jetstream:jetstream "${cfg.dataDir}" 44 - # ''; 45 - # WorkingDirectory = cfg.dataDir; 46 - Environment = [ 47 - "JETSTREAM_DATA_DIR=/var/lib/jetstream/data" 48 - "JETSTREAM_LIVENESS_TTL=${toString cfg.livenessTtl}s" 49 - "JETSTREAM_WS_URL=${cfg.websocketUrl}" 50 - ]; 51 - ExecStart = getExe cfg.package; 52 - Restart = "always"; 53 - RestartSec = 5; 54 - }; 55 - }; 56 - users = { 57 - users.jetstream = { 58 - group = "jetstream"; 59 - isSystemUser = true; 60 - }; 61 - groups.jetstream = {}; 62 - }; 63 - }; 64 - }
-48
nix/modules/bluesky-relay.nix
··· 1 - { 2 - config, 3 - pkgs, 4 - lib, 5 - ... 6 - }: let 7 - cfg = config.services.bluesky-relay; 8 - in 9 - with lib; { 10 - options.services.bluesky-relay = { 11 - enable = mkEnableOption "relay server"; 12 - package = mkPackageOption pkgs "bluesky-relay" {}; 13 - }; 14 - config = mkIf cfg.enable { 15 - systemd.services.bluesky-relay = { 16 - description = "bluesky relay"; 17 - after = ["network.target" "pds.service"]; 18 - wantedBy = ["multi-user.target"]; 19 - 20 - serviceConfig = { 21 - User = "relay"; 22 - Group = "relay"; 23 - StateDirectory = "relay"; 24 - StateDirectoryMode = "0755"; 25 - Environment = [ 26 - "RELAY_ADMIN_PASSWORD=password" 27 - "RELAY_PLC_HOST=https://plc.tngl.boltless.dev" 28 - "DATABASE_URL=sqlite:///var/lib/relay/relay.sqlite" 29 - "RELAY_IP_BIND=:2470" 30 - "RELAY_PERSIST_DIR=/var/lib/relay" 31 - "RELAY_DISABLE_REQUEST_CRAWL=0" 32 - "RELAY_INITIAL_SEQ_NUMBER=1" 33 - "RELAY_ALLOW_INSECURE_HOSTS=1" 34 - ]; 35 - ExecStart = "${getExe cfg.package} serve"; 36 - Restart = "always"; 37 - RestartSec = 5; 38 - }; 39 - }; 40 - users = { 41 - users.relay = { 42 - group = "relay"; 43 - isSystemUser = true; 44 - }; 45 - groups.relay = {}; 46 - }; 47 - }; 48 - }
-76
nix/modules/did-method-plc.nix
··· 1 - { 2 - config, 3 - pkgs, 4 - lib, 5 - ... 6 - }: let 7 - cfg = config.services.did-method-plc; 8 - in 9 - with lib; { 10 - options.services.did-method-plc = { 11 - enable = mkEnableOption "did-method-plc server"; 12 - package = mkPackageOption pkgs "did-method-plc" {}; 13 - }; 14 - config = mkIf cfg.enable { 15 - services.postgresql = { 16 - enable = true; 17 - package = pkgs.postgresql_14; 18 - ensureDatabases = ["plc"]; 19 - ensureUsers = [ 20 - { 21 - name = "pg"; 22 - # ensurePermissions."DATABASE plc" = "ALL PRIVILEGES"; 23 - } 24 - ]; 25 - authentication = '' 26 - local all all trust 27 - host all all 127.0.0.1/32 trust 28 - ''; 29 - }; 30 - systemd.services.did-method-plc = { 31 - description = "did-method-plc"; 32 - 33 - after = ["postgresql.service"]; 34 - wants = ["postgresql.service"]; 35 - wantedBy = ["multi-user.target"]; 36 - 37 - environment = let 38 - db_creds_json = builtins.toJSON { 39 - username = "pg"; 40 - password = ""; 41 - host = "127.0.0.1"; 42 - port = 5432; 43 - }; 44 - in { 45 - # TODO: inherit from config 46 - DEBUG_MODE = "1"; 47 - LOG_ENABLED = "true"; 48 - LOG_LEVEL = "debug"; 49 - LOG_DESTINATION = "1"; 50 - ENABLE_MIGRATIONS = "true"; 51 - DB_CREDS_JSON = db_creds_json; 52 - DB_MIGRATE_CREDS_JSON = db_creds_json; 53 - PLC_VERSION = "0.0.1"; 54 - PORT = "8080"; 55 - }; 56 - 57 - serviceConfig = { 58 - ExecStart = getExe cfg.package; 59 - User = "plc"; 60 - Group = "plc"; 61 - StateDirectory = "plc"; 62 - StateDirectoryMode = "0755"; 63 - Restart = "always"; 64 - 65 - # Hardening 66 - }; 67 - }; 68 - users = { 69 - users.plc = { 70 - group = "plc"; 71 - isSystemUser = true; 72 - }; 73 - groups.plc = {}; 74 - }; 75 - }; 76 - }
+12 -46
nix/modules/spindle.nix
··· 1 1 { 2 2 config, 3 - pkgs, 4 3 lib, 5 4 ... 6 5 }: let ··· 18 17 type = types.package; 19 18 description = "Package to use for the spindle"; 20 19 }; 21 - tap-package = mkOption { 22 - type = types.package; 23 - description = "Package to use for the spindle"; 24 - }; 25 - 26 - atpRelayUrl = mkOption { 27 - type = types.str; 28 - default = "https://relay1.us-east.bsky.network"; 29 - description = "atproto relay"; 30 - }; 31 20 32 21 server = { 33 22 listenAddr = mkOption { ··· 36 25 description = "Address to listen on"; 37 26 }; 38 27 39 - stateDir = mkOption { 28 + dbPath = mkOption { 40 29 type = types.path; 41 - default = "/var/lib/spindle"; 42 - description = "Tangled spindle data directory"; 30 + default = "/var/lib/spindle/spindle.db"; 31 + description = "Path to the database file"; 43 32 }; 44 33 45 34 hostname = mkOption { ··· 52 41 type = types.str; 53 42 default = "https://plc.directory"; 54 43 description = "atproto PLC directory"; 44 + }; 45 + 46 + jetstreamEndpoint = mkOption { 47 + type = types.str; 48 + default = "wss://jetstream1.us-west.bsky.network/subscribe"; 49 + description = "Jetstream endpoint to subscribe to"; 55 50 }; 56 51 57 52 dev = mkOption { ··· 119 114 config = mkIf cfg.enable { 120 115 virtualisation.docker.enable = true; 121 116 122 - systemd.services.spindle-tap = { 123 - description = "spindle tap service"; 124 - after = ["network.target" "docker.service"]; 125 - wantedBy = ["multi-user.target"]; 126 - serviceConfig = { 127 - LogsDirectory = "spindle-tap"; 128 - StateDirectory = "spindle-tap"; 129 - Environment = [ 130 - "TAP_BIND=:2480" 131 - "TAP_PLC_URL=${cfg.server.plcUrl}" 132 - "TAP_RELAY_URL=${cfg.atpRelayUrl}" 133 - "TAP_DATABASE_URL=sqlite:///var/lib/spindle-tap/tap.db" 134 - "TAP_RETRY_TIMEOUT=3s" 135 - "TAP_COLLECTION_FILTERS=${concatStringsSep "," [ 136 - "sh.tangled.repo" 137 - "sh.tangled.repo.collaborator" 138 - "sh.tangled.spindle.member" 139 - "sh.tangled.repo.pull" 140 - ]}" 141 - # temporary hack to listen for repo.pull from non-tangled users 142 - "TAP_SIGNAL_COLLECTION=sh.tangled.repo.pull" 143 - ]; 144 - ExecStart = "${getExe cfg.tap-package} run"; 145 - }; 146 - }; 147 - 148 117 systemd.services.spindle = { 149 118 description = "spindle service"; 150 - after = ["network.target" "docker.service" "spindle-tap.service"]; 119 + after = ["network.target" "docker.service"]; 151 120 wantedBy = ["multi-user.target"]; 152 - path = [ 153 - pkgs.git 154 - ]; 155 121 serviceConfig = { 156 122 LogsDirectory = "spindle"; 157 123 StateDirectory = "spindle"; 158 124 Environment = [ 159 125 "SPINDLE_SERVER_LISTEN_ADDR=${cfg.server.listenAddr}" 160 - "SPINDLE_SERVER_DATA_DIR=${cfg.server.stateDir}" 126 + "SPINDLE_SERVER_DB_PATH=${cfg.server.dbPath}" 161 127 "SPINDLE_SERVER_HOSTNAME=${cfg.server.hostname}" 162 128 "SPINDLE_SERVER_PLC_URL=${cfg.server.plcUrl}" 129 + "SPINDLE_SERVER_JETSTREAM_ENDPOINT=${cfg.server.jetstreamEndpoint}" 163 130 "SPINDLE_SERVER_DEV=${lib.boolToString cfg.server.dev}" 164 131 "SPINDLE_SERVER_OWNER=${cfg.server.owner}" 165 132 "SPINDLE_SERVER_MAX_JOB_COUNT=${toString cfg.server.maxJobCount}" ··· 167 134 "SPINDLE_SERVER_SECRETS_PROVIDER=${cfg.server.secrets.provider}" 168 135 "SPINDLE_SERVER_SECRETS_OPENBAO_PROXY_ADDR=${cfg.server.secrets.openbao.proxyAddr}" 169 136 "SPINDLE_SERVER_SECRETS_OPENBAO_MOUNT=${cfg.server.secrets.openbao.mount}" 170 - "SPINDLE_SERVER_TAP_URL=http://localhost:2480" 171 137 "SPINDLE_NIXERY_PIPELINES_NIXERY=${cfg.pipelines.nixery}" 172 138 
"SPINDLE_NIXERY_PIPELINES_WORKFLOW_TIMEOUT=${cfg.pipelines.workflowTimeout}" 173 139 ];
-20
nix/pkgs/bluesky-jetstream.nix
··· 1 - { 2 - buildGoModule, 3 - fetchFromGitHub, 4 - }: 5 - buildGoModule { 6 - pname = "bluesky-jetstream"; 7 - version = "0.1.0"; 8 - src = fetchFromGitHub { 9 - owner = "bluesky-social"; 10 - repo = "jetstream"; 11 - rev = "7d7efa58d7f14101a80ccc4f1085953948b7d5de"; 12 - sha256 = "sha256-1e9SL/8gaDPMA4YZed51ffzgpkptbMd0VTbTTDbPTFw="; 13 - }; 14 - subPackages = ["cmd/jetstream"]; 15 - vendorHash = "sha256-/21XJQH6fo9uPzlABUAbdBwt1O90odmppH6gXu2wkiQ="; 16 - doCheck = false; 17 - meta = { 18 - mainProgram = "jetstream"; 19 - }; 20 - }
-20
nix/pkgs/bluesky-relay.nix
··· 1 - { 2 - buildGoModule, 3 - fetchFromGitHub, 4 - }: 5 - buildGoModule { 6 - pname = "bluesky-relay"; 7 - version = "0.1.0"; 8 - src = fetchFromGitHub { 9 - owner = "boltlessengineer"; 10 - repo = "indigo"; 11 - rev = "7fe70a304d795b998f354d2b7b2050b909709c99"; 12 - sha256 = "sha256-+h34x67cqH5t30+8rua53/ucvbn3BanrmH0Og3moHok="; 13 - }; 14 - subPackages = ["cmd/relay"]; 15 - vendorHash = "sha256-UOedwNYnM8Jx6B7Y9tFcZX8IeUBESAFAPTRYk7n0yo8="; 16 - doCheck = false; 17 - meta = { 18 - mainProgram = "relay"; 19 - }; 20 - }
-65
nix/pkgs/did-method-plc.nix
··· 1 - # inspired by https://github.com/NixOS/nixpkgs/blob/333bfb7c258fab089a834555ea1c435674c459b4/pkgs/by-name/ga/gatsby-cli/package.nix 2 - { 3 - lib, 4 - stdenv, 5 - fetchFromGitHub, 6 - fetchYarnDeps, 7 - yarnConfigHook, 8 - yarnBuildHook, 9 - nodejs, 10 - makeBinaryWrapper, 11 - }: 12 - stdenv.mkDerivation (finalAttrs: { 13 - pname = "did-method-plc"; 14 - version = "0.0.1"; 15 - 16 - src = fetchFromGitHub { 17 - owner = "did-method-plc"; 18 - repo = "did-method-plc"; 19 - rev = "158ba5535ac3da4fd4309954bde41deab0b45972"; 20 - sha256 = "sha256-O5smubbrnTDMCvL6iRyMXkddr5G7YHxkQRVMRULHanQ="; 21 - }; 22 - postPatch = '' 23 - # remove dd-trace dependency 24 - sed -i '3d' packages/server/service/index.js 25 - ''; 26 - 27 - yarnOfflineCache = fetchYarnDeps { 28 - yarnLock = finalAttrs.src + "/yarn.lock"; 29 - hash = "sha256-g8GzaAbWSnWwbQjJMV2DL5/ZlWCCX0sRkjjvX3tqU4Y="; 30 - }; 31 - 32 - nativeBuildInputs = [ 33 - yarnConfigHook 34 - yarnBuildHook 35 - nodejs 36 - makeBinaryWrapper 37 - ]; 38 - yarnBuildScript = "lerna"; 39 - yarnBuildFlags = [ 40 - "run" 41 - "build" 42 - "--scope" 43 - "@did-plc/server" 44 - "--include-dependencies" 45 - ]; 46 - 47 - installPhase = '' 48 - runHook preInstall 49 - 50 - mkdir -p $out/lib/node_modules/ 51 - mv packages/ $out/lib/packages/ 52 - mv node_modules/* $out/lib/node_modules/ 53 - 54 - makeWrapper ${lib.getExe nodejs} $out/bin/plc \ 55 - --add-flags $out/lib/packages/server/service/index.js \ 56 - --add-flags --enable-source-maps \ 57 - --set NODE_PATH $out/lib/node_modules 58 - 59 - runHook postInstall 60 - ''; 61 - 62 - meta = { 63 - mainProgram = "plc"; 64 - }; 65 - })
+3 -8
nix/pkgs/lexgen.nix
··· 1 1 { 2 2 buildGoModule, 3 - fetchFromGitHub, 3 + indigo, 4 4 }: 5 5 buildGoModule { 6 6 pname = "lexgen"; 7 7 version = "0.1.0"; 8 - src = fetchFromGitHub { 9 - owner = "boltlessengineer"; 10 - repo = "indigo"; 11 - rev = "push-rossmvqxklvk"; 12 - sha256 = "sha256-t3SgVmfaOG+VaDHh4I03sifc3Als05zaUcDhFywfJao="; 13 - }; 8 + src = indigo; 14 9 subPackages = ["cmd/lexgen"]; 15 - vendorHash = "sha256-UOedwNYnM8Jx6B7Y9tFcZX8IeUBESAFAPTRYk7n0yo8="; 10 + vendorHash = "sha256-VbDrcN4r5b7utRFQzVsKgDsVgdQLSXl7oZ5kdPA/huw="; 16 11 doCheck = false; 17 12 }
-20
nix/pkgs/tap.nix
··· 1 - { 2 - buildGoModule, 3 - fetchFromGitHub, 4 - }: 5 - buildGoModule { 6 - pname = "tap"; 7 - version = "0.1.0"; 8 - src = fetchFromGitHub { 9 - owner = "bluesky-social"; 10 - repo = "indigo"; 11 - rev = "498ecb9693e8ae050f73234c86f340f51ad896a9"; 12 - sha256 = "sha256-KASCdwkg/hlKBt7RTW3e3R5J3hqJkphoarFbaMgtN1k="; 13 - }; 14 - subPackages = ["cmd/tap"]; 15 - vendorHash = "sha256-UOedwNYnM8Jx6B7Y9tFcZX8IeUBESAFAPTRYk7n0yo8="; 16 - doCheck = false; 17 - meta = { 18 - mainProgram = "tap"; 19 - }; 20 - }
+3 -132
nix/vm.nix
··· 19 19 20 20 plcUrl = envVarOr "TANGLED_VM_PLC_URL" "https://plc.directory"; 21 21 jetstream = envVarOr "TANGLED_VM_JETSTREAM_ENDPOINT" "wss://jetstream1.us-west.bsky.network/subscribe"; 22 - relayUrl = envVarOr "TANGLED_VM_RELAY_URL" "https://relay1.us-east.bsky.network"; 23 22 in 24 23 nixpkgs.lib.nixosSystem { 25 24 inherit system; 26 25 modules = [ 27 - self.nixosModules.did-method-plc 28 - self.nixosModules.bluesky-jetstream 29 - self.nixosModules.bluesky-relay 30 26 self.nixosModules.knot 31 27 self.nixosModules.spindle 32 28 ({ ··· 43 39 diskSize = 10 * 1024; 44 40 cores = 2; 45 41 forwardPorts = [ 46 - # caddy 47 - { 48 - from = "host"; 49 - host.port = 80; 50 - guest.port = 80; 51 - } 52 - { 53 - from = "host"; 54 - host.port = 443; 55 - guest.port = 443; 56 - } 57 - { 58 - from = "host"; 59 - proto = "udp"; 60 - host.port = 443; 61 - guest.port = 443; 62 - } 63 42 # ssh 64 43 { 65 44 from = "host"; ··· 77 56 from = "host"; 78 57 host.port = 6555; 79 58 guest.port = 6555; 80 - } 81 - { 82 - from = "host"; 83 - host.port = 6556; 84 - guest.port = 2480; 85 59 } 86 60 ]; 87 61 sharedDirectories = { ··· 89 63 # as SQLite is incompatible with them. So instead we 90 64 # mount the shared directories to a different location 91 65 # and copy the contents around on service start/stop. 92 - caddyData = { 93 - source = "$TANGLED_VM_DATA_DIR/caddy"; 94 - target = config.services.caddy.dataDir; 95 - }; 96 66 knotData = { 97 67 source = "$TANGLED_VM_DATA_DIR/knot"; 98 68 target = "/mnt/knot-data"; ··· 109 79 }; 110 80 # This is fine because any and all ports that are forwarded to host are explicitly marked above, we don't need a separate guest firewall 111 81 networking.firewall.enable = false; 112 - # resolve `*.tngl.boltless.dev` to host 113 - services.dnsmasq.enable = true; 114 - services.dnsmasq.settings.address = "/tngl.boltless.dev/10.0.2.2"; 115 - security.pki.certificates = [ 116 - (builtins.readFile ../contrib/certs/root.crt) 117 - ]; 118 82 time.timeZone = "Europe/London"; 119 - services.timesyncd.enable = lib.mkVMOverride true; 120 83 services.getty.autologinUser = "root"; 121 84 environment.systemPackages = with pkgs; [curl vim git sqlite litecli]; 122 - virtualisation.docker.extraOptions = '' 123 - --dns 172.17.0.1 124 - ''; 125 85 services.tangled.knot = { 126 86 enable = true; 127 87 motd = "Welcome to the development knot!\n"; ··· 131 91 plcUrl = plcUrl; 132 92 jetstreamEndpoint = jetstream; 133 93 listenAddr = "0.0.0.0:6444"; 134 - dev = true; 135 94 }; 136 95 }; 137 96 services.tangled.spindle = { 138 97 enable = true; 139 - atpRelayUrl = relayUrl; 140 98 server = { 141 99 owner = envVar "TANGLED_VM_SPINDLE_OWNER"; 142 100 hostname = envVarOr "TANGLED_VM_SPINDLE_HOST" "localhost:6555"; 143 101 plcUrl = plcUrl; 102 + jetstreamEndpoint = jetstream; 144 103 listenAddr = "0.0.0.0:6555"; 145 - dev = false; 104 + dev = true; 146 105 queueSize = 100; 147 106 maxJobCount = 2; 148 107 secrets = { ··· 150 109 }; 151 110 }; 152 111 }; 153 - services.did-method-plc.enable = true; 154 - services.bluesky-pds = { 155 - enable = true; 156 - # overriding package version to support emails 157 - package = pkgs.bluesky-pds.overrideAttrs (old: rec { 158 - version = "0.4.188"; 159 - src = pkgs.fetchFromGitHub { 160 - owner = "bluesky-social"; 161 - repo = "pds"; 162 - tag = "v${version}"; 163 - hash = "sha256-t8KdyEygXdbj/5Rhj8W40e1o8mXprELpjsKddHExmo0="; 164 - }; 165 - pnpmDeps = pkgs.fetchPnpmDeps { 166 - inherit version src; 167 - pname = old.pname; 168 - sourceRoot = old.sourceRoot; 169 - 
fetcherVersion = 2; 170 - hash = "sha256-lQie7f8JbWKSpoavnMjHegBzH3GB9teXsn+S2SLJHHU="; 171 - }; 172 - }); 173 - settings = { 174 - LOG_ENABLED = "true"; 175 - 176 - PDS_JWT_SECRET = "8cae8bffcc73d9932819650791e4e89a"; 177 - PDS_ADMIN_PASSWORD = "d6a902588cd93bee1af83f924f60cfd3"; 178 - PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX = "2e92e336a50a618458e1097d94a1db86ec3fd8829d7735020cbae80625c761d7"; 179 - 180 - PDS_EMAIL_SMTP_URL = envVarOr "TANGLED_VM_PDS_EMAIL_SMTP_URL" null; 181 - PDS_EMAIL_FROM_ADDRESS = envVarOr "TANGLED_VM_PDS_EMAIL_FROM_ADDRESS" null; 182 - 183 - PDS_DID_PLC_URL = "http://localhost:8080"; 184 - PDS_CRAWLERS = "https://relay.tngl.boltless.dev"; 185 - PDS_HOSTNAME = "pds.tngl.boltless.dev"; 186 - PDS_PORT = 3000; 187 - }; 188 - }; 189 - services.bluesky-relay = { 190 - enable = true; 191 - }; 192 - services.bluesky-jetstream = { 193 - enable = true; 194 - livenessTtl = 300; 195 - websocketUrl = "ws://localhost:3000/xrpc/com.atproto.sync.subscribeRepos"; 196 - }; 197 - services.caddy = { 198 - enable = true; 199 - configFile = pkgs.writeText "Caddyfile" '' 200 - { 201 - debug 202 - cert_lifetime 3601d 203 - pki { 204 - ca local { 205 - intermediate_lifetime 3599d 206 - } 207 - } 208 - } 209 - 210 - plc.tngl.boltless.dev { 211 - tls internal 212 - reverse_proxy http://localhost:8080 213 - } 214 - 215 - *.pds.tngl.boltless.dev, pds.tngl.boltless.dev { 216 - tls internal 217 - reverse_proxy http://localhost:3000 218 - } 219 - 220 - jetstream.tngl.boltless.dev { 221 - tls internal 222 - reverse_proxy http://localhost:6008 223 - } 224 - 225 - relay.tngl.boltless.dev { 226 - tls internal 227 - reverse_proxy http://localhost:2470 228 - } 229 - 230 - knot.tngl.boltless.dev { 231 - tls internal 232 - reverse_proxy http://localhost:6444 233 - } 234 - 235 - spindle.tngl.boltless.dev { 236 - tls internal 237 - reverse_proxy http://localhost:6555 238 - } 239 - ''; 240 - }; 241 112 users = { 242 113 # So we don't have to deal with permission clashing between 243 114 # blank disk VMs and existing state ··· 263 134 }; 264 135 in { 265 136 knot = mkDataSyncScripts "/mnt/knot-data" config.services.tangled.knot.stateDir; 266 - spindle = mkDataSyncScripts "/mnt/spindle-data" config.services.tangled.spindle.server.stateDir; 137 + spindle = mkDataSyncScripts "/mnt/spindle-data" (builtins.dirOf config.services.tangled.spindle.server.dbPath); 267 138 }; 268 139 }) 269 140 ];
-10
orm/orm.go
··· 20 20 } 21 21 defer tx.Rollback() 22 22 23 - _, err = tx.Exec(` 24 - create table if not exists migrations ( 25 - id integer primary key autoincrement, 26 - name text unique 27 - ); 28 - `) 29 - if err != nil { 30 - return fmt.Errorf("creating migrations table: %w", err) 31 - } 32 - 33 23 var exists bool 34 24 err = tx.QueryRow("select exists (select 1 from migrations where name = ?)", name).Scan(&exists) 35 25 if err != nil {
-52
rbac2/bytesadapter/adapter.go
··· 1 - package bytesadapter 2 - 3 - import ( 4 - "bufio" 5 - "bytes" 6 - "errors" 7 - "strings" 8 - 9 - "github.com/casbin/casbin/v2/model" 10 - "github.com/casbin/casbin/v2/persist" 11 - ) 12 - 13 - var ( 14 - errNotImplemented = errors.New("not implemented") 15 - ) 16 - 17 - type Adapter struct { 18 - b []byte 19 - } 20 - 21 - var _ persist.Adapter = &Adapter{} 22 - 23 - func NewAdapter(b []byte) *Adapter { 24 - return &Adapter{b} 25 - } 26 - 27 - func (a *Adapter) LoadPolicy(model model.Model) error { 28 - scanner := bufio.NewScanner(bytes.NewReader(a.b)) 29 - for scanner.Scan() { 30 - line := strings.TrimSpace(scanner.Text()) 31 - if err := persist.LoadPolicyLine(line, model); err != nil { 32 - return err 33 - } 34 - } 35 - return scanner.Err() 36 - } 37 - 38 - func (a *Adapter) AddPolicy(sec string, ptype string, rule []string) error { 39 - return errNotImplemented 40 - } 41 - 42 - func (a *Adapter) RemoveFilteredPolicy(sec string, ptype string, fieldIndex int, fieldValues ...string) error { 43 - return errNotImplemented 44 - } 45 - 46 - func (a *Adapter) RemovePolicy(sec string, ptype string, rule []string) error { 47 - return errNotImplemented 48 - } 49 - 50 - func (a *Adapter) SavePolicy(model model.Model) error { 51 - return errNotImplemented 52 - }
-139
rbac2/rbac2.go
··· 1 - package rbac2 2 - 3 - import ( 4 - "database/sql" 5 - _ "embed" 6 - "fmt" 7 - 8 - adapter "github.com/Blank-Xu/sql-adapter" 9 - "github.com/bluesky-social/indigo/atproto/syntax" 10 - "github.com/casbin/casbin/v2" 11 - "github.com/casbin/casbin/v2/model" 12 - "github.com/casbin/casbin/v2/util" 13 - "tangled.org/core/rbac2/bytesadapter" 14 - ) 15 - 16 - const ( 17 - Model = ` 18 - [request_definition] 19 - r = sub, dom, obj, act 20 - 21 - [policy_definition] 22 - p = sub, dom, obj, act 23 - 24 - [role_definition] 25 - g = _, _, _ 26 - 27 - [policy_effect] 28 - e = some(where (p.eft == allow)) 29 - 30 - [matchers] 31 - m = g(r.sub, p.sub, r.dom) && keyMatch4(r.dom, p.dom) && r.obj == p.obj && r.act == p.act 32 - ` 33 - ) 34 - 35 - type Enforcer struct { 36 - e *casbin.Enforcer 37 - } 38 - 39 - //go:embed tangled_policy.csv 40 - var tangledPolicy []byte 41 - 42 - func NewEnforcer(path string) (*Enforcer, error) { 43 - db, err := sql.Open("sqlite3", path+"?_foreign_keys=1") 44 - if err != nil { 45 - return nil, err 46 - } 47 - return NewEnforcerWithDB(db) 48 - } 49 - 50 - func NewEnforcerWithDB(db *sql.DB) (*Enforcer, error) { 51 - m, err := model.NewModelFromString(Model) 52 - if err != nil { 53 - return nil, err 54 - } 55 - 56 - a, err := adapter.NewAdapter(db, "sqlite3", "acl") 57 - if err != nil { 58 - return nil, err 59 - } 60 - 61 - // // PATCH: create unique index to make `AddPoliciesEx` work 62 - // _, err = db.Exec(fmt.Sprintf( 63 - // `create unique index if not exists uq_%[1]s on %[1]s (p_type,v0,v1,v2,v3,v4,v5);`, 64 - // tableName, 65 - // )) 66 - // if err != nil { 67 - // return nil, err 68 - // } 69 - 70 - e, _ := casbin.NewEnforcer() // NewEnforcer() without param won't return error 71 - // e.EnableLog(true) 72 - 73 - // NOTE: casbin clears the model on init, so we should intialize with temporary adapter first 74 - // and then override the adapter to sql-adapter. 75 - // `e.SetModel(m)` after init doesn't work for some reason 76 - if err := e.InitWithModelAndAdapter(m, bytesadapter.NewAdapter(tangledPolicy)); err != nil { 77 - return nil, err 78 - } 79 - 80 - // load dynamic policy from db 81 - e.EnableAutoSave(false) 82 - if err := a.LoadPolicy(e.GetModel()); err != nil { 83 - return nil, err 84 - } 85 - e.AddNamedDomainMatchingFunc("g", "keyMatch4", util.KeyMatch4) 86 - e.BuildRoleLinks() 87 - e.SetAdapter(a) 88 - e.EnableAutoSave(true) 89 - 90 - return &Enforcer{e}, nil 91 - } 92 - 93 - // CaptureModel returns copy of current model. Used for testing 94 - func (e *Enforcer) CaptureModel() model.Model { 95 - return e.e.GetModel().Copy() 96 - } 97 - 98 - func (e *Enforcer) hasImplicitRoleForUser(name string, role string, domain ...string) (bool, error) { 99 - roles, err := e.e.GetImplicitRolesForUser(name, domain...) 100 - if err != nil { 101 - return false, err 102 - } 103 - for _, r := range roles { 104 - if r == role { 105 - return true, nil 106 - } 107 - } 108 - return false, nil 109 - } 110 - 111 - // setRoleForUser sets single user role for specified domain. 112 - // All existing users with that role will be removed. 113 - func (e *Enforcer) setRoleForUser(name string, role string, domain ...string) error { 114 - currentUsers, err := e.e.GetUsersForRole(role, domain...) 115 - if err != nil { 116 - return err 117 - } 118 - 119 - for _, oldUser := range currentUsers { 120 - _, err = e.e.DeleteRoleForUser(oldUser, role, domain...) 121 - if err != nil { 122 - return err 123 - } 124 - } 125 - 126 - _, err = e.e.AddRoleForUser(name, role, domain...) 
127 - return err 128 - } 129 - 130 - // validateAtUri enforeces AT-URI to have valid did as authority and match collection NSID. 131 - func validateAtUri(uri syntax.ATURI, expected string) error { 132 - if !uri.Authority().IsDID() { 133 - return fmt.Errorf("expected at-uri with did") 134 - } 135 - if expected != "" && uri.Collection().String() != expected { 136 - return fmt.Errorf("incorrect repo at-uri collection nsid '%s' (expected '%s')", uri.Collection(), expected) 137 - } 138 - return nil 139 - }
-150
rbac2/rbac2_test.go
··· 1 - package rbac2_test 2 - 3 - import ( 4 - "database/sql" 5 - "testing" 6 - 7 - "github.com/bluesky-social/indigo/atproto/syntax" 8 - _ "github.com/mattn/go-sqlite3" 9 - "github.com/stretchr/testify/assert" 10 - "tangled.org/core/rbac2" 11 - ) 12 - 13 - func setup(t *testing.T) *rbac2.Enforcer { 14 - enforcer, err := rbac2.NewEnforcer(":memory:") 15 - assert.NoError(t, err) 16 - 17 - return enforcer 18 - } 19 - 20 - func TestNewEnforcer(t *testing.T) { 21 - db, err := sql.Open("sqlite3", ":memory:?_foreign_keys=1") 22 - assert.NoError(t, err) 23 - 24 - enforcer1, err := rbac2.NewEnforcerWithDB(db) 25 - assert.NoError(t, err) 26 - enforcer1.AddRepo(syntax.ATURI("at://did:plc:foo/sh.tangled.repo/reporkey")) 27 - model1 := enforcer1.CaptureModel() 28 - 29 - enforcer2, err := rbac2.NewEnforcerWithDB(db) 30 - assert.NoError(t, err) 31 - model2 := enforcer2.CaptureModel() 32 - 33 - // model1.GetLogger().EnableLog(true) 34 - // model1.PrintModel() 35 - // model1.PrintPolicy() 36 - // model1.GetLogger().EnableLog(false) 37 - 38 - model2.GetLogger().EnableLog(true) 39 - model2.PrintModel() 40 - model2.PrintPolicy() 41 - model2.GetLogger().EnableLog(false) 42 - 43 - assert.Equal(t, model1, model2) 44 - } 45 - 46 - func TestRepoOwnerPermissions(t *testing.T) { 47 - var ( 48 - e = setup(t) 49 - ok bool 50 - err error 51 - fooRepo = syntax.ATURI("at://did:plc:foo/sh.tangled.repo/reporkey") 52 - fooUser = syntax.DID("did:plc:foo") 53 - ) 54 - 55 - assert.NoError(t, e.AddRepo(fooRepo)) 56 - 57 - ok, err = e.IsRepoOwner(fooUser, fooRepo) 58 - assert.NoError(t, err) 59 - assert.True(t, ok, "repo author should be repo owner") 60 - 61 - ok, err = e.IsRepoWriteAllowed(fooUser, fooRepo) 62 - assert.NoError(t, err) 63 - assert.True(t, ok, "repo owner should be able to modify the repo itself") 64 - 65 - ok, err = e.IsRepoCollaborator(fooUser, fooRepo) 66 - assert.NoError(t, err) 67 - assert.True(t, ok, "repo owner should inherit role role:collaborator") 68 - 69 - ok, err = e.IsRepoSettingsWriteAllowed(fooUser, fooRepo) 70 - assert.NoError(t, err) 71 - assert.True(t, ok, "repo owner should inherit collaborator permissions") 72 - } 73 - 74 - func TestRepoCollaboratorPermissions(t *testing.T) { 75 - var ( 76 - e = setup(t) 77 - ok bool 78 - err error 79 - fooRepo = syntax.ATURI("at://did:plc:foo/sh.tangled.repo/reporkey") 80 - barUser = syntax.DID("did:plc:bar") 81 - ) 82 - 83 - assert.NoError(t, e.AddRepo(fooRepo)) 84 - assert.NoError(t, e.AddRepoCollaborator(barUser, fooRepo)) 85 - 86 - ok, err = e.IsRepoCollaborator(barUser, fooRepo) 87 - assert.NoError(t, err) 88 - assert.True(t, ok, "should set repo collaborator") 89 - 90 - ok, err = e.IsRepoSettingsWriteAllowed(barUser, fooRepo) 91 - assert.NoError(t, err) 92 - assert.True(t, ok, "repo collaborator should be able to edit repo settings") 93 - 94 - ok, err = e.IsRepoWriteAllowed(barUser, fooRepo) 95 - assert.NoError(t, err) 96 - assert.False(t, ok, "repo collaborator shouldn't be able to modify the repo itself") 97 - } 98 - 99 - func TestGetByRole(t *testing.T) { 100 - var ( 101 - e = setup(t) 102 - err error 103 - fooRepo = syntax.ATURI("at://did:plc:foo/sh.tangled.repo/reporkey") 104 - owner = syntax.DID("did:plc:foo") 105 - collaborator1 = syntax.DID("did:plc:bar") 106 - collaborator2 = syntax.DID("did:plc:baz") 107 - ) 108 - 109 - assert.NoError(t, e.AddRepo(fooRepo)) 110 - assert.NoError(t, e.AddRepoCollaborator(collaborator1, fooRepo)) 111 - assert.NoError(t, e.AddRepoCollaborator(collaborator2, fooRepo)) 112 - 113 - collaborators, err := 
e.GetRepoCollaborators(fooRepo) 114 - assert.NoError(t, err) 115 - assert.ElementsMatch(t, []syntax.DID{ 116 - owner, 117 - collaborator1, 118 - collaborator2, 119 - }, collaborators) 120 - } 121 - 122 - func TestSpindleOwnerPermissions(t *testing.T) { 123 - var ( 124 - e = setup(t) 125 - ok bool 126 - err error 127 - spindle = syntax.DID("did:web:spindle.example.com") 128 - owner = syntax.DID("did:plc:foo") 129 - member = syntax.DID("did:plc:bar") 130 - ) 131 - 132 - assert.NoError(t, e.SetSpindleOwner(owner, spindle)) 133 - assert.NoError(t, e.AddSpindleMember(member, spindle)) 134 - 135 - ok, err = e.IsSpindleMember(owner, spindle) 136 - assert.NoError(t, err) 137 - assert.True(t, ok, "spindle owner is spindle member") 138 - 139 - ok, err = e.IsSpindleMember(member, spindle) 140 - assert.NoError(t, err) 141 - assert.True(t, ok, "spindle member is spindle member") 142 - 143 - ok, err = e.IsSpindleMemberInviteAllowed(owner, spindle) 144 - assert.NoError(t, err) 145 - assert.True(t, ok, "spindle owner can invite members") 146 - 147 - ok, err = e.IsSpindleMemberInviteAllowed(member, spindle) 148 - assert.NoError(t, err) 149 - assert.False(t, ok, "spindle member cannot invite members") 150 - }
-91
rbac2/repo.go
··· 1 - package rbac2 2 - 3 - import ( 4 - "slices" 5 - "strings" 6 - 7 - "github.com/bluesky-social/indigo/atproto/syntax" 8 - "tangled.org/core/api/tangled" 9 - ) 10 - 11 - // AddRepo adds new repo with its owner to rbac enforcer 12 - func (e *Enforcer) AddRepo(repo syntax.ATURI) error { 13 - if err := validateAtUri(repo, tangled.RepoNSID); err != nil { 14 - return err 15 - } 16 - user := repo.Authority() 17 - 18 - return e.setRoleForUser(user.String(), "repo:owner", repo.String()) 19 - } 20 - 21 - // DeleteRepo deletes all policies related to the repo 22 - func (e *Enforcer) DeleteRepo(repo syntax.ATURI) error { 23 - if err := validateAtUri(repo, tangled.RepoNSID); err != nil { 24 - return err 25 - } 26 - 27 - _, err := e.e.DeleteDomains(repo.String()) 28 - return err 29 - } 30 - 31 - // AddRepoCollaborator adds new collaborator to the repo 32 - func (e *Enforcer) AddRepoCollaborator(user syntax.DID, repo syntax.ATURI) error { 33 - if err := validateAtUri(repo, tangled.RepoNSID); err != nil { 34 - return err 35 - } 36 - 37 - _, err := e.e.AddRoleForUser(user.String(), "repo:collaborator", repo.String()) 38 - return err 39 - } 40 - 41 - // RemoveRepoCollaborator removes the collaborator from the repo. 42 - // This won't remove inherited roles like repository owner. 43 - func (e *Enforcer) RemoveRepoCollaborator(user syntax.DID, repo syntax.ATURI) error { 44 - if err := validateAtUri(repo, tangled.RepoNSID); err != nil { 45 - return err 46 - } 47 - 48 - _, err := e.e.DeleteRoleForUser(user.String(), "repo:collaborator", repo.String()) 49 - return err 50 - } 51 - 52 - func (e *Enforcer) GetRepoCollaborators(repo syntax.ATURI) ([]syntax.DID, error) { 53 - var collaborators []syntax.DID 54 - members, err := e.e.GetImplicitUsersForRole("repo:collaborator", repo.String()) 55 - if err != nil { 56 - return nil, err 57 - } 58 - for _, m := range members { 59 - if !strings.HasPrefix(m, "did:") { // skip non-user subjects like 'repo:owner' 60 - continue 61 - } 62 - collaborators = append(collaborators, syntax.DID(m)) 63 - } 64 - 65 - slices.Sort(collaborators) 66 - return slices.Compact(collaborators), nil 67 - } 68 - 69 - func (e *Enforcer) IsRepoOwner(user syntax.DID, repo syntax.ATURI) (bool, error) { 70 - return e.e.HasRoleForUser(user.String(), "repo:owner", repo.String()) 71 - } 72 - 73 - func (e *Enforcer) IsRepoCollaborator(user syntax.DID, repo syntax.ATURI) (bool, error) { 74 - return e.hasImplicitRoleForUser(user.String(), "repo:collaborator", repo.String()) 75 - } 76 - 77 - func (e *Enforcer) IsRepoWriteAllowed(user syntax.DID, repo syntax.ATURI) (bool, error) { 78 - return e.e.Enforce(user.String(), repo.String(), "/", "write") 79 - } 80 - 81 - func (e *Enforcer) IsRepoSettingsWriteAllowed(user syntax.DID, repo syntax.ATURI) (bool, error) { 82 - return e.e.Enforce(user.String(), repo.String(), "/settings", "write") 83 - } 84 - 85 - func (e *Enforcer) IsRepoCollaboratorInviteAllowed(user syntax.DID, repo syntax.ATURI) (bool, error) { 86 - return e.e.Enforce(user.String(), repo.String(), "/collaborator", "write") 87 - } 88 - 89 - func (e *Enforcer) IsRepoGitPushAllowed(user syntax.DID, repo syntax.ATURI) (bool, error) { 90 - return e.e.Enforce(user.String(), repo.String(), "/git", "write") 91 - }
-29
rbac2/spindle.go
··· 1 - package rbac2 2 - 3 - import "github.com/bluesky-social/indigo/atproto/syntax" 4 - 5 - func (e *Enforcer) SetSpindleOwner(user syntax.DID, spindle syntax.DID) error { 6 - return e.setRoleForUser(user.String(), "server:owner", intoSpindle(spindle)) 7 - } 8 - 9 - func (e *Enforcer) IsSpindleMember(user syntax.DID, spindle syntax.DID) (bool, error) { 10 - return e.hasImplicitRoleForUser(user.String(), "server:member", intoSpindle(spindle)) 11 - } 12 - 13 - func (e *Enforcer) AddSpindleMember(user syntax.DID, spindle syntax.DID) error { 14 - _, err := e.e.AddRoleForUser(user.String(), "server:member", intoSpindle(spindle)) 15 - return err 16 - } 17 - 18 - func (e *Enforcer) RemoveSpindleMember(user syntax.DID, spindle syntax.DID) error { 19 - _, err := e.e.DeleteRoleForUser(user.String(), "server:member", intoSpindle(spindle)) 20 - return err 21 - } 22 - 23 - func (e *Enforcer) IsSpindleMemberInviteAllowed(user syntax.DID, spindle syntax.DID) (bool, error) { 24 - return e.e.Enforce(user.String(), intoSpindle(spindle), "/member", "write") 25 - } 26 - 27 - func intoSpindle(did syntax.DID) string { 28 - return "/spindle/" + did.String() 29 - }
-19
rbac2/tangled_policy.csv
··· 1 - #, policies 2 - #, sub, dom, obj, act 3 - p, repo:owner, at://{did}/sh.tangled.repo/{rkey}, /, write 4 - p, repo:owner, at://{did}/sh.tangled.repo/{rkey}, /collaborator, write 5 - p, repo:collaborator, at://{did}/sh.tangled.repo/{rkey}, /settings, write 6 - p, repo:collaborator, at://{did}/sh.tangled.repo/{rkey}, /git, write 7 - 8 - p, server:owner, /knot/{did}, /member, write 9 - p, server:member, /knot/{did}, /git, write 10 - 11 - p, server:owner, /spindle/{did}, /member, write 12 - 13 - 14 - #, group policies 15 - #, sub, role, dom 16 - g, repo:owner, repo:collaborator, at://{did}/sh.tangled.repo/{rkey} 17 - 18 - g, server:owner, server:member, /knot/{did} 19 - g, server:owner, server:member, /spindle/{did}
-459
spindle/adapters/nixery/adapter.go
··· 1 - package nixery 2 - 3 - import ( 4 - "context" 5 - "fmt" 6 - "io" 7 - "log/slog" 8 - "os" 9 - "path" 10 - "path/filepath" 11 - "regexp" 12 - "runtime" 13 - "sync" 14 - "time" 15 - 16 - "github.com/bluesky-social/indigo/atproto/syntax" 17 - "github.com/docker/docker/api/types/container" 18 - "github.com/docker/docker/api/types/filters" 19 - "github.com/docker/docker/api/types/image" 20 - "github.com/docker/docker/api/types/mount" 21 - "github.com/docker/docker/api/types/network" 22 - "github.com/docker/docker/client" 23 - "github.com/stretchr/testify/assert/yaml" 24 - "tangled.org/core/api/tangled" 25 - "tangled.org/core/sets" 26 - "tangled.org/core/spindle/config" 27 - "tangled.org/core/spindle/models" 28 - "tangled.org/core/spindle/repomanager" 29 - "tangled.org/core/tid" 30 - "tangled.org/core/workflow" 31 - ) 32 - 33 - const AdapterID = "nixery" 34 - 35 - type Adapter struct { 36 - l *slog.Logger 37 - repoManager *repomanager.RepoManager 38 - docker client.APIClient 39 - Timeout time.Duration 40 - spindleDid syntax.DID 41 - cfg config.NixeryPipelines 42 - 43 - mu sync.RWMutex 44 - activeRuns map[syntax.ATURI]models.WorkflowRun 45 - subscribers sets.Set[chan<- models.WorkflowRun] 46 - } 47 - 48 - var _ models.Adapter = (*Adapter)(nil) 49 - 50 - func New(l *slog.Logger, cfg config.Config, repoManager *repomanager.RepoManager) (*Adapter, error) { 51 - dc, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation()) 52 - if err != nil { 53 - return nil, fmt.Errorf("creating docker client: %w", err) 54 - } 55 - return &Adapter{ 56 - l: l, 57 - repoManager: repoManager, 58 - docker: dc, 59 - Timeout: time.Minute * 5, // TODO: set timeout from config 60 - spindleDid: cfg.Server.Did(), 61 - cfg: cfg.NixeryPipelines, 62 - 63 - activeRuns: make(map[syntax.ATURI]models.WorkflowRun), 64 - subscribers: sets.New[chan<- models.WorkflowRun](), 65 - }, nil 66 - } 67 - 68 - func (a *Adapter) Init() error { 69 - // no-op 70 - return nil 71 - } 72 - 73 - func (a *Adapter) Shutdown(ctx context.Context) error { 74 - // TODO: cleanup spawned containers just in case 75 - panic("unimplemented") 76 - } 77 - 78 - func (a *Adapter) SetupRepo(ctx context.Context, repo syntax.ATURI) error { 79 - if err := a.repoManager.RegisterRepo(ctx, repo, []string{"/.tangled/workflows"}); err != nil { 80 - return fmt.Errorf("syncing repo: %w", err) 81 - } 82 - return nil 83 - } 84 - 85 - func (a *Adapter) ListWorkflowDefs(ctx context.Context, repo syntax.ATURI, rev string) ([]models.WorkflowDef, error) { 86 - defs, err := a.listWorkflowDefs(ctx, repo, rev) 87 - if err != nil { 88 - return nil, err 89 - } 90 - retDefs := make([]models.WorkflowDef, len(defs)) 91 - for i, def := range defs { 92 - retDefs[i] = def.AsInfo() 93 - } 94 - return retDefs, nil 95 - } 96 - 97 - func (a *Adapter) listWorkflowDefs(ctx context.Context, repo syntax.ATURI, rev string) ([]WorkflowDef, error) { 98 - workflowDir, err := a.repoManager.FileTree(ctx, repo, rev, workflow.WorkflowDir) 99 - if err != nil { 100 - return nil, fmt.Errorf("loading file tree: %w", err) 101 - } 102 - 103 - if len(workflowDir) == 0 { 104 - return nil, nil 105 - } 106 - 107 - // TODO(boltless): repoManager.FileTree() should be smart enough so we don't need to do this: 108 - gr, err := a.repoManager.Open(repo, rev) 109 - if err != nil { 110 - return nil, fmt.Errorf("opening git repo: %w", err) 111 - } 112 - 113 - var defs []WorkflowDef 114 - for _, e := range workflowDir { 115 - if !e.IsFile() { 116 - continue 117 - } 118 - 119 - fpath := 
filepath.Join(workflow.WorkflowDir, e.Name) 120 - contents, err := gr.RawContent(fpath) 121 - if err != nil { 122 - return nil, fmt.Errorf("reading raw content of '%s': %w", fpath, err) 123 - } 124 - 125 - var wf WorkflowDef 126 - if err := yaml.Unmarshal(contents, &wf); err != nil { 127 - return nil, fmt.Errorf("parsing yaml: %w", err) 128 - } 129 - wf.Name = e.Name 130 - 131 - defs = append(defs, wf) 132 - } 133 - 134 - return defs, nil 135 - } 136 - 137 - func (a *Adapter) EvaluateEvent(ctx context.Context, event models.Event) ([]models.WorkflowRun, error) { 138 - defs, err := a.listWorkflowDefs(ctx, event.SourceRepo, event.SourceSha) 139 - if err != nil { 140 - return nil, fmt.Errorf("fetching workflow definitions: %w", err) 141 - } 142 - 143 - // filter out triggered workflows 144 - var triggered []nixeryWorkflow 145 - for _, def := range defs { 146 - if def.ShouldRunOn(event) { 147 - triggered = append(triggered, nixeryWorkflow{ 148 - event: event, 149 - def: def, 150 - }) 151 - } 152 - } 153 - 154 - // TODO: append more workflows from "on_workflow" event 155 - 156 - // schedule workflows and return immediately 157 - runs := make([]models.WorkflowRun, len(triggered)) 158 - for i, workflow := range triggered { 159 - runs[i] = a.scheduleWorkflow(ctx, workflow) 160 - } 161 - return runs, nil 162 - } 163 - 164 - // NOTE: nixery adapter is volatile. GetActiveWorkflowRun will return error 165 - // when the workflow is terminated. It lets spindle to mark lost workflow.run 166 - // as "Failed". 167 - func (a *Adapter) GetActiveWorkflowRun(ctx context.Context, runId syntax.ATURI) (models.WorkflowRun, error) { 168 - a.mu.RLock() 169 - run, exists := a.activeRuns[runId] 170 - a.mu.RUnlock() 171 - if !exists { 172 - return run, fmt.Errorf("unknown or terminated workflow") 173 - } 174 - return run, nil 175 - } 176 - 177 - func (a *Adapter) ListActiveWorkflowRuns(ctx context.Context) ([]models.WorkflowRun, error) { 178 - a.mu.RLock() 179 - defer a.mu.RUnlock() 180 - 181 - runs := make([]models.WorkflowRun, 0, len(a.activeRuns)) 182 - for _, run := range a.activeRuns { 183 - runs = append(runs, run) 184 - } 185 - return runs, nil 186 - } 187 - 188 - func (a *Adapter) SubscribeWorkflowRun(ctx context.Context) <-chan models.WorkflowRun { 189 - ch := make(chan models.WorkflowRun, 1) 190 - 191 - a.mu.Lock() 192 - a.subscribers.Insert(ch) 193 - a.mu.Unlock() 194 - 195 - // cleanup spindle stops listening 196 - go func() { 197 - <-ctx.Done() 198 - a.mu.Lock() 199 - a.subscribers.Remove(ch) 200 - a.mu.Unlock() 201 - close(ch) 202 - }() 203 - 204 - return ch 205 - } 206 - 207 - func (a *Adapter) emit(run models.WorkflowRun) { 208 - a.mu.Lock() 209 - if run.Status.IsActive() { 210 - a.activeRuns[run.AtUri()] = run 211 - } else { 212 - delete(a.activeRuns, run.AtUri()) 213 - } 214 - 215 - // Snapshot subscribers to broadcast outside the lock 216 - subs := make([]chan<- models.WorkflowRun, 0, a.subscribers.Len()) 217 - for ch := range a.subscribers.All() { 218 - subs = append(subs, ch) 219 - } 220 - a.mu.Unlock() 221 - 222 - for _, ch := range subs { 223 - select { 224 - case ch <- run: 225 - default: 226 - // avoid blocking if channel is full 227 - // spindle will catch the state by regular GetWorkflowRun poll 228 - } 229 - } 230 - } 231 - 232 - func (a *Adapter) StreamWorkflowRunLogs(ctx context.Context, runId syntax.ATURI, handle func(line models.LogLine) error) error { 233 - panic("unimplemented") 234 - } 235 - 236 - func (a *Adapter) CancelWorkflowRun(ctx context.Context, runId syntax.ATURI) error { 237 
- // remove network 238 - if err := a.docker.NetworkRemove(ctx, networkName(runId)); err != nil { 239 - return fmt.Errorf("removing network: %w", err) 240 - } 241 - 242 - // stop & remove docker containers with label 243 - containers, err := a.docker.ContainerList(ctx, container.ListOptions{ 244 - Filters: labelFilter(tangled.CiWorkflowRunNSID, runId.String()), 245 - }) 246 - if err != nil { 247 - return fmt.Errorf("finding container with label: %w", err) 248 - } 249 - for _, c := range containers { 250 - if err := a.docker.ContainerStop(ctx, c.ID, container.StopOptions{}); err != nil { 251 - return fmt.Errorf("stopping container: %w", err) 252 - } 253 - 254 - if err := a.docker.ContainerRemove(ctx, c.ID, container.RemoveOptions{ 255 - RemoveVolumes: true, 256 - RemoveLinks: false, 257 - Force: false, 258 - }); err != nil { 259 - return fmt.Errorf("removing container: %w", err) 260 - } 261 - } 262 - return nil 263 - } 264 - 265 - func labelFilter(labelKey, labelVal string) filters.Args { 266 - filterArgs := filters.NewArgs() 267 - filterArgs.Add("label", fmt.Sprintf("%s=%s", labelKey, labelVal)) 268 - return filterArgs 269 - } 270 - 271 - const ( 272 - workspaceDir = "/tangled/workspace" 273 - homeDir = "/tangled/home" 274 - ) 275 - 276 - // scheduleWorkflow schedules a workflow run in job queue and return queued run 277 - func (a *Adapter) scheduleWorkflow(ctx context.Context, workflow nixeryWorkflow) models.WorkflowRun { 278 - l := a.l 279 - 280 - run := models.WorkflowRun{ 281 - Did: a.spindleDid, 282 - Rkey: syntax.RecordKey(tid.TID()), 283 - AdapterId: AdapterID, 284 - Name: workflow.def.Name, 285 - Status: models.WorkflowStatusPending, 286 - } 287 - 288 - a.mu.Lock() 289 - a.activeRuns[run.AtUri()] = run 290 - a.mu.Unlock() 291 - 292 - go func() { 293 - defer a.CancelWorkflowRun(ctx, run.AtUri()) 294 - 295 - containerId, err := a.initNixeryContainer(ctx, workflow.def, run.AtUri()) 296 - if err != nil { 297 - l.Error("failed to intialize container", "err", err) 298 - // TODO: put user-facing logs in workflow log 299 - a.emit(run.WithStatus(models.WorkflowStatusFailed)) 300 - return 301 - } 302 - 303 - ctx, cancel := context.WithTimeout(ctx, a.Timeout) 304 - defer cancel() 305 - 306 - for stepIdx, step := range workflow.def.Steps { 307 - if err := a.runStep(ctx, containerId, stepIdx, step); err != nil { 308 - l.Error("failed to run step", "stepIdx", stepIdx, "err", err) 309 - return 310 - } 311 - } 312 - l.Info("all steps completed successfully") 313 - }() 314 - 315 - l.Info("workflow scheduled to background", "workflow.run", run.AtUri()) 316 - 317 - return run 318 - } 319 - 320 - func (a *Adapter) runStep(ctx context.Context, containerId string, stepIdx int, step Step) error { 321 - // TODO: implement this 322 - 323 - // TODO: configure envs 324 - var envs []string 325 - 326 - select { 327 - case <-ctx.Done(): 328 - return ctx.Err() 329 - default: 330 - } 331 - 332 - mkExecResp, err := a.docker.ContainerExecCreate(ctx, containerId, container.ExecOptions{ 333 - Cmd: []string{"bash", "-c", step.Command}, 334 - AttachStdout: true, 335 - AttachStderr: true, 336 - Env: envs, 337 - }) 338 - if err != nil { 339 - return fmt.Errorf("creating exec: %w", err) 340 - } 341 - 342 - panic("unimplemented") 343 - } 344 - 345 - // initNixeryContainer pulls the image from nixery and start the container. 
346 - func (a *Adapter) initNixeryContainer(ctx context.Context, def WorkflowDef, runAt syntax.ATURI) (string, error) { 347 - imageName := workflowImageName(def.Dependencies, a.cfg.Nixery) 348 - 349 - _, err := a.docker.NetworkCreate(ctx, networkName(runAt), network.CreateOptions{ 350 - Driver: "bridge", 351 - }) 352 - if err != nil { 353 - return "", fmt.Errorf("creating network: %w", err) 354 - } 355 - 356 - reader, err := a.docker.ImagePull(ctx, imageName, image.PullOptions{}) 357 - if err != nil { 358 - return "", fmt.Errorf("pulling image: %w", err) 359 - } 360 - defer reader.Close() 361 - io.Copy(os.Stdout, reader) 362 - 363 - resp, err := a.docker.ContainerCreate(ctx, &container.Config{ 364 - Image: imageName, 365 - Cmd: []string{"cat"}, 366 - OpenStdin: true, // so cat stays alive :3 367 - Tty: false, 368 - Hostname: "spindle", 369 - WorkingDir: workspaceDir, 370 - Labels: map[string]string{ 371 - tangled.CiWorkflowRunNSID: runAt.String(), 372 - }, 373 - // TODO(winter): investigate whether environment variables passed here 374 - // get propagated to ContainerExec processes 375 - }, &container.HostConfig{ 376 - Mounts: []mount.Mount{ 377 - { 378 - Type: mount.TypeTmpfs, 379 - Target: "/tmp", 380 - ReadOnly: false, 381 - TmpfsOptions: &mount.TmpfsOptions{ 382 - Mode: 0o1777, // world-writeable sticky bit 383 - Options: [][]string{ 384 - {"exec"}, 385 - }, 386 - }, 387 - }, 388 - }, 389 - ReadonlyRootfs: false, 390 - CapDrop: []string{"ALL"}, 391 - CapAdd: []string{"CAP_DAC_OVERRIDE", "CAP_CHOWN", "CAP_FOWNER", "CAP_SETUID", "CAP_SETGID"}, 392 - SecurityOpt: []string{"no-new-privileges"}, 393 - ExtraHosts: []string{"host.docker.internal:host-gateway"}, 394 - }, nil, nil, "") 395 - if err != nil { 396 - return "", fmt.Errorf("creating container: %w", err) 397 - } 398 - 399 - if err := a.docker.ContainerStart(ctx, resp.ID, container.StartOptions{}); err != nil { 400 - return "", fmt.Errorf("starting container: %w", err) 401 - } 402 - 403 - mkExecResp, err := a.docker.ContainerExecCreate(ctx, resp.ID, container.ExecOptions{ 404 - Cmd: []string{"mkdir", "-p", workspaceDir, homeDir}, 405 - AttachStdout: true, // NOTE(winter): pretty sure this will make it so that when stdout read is done below, mkdir is done. maybe?? 406 - AttachStderr: true, // for good measure, backed up by docker/cli ("If -d is not set, attach to everything by default") 407 - }) 408 - if err != nil { 409 - return "", err 410 - } 411 - 412 - // This actually *starts* the command. Thanks, Docker! 413 - execResp, err := a.docker.ContainerExecAttach(ctx, mkExecResp.ID, container.ExecAttachOptions{}) 414 - if err != nil { 415 - return "", err 416 - } 417 - defer execResp.Close() 418 - 419 - // This is apparently best way to wait for the command to complete. 420 - _, err = io.ReadAll(execResp.Reader) 421 - if err != nil { 422 - return "", err 423 - } 424 - 425 - execInspectResp, err := a.docker.ContainerExecInspect(ctx, mkExecResp.ID) 426 - if err != nil { 427 - return "", err 428 - } 429 - 430 - if execInspectResp.ExitCode != 0 { 431 - return "", fmt.Errorf("mkdir exited with exit code %d", execInspectResp.ExitCode) 432 - } else if execInspectResp.Running { 433 - return "", fmt.Errorf("mkdir is somehow still running??") 434 - } 435 - 436 - return resp.ID, nil 437 - } 438 - 439 - func workflowImageName(deps map[string][]string, nixery string) string { 440 - var dependencies string 441 - for reg, ds := range deps { 442 - if reg == "nixpkgs" { 443 - dependencies = path.Join(ds...) 
444 - } 445 - } 446 - // NOTE: shouldn't base dependencies come first? 447 - // like: nixery.tangled.sh/arm64/bash/git/coreutils/nix 448 - dependencies = path.Join(dependencies, "bash", "git", "coreutils", "nix") 449 - if runtime.GOARCH == "arm64" { 450 - dependencies = path.Join("arm64", dependencies) 451 - } 452 - 453 - return path.Join(nixery, dependencies) 454 - } 455 - 456 - var re = regexp.MustCompile(`[^a-zA-Z0-9_.-]`) 457 - func networkName(runId syntax.ATURI) string { 458 - return re.ReplaceAllString(runId.String()[5:], "-") 459 - }
-3
spindle/adapters/nixery/readme.md
··· 1 - # Nixery spindle adapter implementation 2 - 3 - Nixery adapter uses `/.tangled/workflows/*.yml` files as workflow definitions.
-42
spindle/adapters/nixery/workflow.go
··· 1 - package nixery 2 - 3 - import ( 4 - "tangled.org/core/spindle/models" 5 - "tangled.org/core/workflow" 6 - ) 7 - 8 - type nixeryWorkflow struct { 9 - event models.Event // event that triggered the workflow 10 - def WorkflowDef // definition of the workflow 11 - } 12 - 13 - // TODO: extract general fields to workflow.WorkflowDef struct 14 - 15 - // nixery adapter workflow definition spec 16 - type WorkflowDef struct { 17 - Name string `yaml:"-"` // name of the workflow file 18 - When []workflow.Constraint `yaml:"when"` 19 - CloneOpts workflow.CloneOpts `yaml:"clone"` 20 - 21 - Dependencies map[string][]string // nix packages used for the workflow 22 - Steps []Step // workflow steps 23 - } 24 - 25 - type Step struct { 26 - Name string `yaml:"name"` 27 - Command string `yaml:"command"` 28 - Enviornment map[string]string `yaml:"environment"` 29 - } 30 - 31 - func (d *WorkflowDef) AsInfo() models.WorkflowDef { 32 - return models.WorkflowDef{ 33 - AdapterId: AdapterID, 34 - Name: d.Name, 35 - When: d.When, 36 - } 37 - } 38 - 39 - func (d *WorkflowDef) ShouldRunOn(event models.Event) bool { 40 - // panic("unimplemented") 41 - return false 42 - }
+13 -23
spindle/config/config.go
··· 3 3 import ( 4 4 "context" 5 5 "fmt" 6 - "path/filepath" 7 - "time" 8 6 9 7 "github.com/bluesky-social/indigo/atproto/syntax" 10 8 "github.com/sethvargo/go-envconfig" 11 9 ) 12 10 13 11 type Server struct { 14 - ListenAddr string `env:"LISTEN_ADDR, default=0.0.0.0:6555"` 15 - Hostname string `env:"HOSTNAME, required"` 16 - TapUrl string `env:"TAP_URL, required"` 17 - PlcUrl string `env:"PLC_URL, default=https://plc.directory"` 18 - Dev bool `env:"DEV, default=false"` 19 - Owner syntax.DID `env:"OWNER, required"` 20 - Secrets Secrets `env:",prefix=SECRETS_"` 21 - LogDir string `env:"LOG_DIR, default=/var/log/spindle"` 22 - DataDir string `env:"DATA_DIR, default=/var/lib/spindle"` 23 - QueueSize int `env:"QUEUE_SIZE, default=100"` 24 - MaxJobCount int `env:"MAX_JOB_COUNT, default=2"` // max number of jobs that run at a time 12 + ListenAddr string `env:"LISTEN_ADDR, default=0.0.0.0:6555"` 13 + DBPath string `env:"DB_PATH, default=spindle.db"` 14 + Hostname string `env:"HOSTNAME, required"` 15 + JetstreamEndpoint string `env:"JETSTREAM_ENDPOINT, default=wss://jetstream1.us-west.bsky.network/subscribe"` 16 + PlcUrl string `env:"PLC_URL, default=https://plc.directory"` 17 + Dev bool `env:"DEV, default=false"` 18 + Owner string `env:"OWNER, required"` 19 + Secrets Secrets `env:",prefix=SECRETS_"` 20 + LogDir string `env:"LOG_DIR, default=/var/log/spindle"` 21 + QueueSize int `env:"QUEUE_SIZE, default=100"` 22 + MaxJobCount int `env:"MAX_JOB_COUNT, default=2"` // max number of jobs that run at a time 25 23 } 26 24 27 25 func (s Server) Did() syntax.DID { 28 26 return syntax.DID(fmt.Sprintf("did:web:%s", s.Hostname)) 29 27 } 30 28 31 - func (s Server) RepoDir() string { 32 - return filepath.Join(s.DataDir, "repos") 33 - } 34 - 35 - func (s Server) DBPath() string { 36 - return filepath.Join(s.DataDir, "spindle.db") 37 - } 38 - 39 29 type Secrets struct { 40 30 Provider string `env:"PROVIDER, default=sqlite"` 41 31 OpenBao OpenBaoConfig `env:",prefix=OPENBAO_"` ··· 47 37 } 48 38 49 39 type NixeryPipelines struct { 50 - Nixery string `env:"NIXERY, default=nixery.tangled.sh"` 51 - WorkflowTimeout time.Duration `env:"WORKFLOW_TIMEOUT, default=5m"` 40 + Nixery string `env:"NIXERY, default=nixery.tangled.sh"` 41 + WorkflowTimeout string `env:"WORKFLOW_TIMEOUT, default=5m"` 52 42 } 53 43 54 44 type Config struct {
+18 -73
spindle/db/db.go
··· 1 1 package db 2 2 3 3 import ( 4 - "context" 5 4 "database/sql" 6 5 "strings" 7 6 8 - "github.com/bluesky-social/indigo/atproto/syntax" 9 7 _ "github.com/mattn/go-sqlite3" 10 - "tangled.org/core/log" 11 - "tangled.org/core/orm" 12 8 ) 13 9 14 10 type DB struct { 15 11 *sql.DB 16 12 } 17 13 18 - func Make(ctx context.Context, dbPath string) (*DB, error) { 14 + func Make(dbPath string) (*DB, error) { 19 15 // https://github.com/mattn/go-sqlite3#connection-string 20 16 opts := []string{ 21 17 "_foreign_keys=1", ··· 24 20 "_auto_vacuum=incremental", 25 21 } 26 22 27 - logger := log.FromContext(ctx) 28 - logger = log.SubLogger(logger, "db") 29 - 30 23 db, err := sql.Open("sqlite3", dbPath+"?"+strings.Join(opts, "&")) 31 24 if err != nil { 32 25 return nil, err 33 26 } 34 27 35 - conn, err := db.Conn(ctx) 36 - if err != nil { 37 - return nil, err 38 - } 39 - defer conn.Close() 28 + // NOTE: If any other migration is added here, you MUST 29 + // copy the pattern in appview: use a single sql.Conn 30 + // for every migration. 40 31 41 32 _, err = db.Exec(` 42 33 create table if not exists _jetstream ( ··· 58 49 unique(owner, name) 59 50 ); 60 51 61 - create table if not exists repo_collaborators ( 62 - -- identifiers 63 - id integer primary key autoincrement, 64 - did text not null, 65 - rkey text not null, 66 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.repo.collaborator' || '/' || rkey) stored, 67 - 68 - repo text not null, 69 - subject text not null, 70 - 71 - addedAt text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 72 - unique(did, rkey) 73 - ); 74 - 75 52 create table if not exists spindle_members ( 76 53 -- identifiers for the record 77 54 id integer primary key autoincrement, ··· 99 76 return nil, err 100 77 } 101 78 102 - // run migrations 79 + return &DB{db}, nil 80 + } 103 81 104 - // NOTE: this won't migrate existing records 105 - // they will be fetched again with tap instead 106 - orm.RunMigration(conn, logger, "add-rkey-to-repos", func(tx *sql.Tx) error { 107 - // archive legacy repos (just in case) 108 - _, err = tx.Exec(`alter table repos rename to repos_old`) 109 - if err != nil { 110 - return err 111 - } 112 - 113 - _, err := tx.Exec(` 114 - create table repos ( 115 - -- identifiers 116 - id integer primary key autoincrement, 117 - did text not null, 118 - rkey text not null, 119 - at_uri text generated always as ('at://' || did || '/' || 'sh.tangled.repo' || '/' || rkey) stored, 120 - 121 - name text not null, 122 - knot text not null, 123 - 124 - addedAt text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), 125 - unique(did, rkey) 126 - ); 127 - `) 128 - if err != nil { 129 - return err 130 - } 131 - 132 - return nil 133 - }) 134 - 135 - return &DB{db}, nil 82 + func (d *DB) SaveLastTimeUs(lastTimeUs int64) error { 83 + _, err := d.Exec(` 84 + insert into _jetstream (id, last_time_us) 85 + values (1, ?) 86 + on conflict(id) do update set last_time_us = excluded.last_time_us 87 + `, lastTimeUs) 88 + return err 136 89 } 137 90 138 - func (d *DB) IsKnownDid(did syntax.DID) (bool, error) { 139 - // is spindle member / repo collaborator 140 - var exists bool 141 - err := d.QueryRow( 142 - `select exists ( 143 - select 1 from repo_collaborators where subject = ? 144 - union all 145 - select 1 from spindle_members where did = ? 
146 - )`, 147 - did, 148 - did, 149 - ).Scan(&exists) 150 - return exists, err 91 + func (d *DB) GetLastTimeUs() (int64, error) { 92 + var lastTimeUs int64 93 + row := d.QueryRow(`select last_time_us from _jetstream where id = 1;`) 94 + err := row.Scan(&lastTimeUs) 95 + return lastTimeUs, err 151 96 }
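The NOTE added above in spindle/db/db.go points at the appview's migration pattern: obtain one *sql.Conn and run every migration through it via orm.RunMigration, the same helper the removed "add-rkey-to-repos" block used. A minimal sketch of what a future migration in this file might look like under that pattern, assuming the conn/logger setup shown in the removed code; the runMigrations wrapper, the migration name "add-example-column", and the ALTER statement are hypothetical, not part of this change:

```go
package db

import (
	"context"
	"database/sql"

	"tangled.org/core/log"
	"tangled.org/core/orm"
)

// runMigrations is a sketch, not part of this diff: it reuses the conn and
// logger setup from the removed migration block and keeps every migration on
// the single conn, as the NOTE in Make() asks for.
func runMigrations(ctx context.Context, db *sql.DB) error {
	logger := log.SubLogger(log.FromContext(ctx), "db")

	conn, err := db.Conn(ctx)
	if err != nil {
		return err
	}
	defer conn.Close()

	// Hypothetical migration; the name and the ALTER statement are placeholders.
	orm.RunMigration(conn, logger, "add-example-column", func(tx *sql.Tx) error {
		_, err := tx.Exec(`alter table repos add column example text`)
		return err
	})

	return nil
}
```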
-14
spindle/db/events.go
··· 70 70 return evts, nil 71 71 } 72 72 73 - func (d *DB) CreatePipelineEvent(rkey string, pipeline tangled.Pipeline, n *notifier.Notifier) error { 74 - eventJson, err := json.Marshal(pipeline) 75 - if err != nil { 76 - return err 77 - } 78 - event := Event{ 79 - Rkey: rkey, 80 - Nsid: tangled.PipelineNSID, 81 - Created: time.Now().UnixNano(), 82 - EventJson: string(eventJson), 83 - } 84 - return d.insertEvent(event, n) 85 - } 86 - 87 73 func (d *DB) createStatusEvent( 88 74 workflowId models.WorkflowId, 89 75 statusKind models.StatusKind,
+44
spindle/db/known_dids.go
··· 1 + package db 2 + 3 + func (d *DB) AddDid(did string) error { 4 + _, err := d.Exec(`insert or ignore into known_dids (did) values (?)`, did) 5 + return err 6 + } 7 + 8 + func (d *DB) RemoveDid(did string) error { 9 + _, err := d.Exec(`delete from known_dids where did = ?`, did) 10 + return err 11 + } 12 + 13 + func (d *DB) GetAllDids() ([]string, error) { 14 + var dids []string 15 + 16 + rows, err := d.Query(`select did from known_dids`) 17 + if err != nil { 18 + return nil, err 19 + } 20 + defer rows.Close() 21 + 22 + for rows.Next() { 23 + var did string 24 + if err := rows.Scan(&did); err != nil { 25 + return nil, err 26 + } 27 + dids = append(dids, did) 28 + } 29 + 30 + if err := rows.Err(); err != nil { 31 + return nil, err 32 + } 33 + 34 + return dids, nil 35 + } 36 + 37 + func (d *DB) HasKnownDids() bool { 38 + var count int 39 + err := d.QueryRow(`select count(*) from known_dids`).Scan(&count) 40 + if err != nil { 41 + return false 42 + } 43 + return count > 0 44 + }
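Taken together with SaveLastTimeUs/GetLastTimeUs in db.go, the new known_dids helpers give a consumer the two things it needs to resume after a restart: the set of DIDs to watch and a jetstream cursor. A purely illustrative sketch of how a caller might use them when handling an event; the consumer package name, the trackEvent wrapper, and its arguments are hypothetical, not part of this change:

```go
package consumer // hypothetical package, sketch only

import "tangled.org/core/spindle/db"

// trackEvent is illustrative only: remember the DID so restarts keep
// watching it, then persist the jetstream cursor so a restarted consumer
// can resume from GetLastTimeUs().
func trackEvent(d *db.DB, did string, timeUs int64) error {
	if err := d.AddDid(did); err != nil {
		return err
	}
	return d.SaveLastTimeUs(timeUs)
}
```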
+11 -119
spindle/db/repos.go
··· 1 1 package db 2 2 3 - import "github.com/bluesky-social/indigo/atproto/syntax" 4 - 5 3 type Repo struct { 6 - Did syntax.DID 7 - Rkey syntax.RecordKey 8 - Name string 9 - Knot string 4 + Knot string 5 + Owner string 6 + Name string 10 7 } 11 8 12 - type RepoCollaborator struct { 13 - Did syntax.DID 14 - Rkey syntax.RecordKey 15 - Repo syntax.ATURI 16 - Subject syntax.DID 17 - } 18 - 19 - func (d *DB) PutRepo(repo *Repo) error { 20 - _, err := d.Exec( 21 - `insert or ignore into repos (did, rkey, name, knot) 22 - values (?, ?, ?, ?) 23 - on conflict(did, rkey) do update set 24 - name = excluded.name, 25 - knot = excluded.knot`, 26 - repo.Did, 27 - repo.Rkey, 28 - repo.Name, 29 - repo.Knot, 30 - ) 31 - return err 32 - } 33 - 34 - func (d *DB) DeleteRepo(did syntax.DID, rkey syntax.RecordKey) error { 35 - _, err := d.Exec( 36 - `delete from repos where did = ? and rkey = ?`, 37 - did, 38 - rkey, 39 - ) 9 + func (d *DB) AddRepo(knot, owner, name string) error { 10 + _, err := d.Exec(`insert or ignore into repos (knot, owner, name) values (?, ?, ?)`, knot, owner, name) 40 11 return err 41 12 } 42 13 ··· 63 34 return knots, nil 64 35 } 65 36 66 - func (d *DB) GetRepo(repoAt syntax.ATURI) (*Repo, error) { 37 + func (d *DB) GetRepo(knot, owner, name string) (*Repo, error) { 67 38 var repo Repo 68 - err := d.DB.QueryRow( 69 - `select 70 - did, 71 - rkey, 72 - name, 73 - knot 74 - from repos where at_uri = ?`, 75 - repoAt, 76 - ).Scan( 77 - &repo.Did, 78 - &repo.Rkey, 79 - &repo.Name, 80 - &repo.Knot, 81 - ) 82 - if err != nil { 83 - return nil, err 84 - } 85 - return &repo, nil 86 - } 87 39 88 - func (d *DB) GetRepoWithName(did syntax.DID, name string) (*Repo, error) { 89 - var repo Repo 90 - err := d.DB.QueryRow( 91 - `select 92 - did, 93 - rkey, 94 - name, 95 - knot 96 - from repos where did = ? and name = ?`, 97 - did, 98 - name, 99 - ).Scan( 100 - &repo.Did, 101 - &repo.Rkey, 102 - &repo.Name, 103 - &repo.Knot, 104 - ) 40 + query := "select knot, owner, name from repos where knot = ? and owner = ? and name = ?" 41 + err := d.DB.QueryRow(query, knot, owner, name). 42 + Scan(&repo.Knot, &repo.Owner, &repo.Name) 43 + 105 44 if err != nil { 106 45 return nil, err 107 46 } 47 + 108 48 return &repo, nil 109 49 } 110 - 111 - func (d *DB) PutRepoCollaborator(collaborator *RepoCollaborator) error { 112 - _, err := d.Exec( 113 - `insert into repo_collaborators (did, rkey, repo, subject) 114 - values (?, ?, ?, ?) 115 - on conflict(did, rkey) do update set 116 - repo = excluded.repo, 117 - subject = excluded.subject`, 118 - collaborator.Did, 119 - collaborator.Rkey, 120 - collaborator.Repo, 121 - collaborator.Subject, 122 - ) 123 - return err 124 - } 125 - 126 - func (d *DB) RemoveRepoCollaborator(did syntax.DID, rkey syntax.RecordKey) error { 127 - _, err := d.Exec( 128 - `delete from repo_collaborators where did = ? and rkey = ?`, 129 - did, 130 - rkey, 131 - ) 132 - return err 133 - } 134 - 135 - func (d *DB) GetRepoCollaborator(did syntax.DID, rkey syntax.RecordKey) (*RepoCollaborator, error) { 136 - var collaborator RepoCollaborator 137 - err := d.DB.QueryRow( 138 - `select 139 - did, 140 - rkey, 141 - repo, 142 - subject 143 - from repo_collaborators 144 - where did = ? and rkey = ?`, 145 - did, 146 - rkey, 147 - ).Scan( 148 - &collaborator.Did, 149 - &collaborator.Rkey, 150 - &collaborator.Repo, 151 - &collaborator.Subject, 152 - ) 153 - if err != nil { 154 - return nil, err 155 - } 156 - return &collaborator, nil 157 - }
+16 -14
spindle/engine/engine.go
··· 30 30 } 31 31 } 32 32 33 + secretValues := make([]string, len(allSecrets)) 34 + for i, s := range allSecrets { 35 + secretValues[i] = s.Value 36 + } 37 + 33 38 var wg sync.WaitGroup 34 39 for eng, wfs := range pipeline.Workflows { 35 40 workflowTimeout := eng.WorkflowTimeout() ··· 45 50 Name: w.Name, 46 51 } 47 52 48 - err := db.StatusRunning(wid, n) 53 + wfLogger, err := models.NewFileWorkflowLogger(cfg.Server.LogDir, wid, secretValues) 54 + if err != nil { 55 + l.Warn("failed to setup step logger; logs will not be persisted", "error", err) 56 + wfLogger = models.NullLogger{} 57 + } else { 58 + l.Info("setup step logger; logs will be persisted", "logDir", cfg.Server.LogDir, "wid", wid) 59 + defer wfLogger.Close() 60 + } 61 + 62 + err = db.StatusRunning(wid, n) 49 63 if err != nil { 50 64 l.Error("failed to set workflow status to running", "wid", wid, "err", err) 51 65 return 52 66 } 53 67 54 - err = eng.SetupWorkflow(ctx, wid, &w) 68 + err = eng.SetupWorkflow(ctx, wid, &w, wfLogger) 55 69 if err != nil { 56 70 // TODO(winter): Should this always set StatusFailed? 57 71 // In the original, we only do in a subset of cases. ··· 69 83 return 70 84 } 71 85 defer eng.DestroyWorkflow(ctx, wid) 72 - 73 - secretValues := make([]string, len(allSecrets)) 74 - for i, s := range allSecrets { 75 - secretValues[i] = s.Value 76 - } 77 - wfLogger, err := models.NewWorkflowLogger(cfg.Server.LogDir, wid, secretValues) 78 - if err != nil { 79 - l.Warn("failed to setup step logger; logs will not be persisted", "error", err) 80 - wfLogger = nil 81 - } else { 82 - defer wfLogger.Close() 83 - } 84 86 85 87 ctx, cancel := context.WithTimeout(ctx, workflowTimeout) 86 88 defer cancel()
+60 -10
spindle/engines/nixery/engine.go
··· 1 1 package nixery 2 2 3 3 import ( 4 + "bufio" 4 5 "context" 5 6 "errors" 6 7 "fmt" 7 8 "io" 8 9 "log/slog" 9 - "os" 10 10 "path" 11 11 "runtime" 12 12 "sync" ··· 122 122 } 123 123 124 124 func (e *Engine) WorkflowTimeout() time.Duration { 125 - return e.cfg.NixeryPipelines.WorkflowTimeout 125 + workflowTimeoutStr := e.cfg.NixeryPipelines.WorkflowTimeout 126 + workflowTimeout, err := time.ParseDuration(workflowTimeoutStr) 127 + if err != nil { 128 + e.l.Error("failed to parse workflow timeout", "error", err, "timeout", workflowTimeoutStr) 129 + workflowTimeout = 5 * time.Minute 130 + } 131 + 132 + return workflowTimeout 126 133 } 127 134 128 135 func workflowImage(deps map[string][]string, nixery string) string { ··· 162 169 return e, nil 163 170 } 164 171 165 - func (e *Engine) SetupWorkflow(ctx context.Context, wid models.WorkflowId, wf *models.Workflow) error { 166 - e.l.Info("setting up workflow", "workflow", wid) 172 + func (e *Engine) SetupWorkflow(ctx context.Context, wid models.WorkflowId, wf *models.Workflow, wfLogger models.WorkflowLogger) error { 173 + /// -------------------------INITIAL SETUP------------------------------------------ 174 + l := e.l.With("workflow", wid) 175 + l.Info("setting up workflow") 176 + 177 + setupStep := Step{ 178 + name: "nixery image pull", 179 + kind: models.StepKindSystem, 180 + } 181 + setupStepIdx := -1 167 182 183 + wfLogger.ControlWriter(setupStepIdx, setupStep, models.StepStatusStart).Write([]byte{0}) 184 + defer wfLogger.ControlWriter(setupStepIdx, setupStep, models.StepStatusEnd).Write([]byte{0}) 185 + 186 + /// -------------------------NETWORK CREATION--------------------------------------- 168 187 _, err := e.docker.NetworkCreate(ctx, networkName(wid), network.CreateOptions{ 169 188 Driver: "bridge", 170 189 }) 171 190 if err != nil { 172 191 return err 173 192 } 193 + 174 194 e.registerCleanup(wid, func(ctx context.Context) error { 175 195 if err := e.docker.NetworkRemove(ctx, networkName(wid)); err != nil { 176 196 return fmt.Errorf("removing network: %w", err) ··· 178 198 return nil 179 199 }) 180 200 201 + /// -------------------------IMAGE PULL--------------------------------------------- 181 202 addl := wf.Data.(addlFields) 203 + l.Info("pulling image", "image", addl.image) 204 + fmt.Fprintf( 205 + wfLogger.DataWriter(setupStepIdx, "stdout"), 206 + "pulling image: %s", 207 + addl.image, 208 + ) 182 209 183 210 reader, err := e.docker.ImagePull(ctx, addl.image, image.PullOptions{}) 184 211 if err != nil { 185 - e.l.Error("pipeline image pull failed!", "image", addl.image, "workflowId", wid, "error", err.Error()) 186 - 212 + l.Error("pipeline image pull failed!", "error", err.Error()) 213 + fmt.Fprintf(wfLogger.DataWriter(setupStepIdx, "stderr"), "image pull failed: %s", err) 187 214 return fmt.Errorf("pulling image: %w", err) 188 215 } 189 216 defer reader.Close() 190 - io.Copy(os.Stdout, reader) 217 + 218 + scanner := bufio.NewScanner(reader) 219 + for scanner.Scan() { 220 + line := scanner.Text() 221 + wfLogger.DataWriter(setupStepIdx, "stdout").Write([]byte(line)) 222 + l.Info("image pull progress", "stdout", line) 223 + } 224 + 225 + /// -------------------------CONTAINER CREATION------------------------------------- 226 + l.Info("creating container") 227 + wfLogger.DataWriter(setupStepIdx, "stdout").Write([]byte("creating container...")) 191 228 192 229 resp, err := e.docker.ContainerCreate(ctx, &container.Config{ 193 230 Image: addl.image, ··· 222 259 ExtraHosts: []string{"host.docker.internal:host-gateway"}, 223 260 }, 
nil, nil, "") 224 261 if err != nil { 262 + fmt.Fprintf( 263 + wfLogger.DataWriter(setupStepIdx, "stderr"), 264 + "container creation failed: %s", 265 + err, 266 + ) 225 267 return fmt.Errorf("creating container: %w", err) 226 268 } 269 + 227 270 e.registerCleanup(wid, func(ctx context.Context) error { 228 271 if err := e.docker.ContainerStop(ctx, resp.ID, container.StopOptions{}); err != nil { 229 272 return fmt.Errorf("stopping container: %w", err) ··· 237 280 if err != nil { 238 281 return fmt.Errorf("removing container: %w", err) 239 282 } 283 + 240 284 return nil 241 285 }) 242 286 287 + /// -------------------------CONTAINER START---------------------------------------- 288 + wfLogger.DataWriter(setupStepIdx, "stdout").Write([]byte("starting container...")) 243 289 if err := e.docker.ContainerStart(ctx, resp.ID, container.StartOptions{}); err != nil { 244 290 return fmt.Errorf("starting container: %w", err) 245 291 } ··· 266 312 return err 267 313 } 268 314 315 + /// -----------------------------------FINISH--------------------------------------- 269 316 execInspectResp, err := e.docker.ContainerExecInspect(ctx, mkExecResp.ID) 270 317 if err != nil { 271 318 return err ··· 283 330 return nil 284 331 } 285 332 286 - func (e *Engine) RunStep(ctx context.Context, wid models.WorkflowId, w *models.Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger *models.WorkflowLogger) error { 333 + func (e *Engine) RunStep(ctx context.Context, wid models.WorkflowId, w *models.Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger models.WorkflowLogger) error { 287 334 addl := w.Data.(addlFields) 288 335 workflowEnvs := ConstructEnvs(w.Environment) 289 336 // TODO(winter): should SetupWorkflow also have secret access? ··· 306 353 envs.AddEnv(k, v) 307 354 } 308 355 } 356 + 309 357 envs.AddEnv("HOME", homeDir) 358 + existingPath := "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" 359 + envs.AddEnv("PATH", fmt.Sprintf("%s/.nix-profile/bin:/nix/var/nix/profiles/default/bin:%s", homeDir, existingPath)) 310 360 311 361 mkExecResp, err := e.docker.ContainerExecCreate(ctx, addl.container, container.ExecOptions{ 312 362 Cmd: []string{"bash", "-c", step.Command()}, ··· 321 371 // start tailing logs in background 322 372 tailDone := make(chan error, 1) 323 373 go func() { 324 - tailDone <- e.tailStep(ctx, wfLogger, mkExecResp.ID, wid, idx, step) 374 + tailDone <- e.tailStep(ctx, wfLogger, mkExecResp.ID, idx) 325 375 }() 326 376 327 377 select { ··· 367 417 return nil 368 418 } 369 419 370 - func (e *Engine) tailStep(ctx context.Context, wfLogger *models.WorkflowLogger, execID string, wid models.WorkflowId, stepIdx int, step models.Step) error { 420 + func (e *Engine) tailStep(ctx context.Context, wfLogger models.WorkflowLogger, execID string, stepIdx int) error { 371 421 if wfLogger == nil { 372 422 return nil 373 423 }
+1 -1
spindle/engines/nixery/setup_steps.go
··· 37 37 } 38 38 39 39 if len(customPackages) > 0 { 40 - installCmd := "nix --extra-experimental-features nix-command --extra-experimental-features flakes profile install" 40 + installCmd := "nix --extra-experimental-features nix-command --extra-experimental-features flakes profile add" 41 41 cmd := fmt.Sprintf("%s %s", installCmd, strings.Join(customPackages, " ")) 42 42 installStep := Step{ 43 43 command: cmd,
-73
spindle/git/git.go
··· 1 - package git 2 - 3 - import ( 4 - "bytes" 5 - "context" 6 - "fmt" 7 - "os" 8 - "os/exec" 9 - "strings" 10 - 11 - "github.com/hashicorp/go-version" 12 - ) 13 - 14 - func Version() (*version.Version, error) { 15 - var buf bytes.Buffer 16 - cmd := exec.Command("git", "version") 17 - cmd.Stdout = &buf 18 - cmd.Stderr = os.Stderr 19 - err := cmd.Run() 20 - if err != nil { 21 - return nil, err 22 - } 23 - fields := strings.Fields(buf.String()) 24 - if len(fields) < 3 { 25 - return nil, fmt.Errorf("invalid git version: %s", buf.String()) 26 - } 27 - 28 - // version string is like: "git version 2.29.3" or "git version 2.29.3.windows.1" 29 - versionString := fields[2] 30 - if pos := strings.Index(versionString, "windows"); pos >= 1 { 31 - versionString = versionString[:pos-1] 32 - } 33 - return version.NewVersion(versionString) 34 - } 35 - 36 - const WorkflowDir = `/.tangled/workflows` 37 - 38 - func SparseSyncGitRepo(ctx context.Context, cloneUri, path, rev string) error { 39 - exist, err := isDir(path) 40 - if err != nil { 41 - return err 42 - } 43 - if rev == "" { 44 - rev = "HEAD" 45 - } 46 - if !exist { 47 - if err := exec.Command("git", "clone", "--no-checkout", "--depth=1", "--filter=tree:0", "--revision="+rev, cloneUri, path).Run(); err != nil { 48 - return fmt.Errorf("git clone: %w", err) 49 - } 50 - if err := exec.Command("git", "-C", path, "sparse-checkout", "set", "--no-cone", WorkflowDir).Run(); err != nil { 51 - return fmt.Errorf("git sparse-checkout set: %w", err) 52 - } 53 - } else { 54 - if err := exec.Command("git", "-C", path, "fetch", "--depth=1", "--filter=tree:0", "origin", rev).Run(); err != nil { 55 - return fmt.Errorf("git pull: %w", err) 56 - } 57 - } 58 - if err := exec.Command("git", "-C", path, "checkout", rev).Run(); err != nil { 59 - return fmt.Errorf("git checkout: %w", err) 60 - } 61 - return nil 62 - } 63 - 64 - func isDir(path string) (bool, error) { 65 - info, err := os.Stat(path) 66 - if err == nil && info.IsDir() { 67 - return true, nil 68 - } 69 - if os.IsNotExist(err) { 70 - return false, nil 71 - } 72 - return false, err 73 - }
+300
spindle/ingester.go
··· 1 + package spindle 2 + 3 + import ( 4 + "context" 5 + "encoding/json" 6 + "errors" 7 + "fmt" 8 + "time" 9 + 10 + "tangled.org/core/api/tangled" 11 + "tangled.org/core/eventconsumer" 12 + "tangled.org/core/rbac" 13 + "tangled.org/core/spindle/db" 14 + 15 + comatproto "github.com/bluesky-social/indigo/api/atproto" 16 + "github.com/bluesky-social/indigo/atproto/identity" 17 + "github.com/bluesky-social/indigo/atproto/syntax" 18 + "github.com/bluesky-social/indigo/xrpc" 19 + "github.com/bluesky-social/jetstream/pkg/models" 20 + securejoin "github.com/cyphar/filepath-securejoin" 21 + ) 22 + 23 + type Ingester func(ctx context.Context, e *models.Event) error 24 + 25 + func (s *Spindle) ingest() Ingester { 26 + return func(ctx context.Context, e *models.Event) error { 27 + var err error 28 + defer func() { 29 + eventTime := e.TimeUS 30 + lastTimeUs := eventTime + 1 31 + if err := s.db.SaveLastTimeUs(lastTimeUs); err != nil { 32 + err = fmt.Errorf("(deferred) failed to save last time us: %w", err) 33 + } 34 + }() 35 + 36 + if e.Kind != models.EventKindCommit { 37 + return nil 38 + } 39 + 40 + switch e.Commit.Collection { 41 + case tangled.SpindleMemberNSID: 42 + err = s.ingestMember(ctx, e) 43 + case tangled.RepoNSID: 44 + err = s.ingestRepo(ctx, e) 45 + case tangled.RepoCollaboratorNSID: 46 + err = s.ingestCollaborator(ctx, e) 47 + } 48 + 49 + if err != nil { 50 + s.l.Debug("failed to process message", "nsid", e.Commit.Collection, "err", err) 51 + } 52 + 53 + return nil 54 + } 55 + } 56 + 57 + func (s *Spindle) ingestMember(_ context.Context, e *models.Event) error { 58 + var err error 59 + did := e.Did 60 + rkey := e.Commit.RKey 61 + 62 + l := s.l.With("component", "ingester", "record", tangled.SpindleMemberNSID) 63 + 64 + switch e.Commit.Operation { 65 + case models.CommitOperationCreate, models.CommitOperationUpdate: 66 + raw := e.Commit.Record 67 + record := tangled.SpindleMember{} 68 + err = json.Unmarshal(raw, &record) 69 + if err != nil { 70 + l.Error("invalid record", "error", err) 71 + return err 72 + } 73 + 74 + domain := s.cfg.Server.Hostname 75 + recordInstance := record.Instance 76 + 77 + if recordInstance != domain { 78 + l.Error("domain mismatch", "domain", recordInstance, "expected", domain) 79 + return fmt.Errorf("domain mismatch: %s != %s", record.Instance, domain) 80 + } 81 + 82 + ok, err := s.e.IsSpindleInviteAllowed(did, rbacDomain) 83 + if err != nil || !ok { 84 + l.Error("failed to add member", "did", did, "error", err) 85 + return fmt.Errorf("failed to enforce permissions: %w", err) 86 + } 87 + 88 + if err := db.AddSpindleMember(s.db, db.SpindleMember{ 89 + Did: syntax.DID(did), 90 + Rkey: rkey, 91 + Instance: recordInstance, 92 + Subject: syntax.DID(record.Subject), 93 + Created: time.Now(), 94 + }); err != nil { 95 + l.Error("failed to add member", "error", err) 96 + return fmt.Errorf("failed to add member: %w", err) 97 + } 98 + 99 + if err := s.e.AddSpindleMember(rbacDomain, record.Subject); err != nil { 100 + l.Error("failed to add member", "error", err) 101 + return fmt.Errorf("failed to add member: %w", err) 102 + } 103 + l.Info("added member from firehose", "member", record.Subject) 104 + 105 + if err := s.db.AddDid(record.Subject); err != nil { 106 + l.Error("failed to add did", "error", err) 107 + return fmt.Errorf("failed to add did: %w", err) 108 + } 109 + s.jc.AddDid(record.Subject) 110 + 111 + return nil 112 + 113 + case models.CommitOperationDelete: 114 + record, err := db.GetSpindleMember(s.db, did, rkey) 115 + if err != nil { 116 + l.Error("failed to 
find member", "error", err) 117 + return fmt.Errorf("failed to find member: %w", err) 118 + } 119 + 120 + if err := db.RemoveSpindleMember(s.db, did, rkey); err != nil { 121 + l.Error("failed to remove member", "error", err) 122 + return fmt.Errorf("failed to remove member: %w", err) 123 + } 124 + 125 + if err := s.e.RemoveSpindleMember(rbacDomain, record.Subject.String()); err != nil { 126 + l.Error("failed to add member", "error", err) 127 + return fmt.Errorf("failed to add member: %w", err) 128 + } 129 + l.Info("added member from firehose", "member", record.Subject) 130 + 131 + if err := s.db.RemoveDid(record.Subject.String()); err != nil { 132 + l.Error("failed to add did", "error", err) 133 + return fmt.Errorf("failed to add did: %w", err) 134 + } 135 + s.jc.RemoveDid(record.Subject.String()) 136 + 137 + } 138 + return nil 139 + } 140 + 141 + func (s *Spindle) ingestRepo(ctx context.Context, e *models.Event) error { 142 + var err error 143 + did := e.Did 144 + 145 + l := s.l.With("component", "ingester", "record", tangled.RepoNSID) 146 + 147 + l.Info("ingesting repo record", "did", did) 148 + 149 + switch e.Commit.Operation { 150 + case models.CommitOperationCreate, models.CommitOperationUpdate: 151 + raw := e.Commit.Record 152 + record := tangled.Repo{} 153 + err = json.Unmarshal(raw, &record) 154 + if err != nil { 155 + l.Error("invalid record", "error", err) 156 + return err 157 + } 158 + 159 + domain := s.cfg.Server.Hostname 160 + 161 + // no spindle configured for this repo 162 + if record.Spindle == nil { 163 + l.Info("no spindle configured", "name", record.Name) 164 + return nil 165 + } 166 + 167 + // this repo did not want this spindle 168 + if *record.Spindle != domain { 169 + l.Info("different spindle configured", "name", record.Name, "spindle", *record.Spindle, "domain", domain) 170 + return nil 171 + } 172 + 173 + // add this repo to the watch list 174 + if err := s.db.AddRepo(record.Knot, did, record.Name); err != nil { 175 + l.Error("failed to add repo", "error", err) 176 + return fmt.Errorf("failed to add repo: %w", err) 177 + } 178 + 179 + didSlashRepo, err := securejoin.SecureJoin(did, record.Name) 180 + if err != nil { 181 + return err 182 + } 183 + 184 + // add repo to rbac 185 + if err := s.e.AddRepo(did, rbac.ThisServer, didSlashRepo); err != nil { 186 + l.Error("failed to add repo to enforcer", "error", err) 187 + return fmt.Errorf("failed to add repo: %w", err) 188 + } 189 + 190 + // add collaborators to rbac 191 + owner, err := s.res.ResolveIdent(ctx, did) 192 + if err != nil || owner.Handle.IsInvalidHandle() { 193 + return err 194 + } 195 + if err := s.fetchAndAddCollaborators(ctx, owner, didSlashRepo); err != nil { 196 + return err 197 + } 198 + 199 + // add this knot to the event consumer 200 + src := eventconsumer.NewKnotSource(record.Knot) 201 + s.ks.AddSource(context.Background(), src) 202 + 203 + return nil 204 + 205 + } 206 + return nil 207 + } 208 + 209 + func (s *Spindle) ingestCollaborator(ctx context.Context, e *models.Event) error { 210 + var err error 211 + 212 + l := s.l.With("component", "ingester", "record", tangled.RepoCollaboratorNSID, "did", e.Did) 213 + 214 + l.Info("ingesting collaborator record") 215 + 216 + switch e.Commit.Operation { 217 + case models.CommitOperationCreate, models.CommitOperationUpdate: 218 + raw := e.Commit.Record 219 + record := tangled.RepoCollaborator{} 220 + err = json.Unmarshal(raw, &record) 221 + if err != nil { 222 + l.Error("invalid record", "error", err) 223 + return err 224 + } 225 + 226 + subjectId, err := 
s.res.ResolveIdent(ctx, record.Subject)
227 + if err != nil || subjectId.Handle.IsInvalidHandle() {
228 + return err
229 + }
230 +
231 + repoAt, err := syntax.ParseATURI(record.Repo)
232 + if err != nil {
233 + l.Info("rejecting record, invalid repoAt", "repoAt", record.Repo)
234 + return nil
235 + }
236 +
237 + // TODO: get rid of this entirely
238 + // resolve this aturi to extract the repo record
239 + owner, err := s.res.ResolveIdent(ctx, repoAt.Authority().String())
240 + if err != nil || owner.Handle.IsInvalidHandle() {
241 + return fmt.Errorf("failed to resolve handle: %w", err)
242 + }
243 +
244 + xrpcc := xrpc.Client{
245 + Host: owner.PDSEndpoint(),
246 + }
247 +
248 + resp, err := comatproto.RepoGetRecord(ctx, &xrpcc, "", tangled.RepoNSID, repoAt.Authority().String(), repoAt.RecordKey().String())
249 + if err != nil {
250 + return err
251 + }
252 +
253 + repo := resp.Value.Val.(*tangled.Repo)
254 + didSlashRepo, _ := securejoin.SecureJoin(owner.DID.String(), repo.Name)
255 +
256 + // check perms for this user
257 + if ok, err := s.e.IsCollaboratorInviteAllowed(owner.DID.String(), rbac.ThisServer, didSlashRepo); !ok || err != nil {
258 + return fmt.Errorf("insufficient permissions: %w", err)
259 + }
260 +
261 + // add collaborator to rbac
262 + if err := s.e.AddCollaborator(record.Subject, rbac.ThisServer, didSlashRepo); err != nil {
263 + l.Error("failed to add collaborator to enforcer", "error", err)
264 + return fmt.Errorf("failed to add collaborator: %w", err)
265 + }
266 +
267 + return nil
268 + }
269 + return nil
270 + }
271 +
272 + func (s *Spindle) fetchAndAddCollaborators(ctx context.Context, owner *identity.Identity, didSlashRepo string) error {
273 + l := s.l.With("component", "ingester", "handler", "fetchAndAddCollaborators")
274 +
275 + l.Info("fetching and adding existing collaborators")
276 +
277 + xrpcc := xrpc.Client{
278 + Host: owner.PDSEndpoint(),
279 + }
280 +
281 + resp, err := comatproto.RepoListRecords(ctx, &xrpcc, tangled.RepoCollaboratorNSID, "", 50, owner.DID.String(), false)
282 + if err != nil {
283 + return err
284 + }
285 +
286 + var errs error
287 + for _, r := range resp.Records {
288 + if r == nil {
289 + continue
290 + }
291 + record := r.Value.Val.(*tangled.RepoCollaborator)
292 +
293 + if err := s.e.AddCollaborator(record.Subject, rbac.ThisServer, didSlashRepo); err != nil {
294 + l.Error("failed to add collaborator to enforcer", "error", err)
295 + errs = errors.Join(errs, fmt.Errorf("failed to add collaborator: %w", err))
296 + }
297 + }
298 +
299 + return errs
300 + }
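Note (not part of the diff): the ingester is hooked up during server construction in spindle/server.go below, roughly:

    // subscribe to jetstream commits for the collections the spindle cares about
    // and feed every event through the ingester
    jc.AddDid(cfg.Server.Owner)
    if err := jc.StartJetstream(ctx, spindle.ingest()); err != nil {
        return nil, fmt.Errorf("failed to start jetstream consumer: %w", err)
    }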
-93
spindle/models/adapter.go
··· 1 - package models 2 - 3 - import ( 4 - "context" 5 - 6 - "github.com/bluesky-social/indigo/atproto/syntax" 7 - ) 8 - 9 - // Adapter is the core of the spindle. It can use its own way to configure and 10 - // run the workflows. The workflow definition can be either yaml files in git 11 - // repositories or even from dedicated web UI. 12 - // 13 - // An adapter is expected to be hold all created workflow runs. 14 - type Adapter interface { 15 - // Init intializes the adapter 16 - Init() error 17 - 18 - // Shutdown gracefully shuts down background jobs 19 - Shutdown(ctx context.Context) error 20 - 21 - // SetupRepo ensures adapter connected to the repository. 22 - // This usually includes adding repository watcher that does sparse-clone. 23 - SetupRepo(ctx context.Context, repo syntax.ATURI) error 24 - 25 - // ListWorkflowDefs parses and returns all workflow definitions in the given 26 - // repository at the specified revision 27 - ListWorkflowDefs(ctx context.Context, repo syntax.ATURI, rev string) ([]WorkflowDef, error) 28 - 29 - // EvaluateEvent consumes a trigger event and returns a list of triggered 30 - // workflow runs. It is expected to return immediately after scheduling the 31 - // workflows. 32 - EvaluateEvent(ctx context.Context, event Event) ([]WorkflowRun, error) 33 - 34 - // GetActiveWorkflowRun returns current state of specific workflow run. 35 - // This method will be called regularly for active workflow runs. 36 - GetActiveWorkflowRun(ctx context.Context, runId syntax.ATURI) (WorkflowRun, error) 37 - 38 - 39 - 40 - 41 - // NOTE: baisically I'm not sure about this method. 42 - // How to properly sync workflow.run states? 43 - // 44 - // for adapters with external engine, they will hold every past 45 - // workflow.run objects. 46 - // for adapters with internal engine, they... should also hold every 47 - // past workflow.run objects..? 48 - // 49 - // problem: 50 - // when spindle suffer downtime (spindle server shutdown), 51 - // external `workflow.run`s might be unsynced in "running" or "pending" state 52 - // same for internal `workflow.run`s. 53 - // 54 - // BUT, spindle itself is holding the runs, 55 - // so it already knows unsynced workflows (=workflows not finished) 56 - // therefore, it can just fetch them again. 57 - // for adapters with internal engines, they will fail to fetch previous 58 - // run. 59 - // Leaving spindle to mark the run as "Lost" or "Failed". 60 - // Because of _lacking_ adaters, spindle should be able to manually 61 - // mark unknown runs with "lost" state. 62 - // 63 - // GetWorkflowRun : used to get background crawling 64 - // XCodeCloud: ok 65 - // Nixery: (will fail if unknown) -> spindle will mark workflow as failed anyways 66 - // StreamWorkflowRun : used to notify real-time updates 67 - // XCodeCloud: ok (but old events will be lost) 68 - // Nixery: same. old events on spindle downtime will be lost 69 - // 70 - // 71 - // To avoid this, each adapters should hold outbox buffer 72 - // 73 - // | 74 - // v 75 - 76 - // StreamWorkflowRun(ctx context.Context) <-chan WorkflowRun 77 - 78 - 79 - // ListActiveWorkflowRuns returns current list of active workflow runs. 
80 - // Runs where status is either Pending or Running 81 - ListActiveWorkflowRuns(ctx context.Context) ([]WorkflowRun, error) 82 - SubscribeWorkflowRun(ctx context.Context) <-chan WorkflowRun 83 - 84 - 85 - 86 - 87 - // StreamWorkflowRunLogs streams logs for a running workflow execution 88 - StreamWorkflowRunLogs(ctx context.Context, runId syntax.ATURI, handle func(line LogLine) error) error 89 - 90 - // CancelWorkflowRun attempts to stop a running workflow execution. 91 - // It won't do anything when the workflow has already completed. 92 - CancelWorkflowRun(ctx context.Context, runId syntax.ATURI) error 93 - }
+1 -1
spindle/models/clone.go
··· 69 69 commands: []string{ 70 70 "git init", 71 71 fmt.Sprintf("git remote add origin %s", repoURL), 72 - fmt.Sprintf("GIT_SSL_NO_VERIFY=true git -c http.sslVerify=false fetch %s", strings.Join(fetchArgs, " ")), 72 + fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " ")), 73 73 "git checkout FETCH_HEAD", 74 74 }, 75 75 }
+2 -2
spindle/models/engine.go
··· 10 10 11 11 type Engine interface { 12 12 InitWorkflow(twf tangled.Pipeline_Workflow, tpl tangled.Pipeline) (*Workflow, error) 13 - SetupWorkflow(ctx context.Context, wid WorkflowId, wf *Workflow) error 13 + SetupWorkflow(ctx context.Context, wid WorkflowId, wf *Workflow, wfLogger WorkflowLogger) error 14 14 WorkflowTimeout() time.Duration 15 15 DestroyWorkflow(ctx context.Context, wid WorkflowId) error 16 - RunStep(ctx context.Context, wid WorkflowId, w *Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger *WorkflowLogger) error 16 + RunStep(ctx context.Context, wid WorkflowId, w *Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger WorkflowLogger) error 17 17 }
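Note (not part of the diff): a sketch of a stub engine satisfying the updated interface, mainly to show the two signature changes (SetupWorkflow now receives the logger, and WorkflowLogger is passed as an interface rather than a *WorkflowLogger); the noop package and type here are hypothetical:

    package noop

    import (
        "context"
        "time"

        "tangled.org/core/api/tangled"
        "tangled.org/core/spindle/models"
        "tangled.org/core/spindle/secrets"
    )

    // noopEngine does nothing; it only illustrates the models.Engine surface.
    type noopEngine struct{}

    var _ models.Engine = noopEngine{}

    func (noopEngine) InitWorkflow(twf tangled.Pipeline_Workflow, tpl tangled.Pipeline) (*models.Workflow, error) {
        return &models.Workflow{Name: twf.Name}, nil
    }

    // SetupWorkflow can now write setup output (image pulls, container creation, ...)
    // through the same logger the steps use.
    func (noopEngine) SetupWorkflow(ctx context.Context, wid models.WorkflowId, wf *models.Workflow, wfLogger models.WorkflowLogger) error {
        return nil
    }

    func (noopEngine) WorkflowTimeout() time.Duration { return 5 * time.Minute }

    func (noopEngine) DestroyWorkflow(ctx context.Context, wid models.WorkflowId) error { return nil }

    func (noopEngine) RunStep(ctx context.Context, wid models.WorkflowId, w *models.Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger models.WorkflowLogger) error {
        return nil
    }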
+22 -10
spindle/models/logger.go
··· 9 9 "strings" 10 10 ) 11 11 12 - type WorkflowLogger struct { 12 + type WorkflowLogger interface { 13 + Close() error 14 + DataWriter(idx int, stream string) io.Writer 15 + ControlWriter(idx int, step Step, stepStatus StepStatus) io.Writer 16 + } 17 + 18 + type NullLogger struct{} 19 + 20 + func (l NullLogger) Close() error { return nil } 21 + func (l NullLogger) DataWriter(idx int, stream string) io.Writer { return io.Discard } 22 + func (l NullLogger) ControlWriter(idx int, step Step, stepStatus StepStatus) io.Writer { 23 + return io.Discard 24 + } 25 + 26 + type FileWorkflowLogger struct { 13 27 file *os.File 14 28 encoder *json.Encoder 15 29 mask *SecretMask 16 30 } 17 31 18 - func NewWorkflowLogger(baseDir string, wid WorkflowId, secretValues []string) (*WorkflowLogger, error) { 32 + func NewFileWorkflowLogger(baseDir string, wid WorkflowId, secretValues []string) (WorkflowLogger, error) { 19 33 path := LogFilePath(baseDir, wid) 20 - 21 34 file, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) 22 35 if err != nil { 23 36 return nil, fmt.Errorf("creating log file: %w", err) 24 37 } 25 - 26 - return &WorkflowLogger{ 38 + return &FileWorkflowLogger{ 27 39 file: file, 28 40 encoder: json.NewEncoder(file), 29 41 mask: NewSecretMask(secretValues), ··· 35 47 return logFilePath 36 48 } 37 49 38 - func (l *WorkflowLogger) Close() error { 50 + func (l *FileWorkflowLogger) Close() error { 39 51 return l.file.Close() 40 52 } 41 53 42 - func (l *WorkflowLogger) DataWriter(idx int, stream string) io.Writer { 54 + func (l *FileWorkflowLogger) DataWriter(idx int, stream string) io.Writer { 43 55 return &dataWriter{ 44 56 logger: l, 45 57 idx: idx, ··· 47 59 } 48 60 } 49 61 50 - func (l *WorkflowLogger) ControlWriter(idx int, step Step, stepStatus StepStatus) io.Writer { 62 + func (l *FileWorkflowLogger) ControlWriter(idx int, step Step, stepStatus StepStatus) io.Writer { 51 63 return &controlWriter{ 52 64 logger: l, 53 65 idx: idx, ··· 57 69 } 58 70 59 71 type dataWriter struct { 60 - logger *WorkflowLogger 72 + logger *FileWorkflowLogger 61 73 idx int 62 74 stream string 63 75 } ··· 75 87 } 76 88 77 89 type controlWriter struct { 78 - logger *WorkflowLogger 90 + logger *FileWorkflowLogger 79 91 idx int 80 92 step Step 81 93 stepStatus StepStatus
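Note (not part of the diff): call sites now hold the WorkflowLogger interface and fall back to NullLogger when the file-backed logger cannot be created, which is exactly what spindle/engine/engine.go above does:

    // prefer persisted, secret-masked logs, but degrade to a no-op logger
    // rather than failing the whole workflow run
    wfLogger, err := models.NewFileWorkflowLogger(cfg.Server.LogDir, wid, secretValues)
    if err != nil {
        wfLogger = models.NullLogger{}
    } else {
        defer wfLogger.Close()
    }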
-124
spindle/models/pipeline2.go
··· 1 - package models 2 - 3 - import ( 4 - "fmt" 5 - "slices" 6 - 7 - "github.com/bluesky-social/indigo/atproto/syntax" 8 - "tangled.org/core/api/tangled" 9 - ) 10 - 11 - // `sh.tangled.ci.event` 12 - type Event struct { 13 - SourceRepo syntax.ATURI // repository to find the workflow definition 14 - SourceSha string // sha to find the workflow definition 15 - TargetSha string // sha to run the workflow 16 - // union type of: 17 - // 1. PullRequestEvent 18 - // 2. PushEvent 19 - // 3. ManualEvent 20 - } 21 - 22 - func (e *Event) AsRecord() tangled.CiEvent { 23 - // var meta tangled.CiEvent_Meta 24 - // return tangled.CiEvent{ 25 - // Meta: &meta, 26 - // } 27 - panic("unimplemented") 28 - } 29 - 30 - // `sh.tangled.ci.pipeline` 31 - // 32 - // Pipeline is basically a group of workflows triggered by single event. 33 - type Pipeline2 struct { 34 - Did syntax.DID 35 - Rkey syntax.RecordKey 36 - 37 - Event Event // event that triggered the pipeline 38 - WorkflowRuns []WorkflowRun // workflow runs inside this pipeline 39 - } 40 - 41 - func (p *Pipeline2) AtUri() syntax.ATURI { 42 - return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", p.Did, tangled.CiPipelineNSID, p.Rkey)) 43 - } 44 - 45 - func (p *Pipeline2) AsRecord() tangled.CiPipeline { 46 - event := p.Event.AsRecord() 47 - runs := make([]string, len(p.WorkflowRuns)) 48 - for i, run := range p.WorkflowRuns { 49 - runs[i] = run.AtUri().String() 50 - } 51 - return tangled.CiPipeline{ 52 - Event: &event, 53 - WorkflowRuns: runs, 54 - } 55 - } 56 - 57 - // `sh.tangled.ci.workflow.run` 58 - type WorkflowRun struct { 59 - Did syntax.DID 60 - Rkey syntax.RecordKey 61 - 62 - AdapterId string // adapter id 63 - Name string // name of workflow run (not workflow definition name!) 64 - Status WorkflowStatus // workflow status 65 - // TODO: can add some custom fields like adapter-specific log-id 66 - } 67 - 68 - func (r WorkflowRun) WithStatus(status WorkflowStatus) WorkflowRun { 69 - return WorkflowRun{ 70 - Did: r.Did, 71 - Rkey: r.Rkey, 72 - AdapterId: r.AdapterId, 73 - Name: r.Name, 74 - Status: status, 75 - } 76 - } 77 - 78 - func (r *WorkflowRun) AtUri() syntax.ATURI { 79 - return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", r.Did, tangled.CiWorkflowRunNSID, r.Rkey)) 80 - } 81 - 82 - func (r *WorkflowRun) AsRecord() tangled.CiWorkflowRun { 83 - statusStr := string(r.Status) 84 - return tangled.CiWorkflowRun{ 85 - Adapter: r.AdapterId, 86 - Name: r.Name, 87 - Status: &statusStr, 88 - } 89 - } 90 - 91 - // `sh.tangled.ci.workflow.status` 92 - type WorkflowStatus string 93 - 94 - var ( 95 - WorkflowStatusPending WorkflowStatus = "pending" 96 - WorkflowStatusRunning WorkflowStatus = "running" 97 - WorkflowStatusFailed WorkflowStatus = "failed" 98 - WorkflowStatusCancelled WorkflowStatus = "cancelled" 99 - WorkflowStatusSuccess WorkflowStatus = "success" 100 - WorkflowStatusTimeout WorkflowStatus = "timeout" 101 - 102 - activeStatuses [2]WorkflowStatus = [2]WorkflowStatus{ 103 - WorkflowStatusPending, 104 - WorkflowStatusRunning, 105 - } 106 - ) 107 - 108 - func (s WorkflowStatus) IsActive() bool { 109 - return slices.Contains(activeStatuses[:], s) 110 - } 111 - 112 - func (s WorkflowStatus) IsFinish() bool { 113 - return !s.IsActive() 114 - } 115 - 116 - // `sh.tangled.ci.workflow.def` 117 - // 118 - // Brief information of the workflow definition. A workflow can be defined in 119 - // any form. 
This is a common info struct for any workflow definitions 120 - type WorkflowDef struct { 121 - AdapterId string // adapter id 122 - Name string // name or the workflow (usually the yml file name) 123 - When any // events the workflow is listening to 124 - }
-40
spindle/pipeline.go
··· 1 - package spindle 2 - 3 - import ( 4 - "context" 5 - 6 - "tangled.org/core/spindle/models" 7 - ) 8 - 9 - // createPipeline creates a pipeline from given event. 10 - // It will call `EvaluateEvent` for all adapters, gather the triggered workflow 11 - // runs, and constuct a pipeline record from them. pipeline record. It will 12 - // return nil if no workflow run has triggered. 13 - // 14 - // NOTE: This method won't fail. If `adapter.EvaluateEvent` returns an error, 15 - // the error will be logged but won't bubble-up. 16 - // 17 - // NOTE: Adapters might create sub-event on its own for workflows triggered by 18 - // other workflow runs. 19 - func (s *Spindle) createPipeline(ctx context.Context, event models.Event) (*models.Pipeline2) { 20 - l := s.l 21 - 22 - pipeline := models.Pipeline2{ 23 - Event: event, 24 - } 25 - 26 - // TODO: run in parallel 27 - for id, adapter := range s.adapters { 28 - runs, err := adapter.EvaluateEvent(ctx, event) 29 - if err != nil { 30 - l.Error("failed to process trigger from adapter '%s': %w", id, err) 31 - } 32 - pipeline.WorkflowRuns = append(pipeline.WorkflowRuns, runs...) 33 - } 34 - 35 - if len(pipeline.WorkflowRuns) == 0 { 36 - return nil 37 - } 38 - 39 - return &pipeline 40 - }
-169
spindle/repomanager/repomanager.go
··· 1 - package repomanager 2 - 3 - import ( 4 - "bufio" 5 - "bytes" 6 - "context" 7 - "errors" 8 - "fmt" 9 - "os" 10 - "os/exec" 11 - "path/filepath" 12 - "slices" 13 - "strings" 14 - 15 - "github.com/bluesky-social/indigo/atproto/syntax" 16 - "github.com/go-git/go-git/v5" 17 - "github.com/go-git/go-git/v5/config" 18 - "github.com/go-git/go-git/v5/plumbing/object" 19 - kgit "tangled.org/core/knotserver/git" 20 - "tangled.org/core/types" 21 - ) 22 - 23 - // RepoManager manages a `sh.tangled.repo` record with its git context. 24 - // It can be used to efficiently fetch the filetree of the repository. 25 - type RepoManager struct { 26 - repoDir string 27 - // TODO: it would be nice if RepoManager can be configured with different 28 - // strategies: 29 - // - use db as an only source for repo records 30 - // - use atproto if record doesn't exist from the db 31 - // - always use atproto 32 - // hmm do we need `RepoStore` interface? 33 - // now `DbRepoStore` and `AtprotoRepoStore` can implement both. 34 - // all `RepoStore` objects will hold `KnotStore` interface, so they can 35 - // source the knot store if needed. 36 - 37 - // but now we can't do complex queries like "get repo with issue count" 38 - // that kind of queries will be done directly from `appview.DB` struct 39 - // is graphql better tech for atproto? 40 - } 41 - 42 - func New(repoDir string) RepoManager { 43 - return RepoManager{ 44 - repoDir: repoDir, 45 - } 46 - } 47 - 48 - // TODO: RepoManager can return file tree from repoAt & rev 49 - // It will start syncing the repository if doesn't exist 50 - 51 - // RegisterRepo starts sparse-syncing repository with paths 52 - func (m *RepoManager) RegisterRepo(ctx context.Context, repoAt syntax.ATURI, paths []string) error { 53 - repoPath := m.repoPath(repoAt) 54 - exist, err := isDir(repoPath) 55 - if err != nil { 56 - return fmt.Errorf("checking dir info: %w", err) 57 - } 58 - var sparsePaths []string 59 - if !exist { 60 - // init bare git repo 61 - repo, err := git.PlainInit(repoPath, true) 62 - if err != nil { 63 - return fmt.Errorf("initializing repo: %w", err) 64 - } 65 - _, err = repo.CreateRemote(&config.RemoteConfig{ 66 - Name: "origin", 67 - URLs: []string{m.repoCloneUrl(repoAt)}, 68 - }) 69 - if err != nil { 70 - return fmt.Errorf("configuring repo remote: %w", err) 71 - } 72 - } else { 73 - // get sparse-checkout list 74 - sparsePaths, err = func(path string) ([]string, error) { 75 - var stdout bytes.Buffer 76 - listCmd := exec.Command("git", "-C", path, "sparse-checkout", "list") 77 - listCmd.Stdout = &stdout 78 - if err := listCmd.Run(); err != nil { 79 - return nil, err 80 - } 81 - 82 - var sparseList []string 83 - scanner := bufio.NewScanner(&stdout) 84 - for scanner.Scan() { 85 - line := strings.TrimSpace(scanner.Text()) 86 - if line == "" { 87 - continue 88 - } 89 - sparseList = append(sparseList, line) 90 - } 91 - if err := scanner.Err(); err != nil { 92 - return nil, fmt.Errorf("scanning stdout: %w", err) 93 - } 94 - 95 - return sparseList, nil 96 - }(repoPath) 97 - if err != nil { 98 - return fmt.Errorf("parsing sparse-checkout list: %w", err) 99 - } 100 - 101 - // add paths to sparse-checkout list 102 - for _, path := range paths { 103 - sparsePaths = append(sparsePaths, path) 104 - } 105 - sparsePaths = slices.Collect(slices.Values(sparsePaths)) 106 - } 107 - 108 - // set sparse-checkout list 109 - args := append([]string{"-C", repoPath, "sparse-checkout", "set", "--no-cone"}, sparsePaths...) 
110 - if err := exec.Command("git", args...).Run(); err != nil { 111 - return fmt.Errorf("setting sparse-checkout list: %w", err) 112 - } 113 - return nil 114 - } 115 - 116 - // SyncRepo sparse-fetch specific rev of the repo 117 - func (m *RepoManager) SyncRepo(ctx context.Context, repo syntax.ATURI, rev string) error { 118 - // TODO: fetch repo with rev. 119 - panic("unimplemented") 120 - } 121 - 122 - func (m *RepoManager) Open(repo syntax.ATURI, rev string) (*kgit.GitRepo, error) { 123 - // TODO: don't depend on knot/git 124 - return kgit.Open(m.repoPath(repo), rev) 125 - } 126 - 127 - func (m *RepoManager) FileTree(ctx context.Context, repo syntax.ATURI, rev, path string) ([]types.NiceTree, error) { 128 - if err := m.SyncRepo(ctx, repo, rev); err != nil { 129 - return nil, fmt.Errorf("syncing git repo") 130 - } 131 - gr, err := m.Open(repo, rev) 132 - if err != nil { 133 - return nil, err 134 - } 135 - dir, err := gr.FileTree(ctx, path) 136 - if err != nil { 137 - if errors.Is(err, object.ErrDirectoryNotFound) { 138 - return nil, nil 139 - } 140 - return nil, fmt.Errorf("loading file tree: %w", err) 141 - } 142 - return dir, err 143 - } 144 - 145 - func (m *RepoManager) repoPath(repo syntax.ATURI) string { 146 - return filepath.Join( 147 - m.repoDir, 148 - repo.Authority().String(), 149 - repo.Collection().String(), 150 - repo.RecordKey().String(), 151 - ) 152 - } 153 - 154 - func (m *RepoManager) repoCloneUrl(repo syntax.ATURI) string { 155 - // 1. get repo & knot models from db. fetch it if doesn't exist 156 - // 2. construct https clone url 157 - panic("unimplemented") 158 - } 159 - 160 - func isDir(path string) (bool, error) { 161 - info, err := os.Stat(path) 162 - if err == nil && info.IsDir() { 163 - return true, nil 164 - } 165 - if os.IsNotExist(err) { 166 - return false, nil 167 - } 168 - return false, err 169 - }
+150 -223
spindle/server.go
··· 4 4 "context" 5 5 _ "embed" 6 6 "encoding/json" 7 - "errors" 8 7 "fmt" 9 8 "log/slog" 10 9 "maps" 11 10 "net/http" 12 - "path/filepath" 13 11 "sync" 14 12 15 - "github.com/bluesky-social/indigo/atproto/syntax" 16 13 "github.com/go-chi/chi/v5" 17 - "github.com/go-git/go-git/v5/plumbing/object" 18 - "github.com/hashicorp/go-version" 19 14 "tangled.org/core/api/tangled" 20 15 "tangled.org/core/eventconsumer" 21 16 "tangled.org/core/eventconsumer/cursor" 22 17 "tangled.org/core/idresolver" 23 - kgit "tangled.org/core/knotserver/git" 18 + "tangled.org/core/jetstream" 24 19 "tangled.org/core/log" 25 20 "tangled.org/core/notifier" 26 - "tangled.org/core/rbac2" 21 + "tangled.org/core/rbac" 27 22 "tangled.org/core/spindle/config" 28 23 "tangled.org/core/spindle/db" 29 24 "tangled.org/core/spindle/engine" 30 25 "tangled.org/core/spindle/engines/nixery" 31 - "tangled.org/core/spindle/git" 32 26 "tangled.org/core/spindle/models" 33 27 "tangled.org/core/spindle/queue" 34 28 "tangled.org/core/spindle/secrets" 35 29 "tangled.org/core/spindle/xrpc" 36 - "tangled.org/core/tap" 37 - "tangled.org/core/tid" 38 - "tangled.org/core/workflow" 39 30 "tangled.org/core/xrpc/serviceauth" 40 31 ) 41 32 42 33 //go:embed motd 43 34 var defaultMotd []byte 44 35 36 + const ( 37 + rbacDomain = "thisserver" 38 + ) 39 + 45 40 type Spindle struct { 46 - tap *tap.Client 41 + jc *jetstream.JetstreamClient 47 42 db *db.DB 48 - e *rbac2.Enforcer 43 + e *rbac.Enforcer 49 44 l *slog.Logger 50 45 n *notifier.Notifier 51 46 engs map[string]models.Engine 52 - adapters map[string]models.Adapter 53 47 jq *queue.Queue 54 48 cfg *config.Config 55 49 ks *eventconsumer.Consumer ··· 63 57 func New(ctx context.Context, cfg *config.Config, engines map[string]models.Engine) (*Spindle, error) { 64 58 logger := log.FromContext(ctx) 65 59 66 - if err := ensureGitVersion(); err != nil { 67 - return nil, fmt.Errorf("ensuring git version: %w", err) 68 - } 69 - 70 - d, err := db.Make(ctx, cfg.Server.DBPath()) 60 + d, err := db.Make(cfg.Server.DBPath) 71 61 if err != nil { 72 62 return nil, fmt.Errorf("failed to setup db: %w", err) 73 63 } 74 64 75 - e, err := rbac2.NewEnforcer(cfg.Server.DBPath()) 65 + e, err := rbac.NewEnforcer(cfg.Server.DBPath) 76 66 if err != nil { 77 67 return nil, fmt.Errorf("failed to setup rbac enforcer: %w", err) 78 68 } 69 + e.E.EnableAutoSave(true) 79 70 80 71 n := notifier.New() 81 72 ··· 95 86 } 96 87 logger.Info("using openbao secrets provider", "proxy_address", cfg.Server.Secrets.OpenBao.ProxyAddr, "mount", cfg.Server.Secrets.OpenBao.Mount) 97 88 case "sqlite", "": 98 - vault, err = secrets.NewSQLiteManager(cfg.Server.DBPath(), secrets.WithTableName("secrets")) 89 + vault, err = secrets.NewSQLiteManager(cfg.Server.DBPath, secrets.WithTableName("secrets")) 99 90 if err != nil { 100 91 return nil, fmt.Errorf("failed to setup sqlite secrets provider: %w", err) 101 92 } 102 - logger.Info("using sqlite secrets provider", "path", cfg.Server.DBPath()) 93 + logger.Info("using sqlite secrets provider", "path", cfg.Server.DBPath) 103 94 default: 104 95 return nil, fmt.Errorf("unknown secrets provider: %s", cfg.Server.Secrets.Provider) 105 96 } ··· 107 98 jq := queue.NewQueue(cfg.Server.QueueSize, cfg.Server.MaxJobCount) 108 99 logger.Info("initialized queue", "queueSize", cfg.Server.QueueSize, "numWorkers", cfg.Server.MaxJobCount) 109 100 110 - tap := tap.NewClient(cfg.Server.TapUrl, "") 101 + collections := []string{ 102 + tangled.SpindleMemberNSID, 103 + tangled.RepoNSID, 104 + tangled.RepoCollaboratorNSID, 105 + } 106 + 
jc, err := jetstream.NewJetstreamClient(cfg.Server.JetstreamEndpoint, "spindle", collections, nil, log.SubLogger(logger, "jetstream"), d, true, true) 107 + if err != nil { 108 + return nil, fmt.Errorf("failed to setup jetstream client: %w", err) 109 + } 110 + jc.AddDid(cfg.Server.Owner) 111 + 112 + // Check if the spindle knows about any Dids; 113 + dids, err := d.GetAllDids() 114 + if err != nil { 115 + return nil, fmt.Errorf("failed to get all dids: %w", err) 116 + } 117 + for _, d := range dids { 118 + jc.AddDid(d) 119 + } 111 120 112 121 resolver := idresolver.DefaultResolver(cfg.Server.PlcUrl) 113 122 114 123 spindle := &Spindle{ 115 - tap: &tap, 124 + jc: jc, 116 125 e: e, 117 126 db: d, 118 127 l: logger, ··· 125 134 motd: defaultMotd, 126 135 } 127 136 128 - err = e.SetSpindleOwner(spindle.cfg.Server.Owner, spindle.cfg.Server.Did()) 137 + err = e.AddSpindle(rbacDomain) 138 + if err != nil { 139 + return nil, fmt.Errorf("failed to set rbac domain: %w", err) 140 + } 141 + err = spindle.configureOwner() 129 142 if err != nil { 130 143 return nil, err 131 144 } 132 145 logger.Info("owner set", "did", cfg.Server.Owner) 133 146 134 - cursorStore, err := cursor.NewSQLiteStore(cfg.Server.DBPath()) 147 + cursorStore, err := cursor.NewSQLiteStore(cfg.Server.DBPath) 135 148 if err != nil { 136 149 return nil, fmt.Errorf("failed to setup sqlite3 cursor store: %w", err) 137 150 } 138 151 139 - // spindle listen to knot stream for sh.tangled.git.refUpdate 140 - // which will sync the local workflow files in spindle and enqueues the 141 - // pipeline job for on-push workflows 152 + err = jc.StartJetstream(ctx, spindle.ingest()) 153 + if err != nil { 154 + return nil, fmt.Errorf("failed to start jetstream consumer: %w", err) 155 + } 156 + 157 + // for each incoming sh.tangled.pipeline, we execute 158 + // spindle.processPipeline, which in turn enqueues the pipeline 159 + // job in the above registered queue. 142 160 ccfg := eventconsumer.NewConsumerConfig() 143 161 ccfg.Logger = log.SubLogger(logger, "eventconsumer") 144 162 ccfg.Dev = cfg.Server.Dev 145 - ccfg.ProcessFunc = spindle.processKnotStream 163 + ccfg.ProcessFunc = spindle.processPipeline 146 164 ccfg.CursorStore = cursorStore 147 165 knownKnots, err := d.Knots() 148 166 if err != nil { ··· 183 201 } 184 202 185 203 // Enforcer returns the RBAC enforcer instance. 
186 - func (s *Spindle) Enforcer() *rbac2.Enforcer { 204 + func (s *Spindle) Enforcer() *rbac.Enforcer { 187 205 return s.e 188 206 } 189 207 ··· 217 235 s.ks.Start(ctx) 218 236 }() 219 237 220 - // ensure server owner is tracked 221 - if err := s.tap.AddRepos(ctx, []syntax.DID{s.cfg.Server.Owner}); err != nil { 222 - return err 223 - } 224 - 225 - go func() { 226 - s.l.Info("starting tap stream consumer") 227 - s.tap.Connect(ctx, &tap.SimpleIndexer{ 228 - EventHandler: s.processEvent, 229 - }) 230 - }() 231 - 232 238 s.l.Info("starting spindle server", "address", s.cfg.Server.ListenAddr) 233 239 return http.ListenAndServe(s.cfg.Server.ListenAddr, s.Router()) 234 240 } ··· 287 293 return x.Router() 288 294 } 289 295 290 - func (s *Spindle) processKnotStream(ctx context.Context, src eventconsumer.Source, msg eventconsumer.Message) error { 291 - l := log.FromContext(ctx).With("handler", "processKnotStream") 292 - l = l.With("src", src.Key(), "msg.Nsid", msg.Nsid, "msg.Rkey", msg.Rkey) 293 - if msg.Nsid == tangled.GitRefUpdateNSID { 294 - event := tangled.GitRefUpdate{} 295 - if err := json.Unmarshal(msg.EventJson, &event); err != nil { 296 - l.Error("error unmarshalling", "err", err) 296 + func (s *Spindle) processPipeline(ctx context.Context, src eventconsumer.Source, msg eventconsumer.Message) error { 297 + if msg.Nsid == tangled.PipelineNSID { 298 + tpl := tangled.Pipeline{} 299 + err := json.Unmarshal(msg.EventJson, &tpl) 300 + if err != nil { 301 + fmt.Println("error unmarshalling", err) 297 302 return err 298 303 } 299 - l = l.With("repoDid", event.RepoDid, "repoName", event.RepoName) 300 304 301 - // resolve repo name to rkey 302 - // TODO: git.refUpdate should respond with rkey instead of repo name 303 - repo, err := s.db.GetRepoWithName(syntax.DID(event.RepoDid), event.RepoName) 304 - if err != nil { 305 - return fmt.Errorf("get repo with did and name (%s/%s): %w", event.RepoDid, event.RepoName, err) 305 + if tpl.TriggerMetadata == nil { 306 + return fmt.Errorf("no trigger metadata found") 306 307 } 307 308 308 - // NOTE: we are blindly trusting the knot that it will return only repos it own 309 - repoCloneUri := s.newRepoCloneUrl(src.Key(), event.RepoDid, event.RepoName) 310 - repoPath := s.newRepoPath(repo.Did, repo.Rkey) 311 - if err := git.SparseSyncGitRepo(ctx, repoCloneUri, repoPath, event.NewSha); err != nil { 312 - return fmt.Errorf("sync git repo: %w", err) 309 + if tpl.TriggerMetadata.Repo == nil { 310 + return fmt.Errorf("no repo data found") 313 311 } 314 - l.Info("synced git repo") 315 312 316 - compiler := workflow.Compiler{ 317 - Trigger: tangled.Pipeline_TriggerMetadata{ 318 - Kind: string(workflow.TriggerKindPush), 319 - Push: &tangled.Pipeline_PushTriggerData{ 320 - Ref: event.Ref, 321 - OldSha: event.OldSha, 322 - NewSha: event.NewSha, 323 - }, 324 - Repo: &tangled.Pipeline_TriggerRepo{ 325 - Did: repo.Did.String(), 326 - Knot: repo.Knot, 327 - Repo: repo.Name, 328 - }, 329 - }, 313 + if src.Key() != tpl.TriggerMetadata.Repo.Knot { 314 + return fmt.Errorf("repo knot does not match event source: %s != %s", src.Key(), tpl.TriggerMetadata.Repo.Knot) 330 315 } 331 316 332 - // load workflow definitions from rev (without spindle context) 333 - rawPipeline, err := s.loadPipeline(ctx, repoCloneUri, repoPath, event.NewSha) 317 + // filter by repos 318 + _, err = s.db.GetRepo( 319 + tpl.TriggerMetadata.Repo.Knot, 320 + tpl.TriggerMetadata.Repo.Did, 321 + tpl.TriggerMetadata.Repo.Repo, 322 + ) 334 323 if err != nil { 335 - return fmt.Errorf("loading pipeline: %w", err) 336 
- } 337 - if len(rawPipeline) == 0 { 338 - l.Info("no workflow definition find for the repo. skipping the event") 339 - return nil 340 - } 341 - tpl := compiler.Compile(compiler.Parse(rawPipeline)) 342 - // TODO: pass compile error to workflow log 343 - for _, w := range compiler.Diagnostics.Errors { 344 - l.Error(w.String()) 345 - } 346 - for _, w := range compiler.Diagnostics.Warnings { 347 - l.Warn(w.String()) 324 + return fmt.Errorf("failed to get repo: %w", err) 348 325 } 349 326 350 327 pipelineId := models.PipelineId{ 351 - Knot: tpl.TriggerMetadata.Repo.Knot, 352 - Rkey: tid.TID(), 353 - } 354 - if err := s.db.CreatePipelineEvent(pipelineId.Rkey, tpl, s.n); err != nil { 355 - l.Error("failed to create pipeline event", "err", err) 356 - return nil 357 - } 358 - err = s.processPipeline(ctx, tpl, pipelineId) 359 - if err != nil { 360 - return err 328 + Knot: src.Key(), 329 + Rkey: msg.Rkey, 361 330 } 362 - } 363 331 364 - return nil 365 - } 332 + workflows := make(map[models.Engine][]models.Workflow) 333 + 334 + // Build pipeline environment variables once for all workflows 335 + pipelineEnv := models.PipelineEnvVars(tpl.TriggerMetadata, pipelineId, s.cfg.Server.Dev) 366 336 367 - func (s *Spindle) loadPipeline(ctx context.Context, repoUri, repoPath, rev string) (workflow.RawPipeline, error) { 368 - if err := git.SparseSyncGitRepo(ctx, repoUri, repoPath, rev); err != nil { 369 - return nil, fmt.Errorf("syncing git repo: %w", err) 370 - } 371 - gr, err := kgit.Open(repoPath, rev) 372 - if err != nil { 373 - return nil, fmt.Errorf("opening git repo: %w", err) 374 - } 337 + for _, w := range tpl.Workflows { 338 + if w != nil { 339 + if _, ok := s.engs[w.Engine]; !ok { 340 + err = s.db.StatusFailed(models.WorkflowId{ 341 + PipelineId: pipelineId, 342 + Name: w.Name, 343 + }, fmt.Sprintf("unknown engine %#v", w.Engine), -1, s.n) 344 + if err != nil { 345 + return fmt.Errorf("db.StatusFailed: %w", err) 346 + } 375 347 376 - workflowDir, err := gr.FileTree(ctx, workflow.WorkflowDir) 377 - if errors.Is(err, object.ErrDirectoryNotFound) { 378 - // return empty RawPipeline when directory doesn't exist 379 - return nil, nil 380 - } else if err != nil { 381 - return nil, fmt.Errorf("loading file tree: %w", err) 382 - } 348 + continue 349 + } 383 350 384 - var rawPipeline workflow.RawPipeline 385 - for _, e := range workflowDir { 386 - if !e.IsFile() { 387 - continue 388 - } 351 + eng := s.engs[w.Engine] 389 352 390 - fpath := filepath.Join(workflow.WorkflowDir, e.Name) 391 - contents, err := gr.RawContent(fpath) 392 - if err != nil { 393 - return nil, fmt.Errorf("reading raw content of '%s': %w", fpath, err) 394 - } 353 + if _, ok := workflows[eng]; !ok { 354 + workflows[eng] = []models.Workflow{} 355 + } 395 356 396 - rawPipeline = append(rawPipeline, workflow.RawWorkflow{ 397 - Name: e.Name, 398 - Contents: contents, 399 - }) 400 - } 357 + ewf, err := s.engs[w.Engine].InitWorkflow(*w, tpl) 358 + if err != nil { 359 + return fmt.Errorf("init workflow: %w", err) 360 + } 401 361 402 - return rawPipeline, nil 403 - } 362 + // inject TANGLED_* env vars after InitWorkflow 363 + // This prevents user-defined env vars from overriding them 364 + if ewf.Environment == nil { 365 + ewf.Environment = make(map[string]string) 366 + } 367 + maps.Copy(ewf.Environment, pipelineEnv) 404 368 405 - func (s *Spindle) processPipeline(ctx context.Context, tpl tangled.Pipeline, pipelineId models.PipelineId) error { 406 - // Build pipeline environment variables once for all workflows 407 - pipelineEnv := 
models.PipelineEnvVars(tpl.TriggerMetadata, pipelineId, s.cfg.Server.Dev) 369 + workflows[eng] = append(workflows[eng], *ewf) 408 370 409 - // filter & init workflows 410 - workflows := make(map[models.Engine][]models.Workflow) 411 - for _, w := range tpl.Workflows { 412 - if w == nil { 413 - continue 414 - } 415 - if _, ok := s.engs[w.Engine]; !ok { 416 - err := s.db.StatusFailed(models.WorkflowId{ 417 - PipelineId: pipelineId, 418 - Name: w.Name, 419 - }, fmt.Sprintf("unknown engine %#v", w.Engine), -1, s.n) 420 - if err != nil { 421 - return fmt.Errorf("db.StatusFailed: %w", err) 371 + err = s.db.StatusPending(models.WorkflowId{ 372 + PipelineId: pipelineId, 373 + Name: w.Name, 374 + }, s.n) 375 + if err != nil { 376 + return fmt.Errorf("db.StatusPending: %w", err) 377 + } 422 378 } 423 - 424 - continue 425 379 } 426 380 427 - eng := s.engs[w.Engine] 428 - 429 - if _, ok := workflows[eng]; !ok { 430 - workflows[eng] = []models.Workflow{} 431 - } 432 - 433 - ewf, err := s.engs[w.Engine].InitWorkflow(*w, tpl) 434 - if err != nil { 435 - return fmt.Errorf("init workflow: %w", err) 436 - } 437 - 438 - // inject TANGLED_* env vars after InitWorkflow 439 - // This prevents user-defined env vars from overriding them 440 - if ewf.Environment == nil { 441 - ewf.Environment = make(map[string]string) 381 + ok := s.jq.Enqueue(queue.Job{ 382 + Run: func() error { 383 + engine.StartWorkflows(log.SubLogger(s.l, "engine"), s.vault, s.cfg, s.db, s.n, ctx, &models.Pipeline{ 384 + RepoOwner: tpl.TriggerMetadata.Repo.Did, 385 + RepoName: tpl.TriggerMetadata.Repo.Repo, 386 + Workflows: workflows, 387 + }, pipelineId) 388 + return nil 389 + }, 390 + OnFail: func(jobError error) { 391 + s.l.Error("pipeline run failed", "error", jobError) 392 + }, 393 + }) 394 + if ok { 395 + s.l.Info("pipeline enqueued successfully", "id", msg.Rkey) 396 + } else { 397 + s.l.Error("failed to enqueue pipeline: queue is full") 442 398 } 443 - maps.Copy(ewf.Environment, pipelineEnv) 444 - 445 - workflows[eng] = append(workflows[eng], *ewf) 446 399 } 447 400 448 - // enqueue pipeline 449 - ok := s.jq.Enqueue(queue.Job{ 450 - Run: func() error { 451 - engine.StartWorkflows(log.SubLogger(s.l, "engine"), s.vault, s.cfg, s.db, s.n, ctx, &models.Pipeline{ 452 - RepoOwner: tpl.TriggerMetadata.Repo.Did, 453 - RepoName: tpl.TriggerMetadata.Repo.Repo, 454 - Workflows: workflows, 455 - }, pipelineId) 456 - return nil 457 - }, 458 - OnFail: func(jobError error) { 459 - s.l.Error("pipeline run failed", "error", jobError) 460 - }, 461 - }) 462 - if !ok { 463 - return fmt.Errorf("failed to enqueue pipeline: queue is full") 464 - } 465 - s.l.Info("pipeline enqueued successfully", "id", pipelineId) 466 - 467 - // emit StatusPending for all workflows here (after successful enqueue) 468 - for _, ewfs := range workflows { 469 - for _, ewf := range ewfs { 470 - err := s.db.StatusPending(models.WorkflowId{ 471 - PipelineId: pipelineId, 472 - Name: ewf.Name, 473 - }, s.n) 474 - if err != nil { 475 - return fmt.Errorf("db.StatusPending: %w", err) 476 - } 477 - } 478 - } 479 401 return nil 480 402 } 481 403 482 - // newRepoPath creates a path to store repository by its did and rkey. 
483 - // The path format would be: `/data/repos/did:plc:foo/sh.tangled.repo/repo-rkey 484 - func (s *Spindle) newRepoPath(did syntax.DID, rkey syntax.RecordKey) string { 485 - return filepath.Join(s.cfg.Server.RepoDir(), did.String(), tangled.RepoNSID, rkey.String()) 486 - } 404 + func (s *Spindle) configureOwner() error { 405 + cfgOwner := s.cfg.Server.Owner 487 406 488 - func (s *Spindle) newRepoCloneUrl(knot, did, name string) string { 489 - scheme := "https://" 490 - if s.cfg.Server.Dev { 491 - scheme = "http://" 407 + existing, err := s.e.GetSpindleUsersByRole("server:owner", rbacDomain) 408 + if err != nil { 409 + return err 492 410 } 493 - return fmt.Sprintf("%s%s/%s/%s", scheme, knot, did, name) 494 - } 495 411 496 - const RequiredVersion = "2.49.0" 412 + switch len(existing) { 413 + case 0: 414 + // no owner configured, continue 415 + case 1: 416 + // find existing owner 417 + existingOwner := existing[0] 497 418 498 - func ensureGitVersion() error { 499 - v, err := git.Version() 500 - if err != nil { 501 - return fmt.Errorf("fetching git version: %w", err) 419 + // no ownership change, this is okay 420 + if existingOwner == s.cfg.Server.Owner { 421 + break 422 + } 423 + 424 + // remove existing owner 425 + err = s.e.RemoveSpindleOwner(rbacDomain, existingOwner) 426 + if err != nil { 427 + return nil 428 + } 429 + default: 430 + return fmt.Errorf("more than one owner in DB, try deleting %q and starting over", s.cfg.Server.DBPath) 502 431 } 503 - if v.LessThan(version.Must(version.NewVersion(RequiredVersion))) { 504 - return fmt.Errorf("installed git version %q is not supported, Spindle requires git version >= %q", v, RequiredVersion) 505 - } 506 - return nil 432 + 433 + return s.e.AddSpindleOwner(rbacDomain, cfgOwner) 507 434 }
-391
spindle/tap.go
··· 1 - package spindle
2 - 
3 - import (
4 - 	"context"
5 - 	"encoding/json"
6 - 	"fmt"
7 - 	"time"
8 - 
9 - 	"github.com/bluesky-social/indigo/atproto/syntax"
10 - 	"tangled.org/core/api/tangled"
11 - 	"tangled.org/core/eventconsumer"
12 - 	"tangled.org/core/spindle/db"
13 - 	"tangled.org/core/spindle/git"
14 - 	"tangled.org/core/spindle/models"
15 - 	"tangled.org/core/tap"
16 - 	"tangled.org/core/tid"
17 - 	"tangled.org/core/workflow"
18 - )
19 - 
20 - func (s *Spindle) processEvent(ctx context.Context, evt tap.Event) error {
21 - 	l := s.l.With("component", "tapIndexer")
22 - 
23 - 	var err error
24 - 	switch evt.Type {
25 - 	case tap.EvtRecord:
26 - 		switch evt.Record.Collection.String() {
27 - 		case tangled.SpindleMemberNSID:
28 - 			err = s.processMember(ctx, evt)
29 - 		case tangled.RepoNSID:
30 - 			err = s.processRepo(ctx, evt)
31 - 		case tangled.RepoCollaboratorNSID:
32 - 			err = s.processCollaborator(ctx, evt)
33 - 		case tangled.RepoPullNSID:
34 - 			err = s.processPull(ctx, evt)
35 - 		}
36 - 	case tap.EvtIdentity:
37 - 		// no-op
38 - 	}
39 - 
40 - 	if err != nil {
41 - 		l.Error("failed to process message. will retry later", "event.ID", evt.ID, "err", err)
42 - 		return err
43 - 	}
44 - 	return nil
45 - }
46 - 
47 - // NOTE: make sure to return nil if we don't need to retry (e.g. forbidden, unrelated)
48 - 
49 - func (s *Spindle) processMember(ctx context.Context, evt tap.Event) error {
50 - 	l := s.l.With("component", "tapIndexer", "record", evt.Record.AtUri())
51 - 
52 - 	l.Info("processing spindle.member record")
53 - 
54 - 	// only listen to members
55 - 	if ok, err := s.e.IsSpindleMemberInviteAllowed(evt.Record.Did, s.cfg.Server.Did()); !ok || err != nil {
56 - 		l.Warn("forbidden request: member invite not allowed", "did", evt.Record.Did, "error", err)
57 - 		return nil
58 - 	}
59 - 
60 - 	switch evt.Record.Action {
61 - 	case tap.RecordCreateAction, tap.RecordUpdateAction:
62 - 		record := tangled.SpindleMember{}
63 - 		if err := json.Unmarshal(evt.Record.Record, &record); err != nil {
64 - 			return fmt.Errorf("parsing record: %w", err)
65 - 		}
66 - 
67 - 		domain := s.cfg.Server.Hostname
68 - 		if record.Instance != domain {
69 - 			l.Info("domain mismatch", "domain", record.Instance, "expected", domain)
70 - 			return nil
71 - 		}
72 - 
73 - 		created, err := time.Parse(record.CreatedAt, time.RFC3339)
74 - 		if err != nil {
75 - 			created = time.Now()
76 - 		}
77 - 		if err := db.AddSpindleMember(s.db, db.SpindleMember{
78 - 			Did: evt.Record.Did,
79 - 			Rkey: evt.Record.Rkey.String(),
80 - 			Instance: record.Instance,
81 - 			Subject: syntax.DID(record.Subject),
82 - 			Created: created,
83 - 		}); err != nil {
84 - 			l.Error("failed to add member", "error", err)
85 - 			return fmt.Errorf("adding member to db: %w", err)
86 - 		}
87 - 		if err := s.e.AddSpindleMember(syntax.DID(record.Subject), s.cfg.Server.Did()); err != nil {
88 - 			return fmt.Errorf("adding member to rbac: %w", err)
89 - 		}
90 - 		if err := s.tap.AddRepos(ctx, []syntax.DID{syntax.DID(record.Subject)}); err != nil {
91 - 			return fmt.Errorf("adding did to tap: %w", err)
92 - 		}
93 - 
94 - 		l.Info("added member", "member", record.Subject)
95 - 		return nil
96 - 
97 - 	case tap.RecordDeleteAction:
98 - 		var (
99 - 			did = evt.Record.Did.String()
100 - 			rkey = evt.Record.Rkey.String()
101 - 		)
102 - 		member, err := db.GetSpindleMember(s.db, did, rkey)
103 - 		if err != nil {
104 - 			return fmt.Errorf("finding member: %w", err)
105 - 		}
106 - 
107 - 		if err := db.RemoveSpindleMember(s.db, did, rkey); err != nil {
108 - 			return fmt.Errorf("removing member from db: %w", err)
109 - 		}
110 - 		if err := s.e.RemoveSpindleMember(member.Subject, s.cfg.Server.Did()); err != nil {
111 - 			return fmt.Errorf("removing member from rbac: %w", err)
112 - 		}
113 - 		if err := s.tapSafeRemoveDid(ctx, member.Subject); err != nil {
114 - 			return fmt.Errorf("removing did from tap: %w", err)
115 - 		}
116 - 
117 - 		l.Info("removed member", "member", member.Subject)
118 - 		return nil
119 - 	}
120 - 	return nil
121 - }
122 - 
123 - func (s *Spindle) processCollaborator(ctx context.Context, evt tap.Event) error {
124 - 	l := s.l.With("component", "tapIndexer", "record", evt.Record.AtUri())
125 - 
126 - 	l.Info("processing repo.collaborator record")
127 - 
128 - 	// only listen to members
129 - 	if ok, err := s.e.IsSpindleMember(evt.Record.Did, s.cfg.Server.Did()); !ok || err != nil {
130 - 		l.Warn("forbidden request: not spindle member", "did", evt.Record.Did, "err", err)
131 - 		return nil
132 - 	}
133 - 
134 - 	switch evt.Record.Action {
135 - 	case tap.RecordCreateAction, tap.RecordUpdateAction:
136 - 		record := tangled.RepoCollaborator{}
137 - 		if err := json.Unmarshal(evt.Record.Record, &record); err != nil {
138 - 			l.Error("invalid record", "err", err)
139 - 			return fmt.Errorf("parsing record: %w", err)
140 - 		}
141 - 
142 - 		// retry later if target repo is not ingested yet
143 - 		if _, err := s.db.GetRepo(syntax.ATURI(record.Repo)); err != nil {
144 - 			l.Warn("target repo is not ingested yet", "repo", record.Repo, "err", err)
145 - 			return fmt.Errorf("target repo is unknown")
146 - 		}
147 - 
148 - 		// check perms for this user
149 - 		if ok, err := s.e.IsRepoCollaboratorInviteAllowed(evt.Record.Did, syntax.ATURI(record.Repo)); !ok || err != nil {
150 - 			l.Warn("forbidden request collaborator invite not allowed", "did", evt.Record.Did, "err", err)
151 - 			return nil
152 - 		}
153 - 
154 - 		if err := s.db.PutRepoCollaborator(&db.RepoCollaborator{
155 - 			Did: evt.Record.Did,
156 - 			Rkey: evt.Record.Rkey,
157 - 			Repo: syntax.ATURI(record.Repo),
158 - 			Subject: syntax.DID(record.Subject),
159 - 		}); err != nil {
160 - 			return fmt.Errorf("adding collaborator to db: %w", err)
161 - 		}
162 - 		if err := s.e.AddRepoCollaborator(syntax.DID(record.Subject), syntax.ATURI(record.Repo)); err != nil {
163 - 			return fmt.Errorf("adding collaborator to rbac: %w", err)
164 - 		}
165 - 		if err := s.tap.AddRepos(ctx, []syntax.DID{syntax.DID(record.Subject)}); err != nil {
166 - 			return fmt.Errorf("adding did to tap: %w", err)
167 - 		}
168 - 
169 - 		l.Info("add repo collaborator", "subejct", record.Subject, "repo", record.Repo)
170 - 		return nil
171 - 
172 - 	case tap.RecordDeleteAction:
173 - 		// get existing collaborator
174 - 		collaborator, err := s.db.GetRepoCollaborator(evt.Record.Did, evt.Record.Rkey)
175 - 		if err != nil {
176 - 			return fmt.Errorf("failed to get existing collaborator info: %w", err)
177 - 		}
178 - 
179 - 		// check perms for this user
180 - 		if ok, err := s.e.IsRepoCollaboratorInviteAllowed(evt.Record.Did, collaborator.Repo); !ok || err != nil {
181 - 			l.Warn("forbidden request collaborator invite not allowed", "did", evt.Record.Did, "err", err)
182 - 			return nil
183 - 		}
184 - 
185 - 		if err := s.db.RemoveRepoCollaborator(collaborator.Subject, collaborator.Rkey); err != nil {
186 - 			return fmt.Errorf("removing collaborator from db: %w", err)
187 - 		}
188 - 		if err := s.e.RemoveRepoCollaborator(collaborator.Subject, collaborator.Repo); err != nil {
189 - 			return fmt.Errorf("removing collaborator from rbac: %w", err)
190 - 		}
191 - 		if err := s.tapSafeRemoveDid(ctx, collaborator.Subject); err != nil {
192 - 			return fmt.Errorf("removing did from tap: %w", err)
193 - 		}
194 - 
195 - 		l.Info("removed repo collaborator", "subejct", collaborator.Subject, "repo", collaborator.Repo)
196 - 		return nil
197 - 	}
198 - 	return nil
199 - }
200 - 
201 - func (s *Spindle) processRepo(ctx context.Context, evt tap.Event) error {
202 - 	l := s.l.With("component", "tapIndexer", "record", evt.Record.AtUri())
203 - 
204 - 	l.Info("processing repo record")
205 - 
206 - 	// only listen to members
207 - 	if ok, err := s.e.IsSpindleMember(evt.Record.Did, s.cfg.Server.Did()); !ok || err != nil {
208 - 		l.Warn("forbidden request: not spindle member", "did", evt.Record.Did, "err", err)
209 - 		return nil
210 - 	}
211 - 
212 - 	switch evt.Record.Action {
213 - 	case tap.RecordCreateAction, tap.RecordUpdateAction:
214 - 		record := tangled.Repo{}
215 - 		if err := json.Unmarshal(evt.Record.Record, &record); err != nil {
216 - 			return fmt.Errorf("parsing record: %w", err)
217 - 		}
218 - 
219 - 		domain := s.cfg.Server.Hostname
220 - 		if record.Spindle == nil || *record.Spindle != domain {
221 - 			if record.Spindle == nil {
222 - 				l.Info("spindle isn't configured", "name", record.Name)
223 - 			} else {
224 - 				l.Info("different spindle configured", "name", record.Name, "spindle", *record.Spindle, "domain", domain)
225 - 			}
226 - 			if err := s.db.DeleteRepo(evt.Record.Did, evt.Record.Rkey); err != nil {
227 - 				return fmt.Errorf("deleting repo from db: %w", err)
228 - 			}
229 - 			return nil
230 - 		}
231 - 
232 - 		repo := &db.Repo{
233 - 			Did: evt.Record.Did,
234 - 			Rkey: evt.Record.Rkey,
235 - 			Name: record.Name,
236 - 			Knot: record.Knot,
237 - 		}
238 - 
239 - 		if err := s.db.PutRepo(repo); err != nil {
240 - 			return fmt.Errorf("adding repo to db: %w", err)
241 - 		}
242 - 
243 - 		if err := s.e.AddRepo(evt.Record.AtUri()); err != nil {
244 - 			return fmt.Errorf("adding repo to rbac")
245 - 		}
246 - 
247 - 		// add this knot to the event consumer
248 - 		src := eventconsumer.NewKnotSource(record.Knot)
249 - 		s.ks.AddSource(context.Background(), src)
250 - 
251 - 		// setup sparse sync
252 - 		repoCloneUri := s.newRepoCloneUrl(repo.Knot, repo.Did.String(), repo.Name)
253 - 		repoPath := s.newRepoPath(repo.Did, repo.Rkey)
254 - 		if err := git.SparseSyncGitRepo(ctx, repoCloneUri, repoPath, ""); err != nil {
255 - 			return fmt.Errorf("setting up sparse-clone git repo: %w", err)
256 - 		}
257 - 
258 - 		l.Info("added repo", "repo", evt.Record.AtUri())
259 - 		return nil
260 - 
261 - 	case tap.RecordDeleteAction:
262 - 		// check perms for this user
263 - 		if ok, err := s.e.IsRepoOwner(evt.Record.Did, evt.Record.AtUri()); !ok || err != nil {
264 - 			l.Warn("forbidden request: not repo owner", "did", evt.Record.Did, "err", err)
265 - 			return nil
266 - 		}
267 - 
268 - 		if err := s.db.DeleteRepo(evt.Record.Did, evt.Record.Rkey); err != nil {
269 - 			return fmt.Errorf("deleting repo from db: %w", err)
270 - 		}
271 - 
272 - 		if err := s.e.DeleteRepo(evt.Record.AtUri()); err != nil {
273 - 			return fmt.Errorf("deleting repo from rbac: %w", err)
274 - 		}
275 - 
276 - 		l.Info("deleted repo", "repo", evt.Record.AtUri())
277 - 		return nil
278 - 	}
279 - 	return nil
280 - }
281 - 
282 - func (s *Spindle) processPull(ctx context.Context, evt tap.Event) error {
283 - 	l := s.l.With("component", "tapIndexer", "record", evt.Record.AtUri())
284 - 
285 - 	l.Info("processing pull record")
286 - 
287 - 	// only listen to live events
288 - 	if !evt.Record.Live {
289 - 		l.Info("skipping backfill event", "event", evt.Record.AtUri())
290 - 		return nil
291 - 	}
292 - 
293 - 	switch evt.Record.Action {
294 - 	case tap.RecordCreateAction, tap.RecordUpdateAction:
295 - 		record := tangled.RepoPull{}
296 - 		if err := json.Unmarshal(evt.Record.Record, &record); err != nil {
297 - 			l.Error("invalid record", "err", err)
298 - 			return fmt.Errorf("parsing record: %w", err)
299 - 		}
300 - 
301 - 		// ignore legacy records
302 - 		if record.Target == nil {
303 - 			l.Info("ignoring pull record: target repo is nil")
304 - 			return nil
305 - 		}
306 - 
307 - 		// ignore patch-based and fork-based PRs
308 - 		if record.Source == nil || record.Source.Repo != nil {
309 - 			l.Info("ignoring pull record: not a branch-based pull request")
310 - 			return nil
311 - 		}
312 - 
313 - 		// skip if target repo is unknown
314 - 		repo, err := s.db.GetRepo(syntax.ATURI(record.Target.Repo))
315 - 		if err != nil {
316 - 			l.Warn("target repo is not ingested yet", "repo", record.Target.Repo, "err", err)
317 - 			return fmt.Errorf("target repo is unknown")
318 - 		}
319 - 
320 - 		compiler := workflow.Compiler{
321 - 			Trigger: tangled.Pipeline_TriggerMetadata{
322 - 				Kind: string(workflow.TriggerKindPullRequest),
323 - 				PullRequest: &tangled.Pipeline_PullRequestTriggerData{
324 - 					Action: "create",
325 - 					SourceBranch: record.Source.Branch,
326 - 					SourceSha: record.Source.Sha,
327 - 					TargetBranch: record.Target.Branch,
328 - 				},
329 - 				Repo: &tangled.Pipeline_TriggerRepo{
330 - 					Did: repo.Did.String(),
331 - 					Knot: repo.Knot,
332 - 					Repo: repo.Name,
333 - 				},
334 - 			},
335 - 		}
336 - 
337 - 		repoUri := s.newRepoCloneUrl(repo.Knot, repo.Did.String(), repo.Name)
338 - 		repoPath := s.newRepoPath(repo.Did, repo.Rkey)
339 - 
340 - 		// load workflow definitions from rev (without spindle context)
341 - 		rawPipeline, err := s.loadPipeline(ctx, repoUri, repoPath, record.Source.Sha)
342 - 		if err != nil {
343 - 			// don't retry
344 - 			l.Error("failed loading pipeline", "err", err)
345 - 			return nil
346 - 		}
347 - 		if len(rawPipeline) == 0 {
348 - 			l.Info("no workflow definition find for the repo. skipping the event")
349 - 			return nil
350 - 		}
351 - 		tpl := compiler.Compile(compiler.Parse(rawPipeline))
352 - 		// TODO: pass compile error to workflow log
353 - 		for _, w := range compiler.Diagnostics.Errors {
354 - 			l.Error(w.String())
355 - 		}
356 - 		for _, w := range compiler.Diagnostics.Warnings {
357 - 			l.Warn(w.String())
358 - 		}
359 - 
360 - 		pipelineId := models.PipelineId{
361 - 			Knot: tpl.TriggerMetadata.Repo.Knot,
362 - 			Rkey: tid.TID(),
363 - 		}
364 - 		if err := s.db.CreatePipelineEvent(pipelineId.Rkey, tpl, s.n); err != nil {
365 - 			l.Error("failed to create pipeline event", "err", err)
366 - 			return nil
367 - 		}
368 - 		err = s.processPipeline(ctx, tpl, pipelineId)
369 - 		if err != nil {
370 - 			// don't retry
371 - 			l.Error("failed processing pipeline", "err", err)
372 - 			return nil
373 - 		}
374 - 	case tap.RecordDeleteAction:
375 - 		// no-op
376 - 	}
377 - 	return nil
378 - }
379 - 
380 - func (s *Spindle) tapSafeRemoveDid(ctx context.Context, did syntax.DID) error {
381 - 	known, err := s.db.IsKnownDid(syntax.DID(did))
382 - 	if err != nil {
383 - 		return fmt.Errorf("ensuring did known state: %w", err)
384 - 	}
385 - 	if !known {
386 - 		if err := s.tap.RemoveRepos(ctx, []syntax.DID{did}); err != nil {
387 - 			return fmt.Errorf("removing did from tap: %w", err)
388 - 		}
389 - 	}
390 - 	return nil
391 - }
+2 -1
spindle/xrpc/add_secret.go
··· 11 11 "github.com/bluesky-social/indigo/xrpc" 12 12 securejoin "github.com/cyphar/filepath-securejoin" 13 13 "tangled.org/core/api/tangled" 14 + "tangled.org/core/rbac" 14 15 "tangled.org/core/spindle/secrets" 15 16 xrpcerr "tangled.org/core/xrpc/errors" 16 17 ) ··· 67 68 return 68 69 } 69 70 70 - if ok, err := x.Enforcer.IsRepoSettingsWriteAllowed(actorDid, repoAt); !ok || err != nil { 71 + if ok, err := x.Enforcer.IsSettingsAllowed(actorDid.String(), rbac.ThisServer, didPath); !ok || err != nil { 71 72 l.Error("insufficent permissions", "did", actorDid.String()) 72 73 writeError(w, xrpcerr.AccessControlError(actorDid.String()), http.StatusUnauthorized) 73 74 return
+2 -1
spindle/xrpc/list_secrets.go
··· 11 11 "github.com/bluesky-social/indigo/xrpc" 12 12 securejoin "github.com/cyphar/filepath-securejoin" 13 13 "tangled.org/core/api/tangled" 14 + "tangled.org/core/rbac" 14 15 "tangled.org/core/spindle/secrets" 15 16 xrpcerr "tangled.org/core/xrpc/errors" 16 17 ) ··· 62 63 return 63 64 } 64 65 65 - if ok, err := x.Enforcer.IsRepoSettingsWriteAllowed(actorDid, repoAt); !ok || err != nil { 66 + if ok, err := x.Enforcer.IsSettingsAllowed(actorDid.String(), rbac.ThisServer, didPath); !ok || err != nil { 66 67 l.Error("insufficent permissions", "did", actorDid.String()) 67 68 writeError(w, xrpcerr.AccessControlError(actorDid.String()), http.StatusUnauthorized) 68 69 return
+1 -1
spindle/xrpc/owner.go
··· 9 9 )
10 10 
11 11 func (x *Xrpc) Owner(w http.ResponseWriter, r *http.Request) {
12 - 	owner := x.Config.Server.Owner.String()
12 + 	owner := x.Config.Server.Owner
13 13 	if owner == "" {
14 14 		writeError(w, xrpcerr.OwnerNotFoundError, http.StatusInternalServerError)
15 15 		return
+26 -1
spindle/xrpc/pipeline_cancelPipeline.go
··· 6 6 "net/http" 7 7 "strings" 8 8 9 + "github.com/bluesky-social/indigo/api/atproto" 9 10 "github.com/bluesky-social/indigo/atproto/syntax" 11 + "github.com/bluesky-social/indigo/xrpc" 12 + securejoin "github.com/cyphar/filepath-securejoin" 10 13 "tangled.org/core/api/tangled" 14 + "tangled.org/core/rbac" 11 15 "tangled.org/core/spindle/models" 12 16 xrpcerr "tangled.org/core/xrpc/errors" 13 17 ) ··· 49 53 return 50 54 } 51 55 52 - isRepoOwner, err := x.Enforcer.IsRepoOwner(actorDid, repoAt) 56 + ident, err := x.Resolver.ResolveIdent(r.Context(), repoAt.Authority().String()) 57 + if err != nil || ident.Handle.IsInvalidHandle() { 58 + fail(xrpcerr.GenericError(fmt.Errorf("failed to resolve handle: %w", err))) 59 + return 60 + } 61 + 62 + xrpcc := xrpc.Client{Host: ident.PDSEndpoint()} 63 + resp, err := atproto.RepoGetRecord(r.Context(), &xrpcc, "", tangled.RepoNSID, repoAt.Authority().String(), repoAt.RecordKey().String()) 64 + if err != nil { 65 + fail(xrpcerr.GenericError(err)) 66 + return 67 + } 68 + 69 + repo := resp.Value.Val.(*tangled.Repo) 70 + didSlashRepo, err := securejoin.SecureJoin(ident.DID.String(), repo.Name) 71 + if err != nil { 72 + fail(xrpcerr.GenericError(err)) 73 + return 74 + } 75 + 76 + // TODO: fine-grained role based control 77 + isRepoOwner, err := x.Enforcer.IsRepoOwner(actorDid.String(), rbac.ThisServer, didSlashRepo) 53 78 if err != nil || !isRepoOwner { 54 79 fail(xrpcerr.AccessControlError(actorDid.String())) 55 80 return
+2 -1
spindle/xrpc/remove_secret.go
··· 10 10 "github.com/bluesky-social/indigo/xrpc" 11 11 securejoin "github.com/cyphar/filepath-securejoin" 12 12 "tangled.org/core/api/tangled" 13 + "tangled.org/core/rbac" 13 14 "tangled.org/core/spindle/secrets" 14 15 xrpcerr "tangled.org/core/xrpc/errors" 15 16 ) ··· 61 62 return 62 63 } 63 64 64 - if ok, err := x.Enforcer.IsRepoSettingsWriteAllowed(actorDid, repoAt); !ok || err != nil { 65 + if ok, err := x.Enforcer.IsSettingsAllowed(actorDid.String(), rbac.ThisServer, didPath); !ok || err != nil { 65 66 l.Error("insufficent permissions", "did", actorDid.String()) 66 67 writeError(w, xrpcerr.AccessControlError(actorDid.String()), http.StatusUnauthorized) 67 68 return
+2 -2
spindle/xrpc/xrpc.go
··· 11 11 "tangled.org/core/api/tangled" 12 12 "tangled.org/core/idresolver" 13 13 "tangled.org/core/notifier" 14 - "tangled.org/core/rbac2" 14 + "tangled.org/core/rbac" 15 15 "tangled.org/core/spindle/config" 16 16 "tangled.org/core/spindle/db" 17 17 "tangled.org/core/spindle/models" ··· 25 25 type Xrpc struct { 26 26 Logger *slog.Logger 27 27 Db *db.DB 28 - Enforcer *rbac2.Enforcer 28 + Enforcer *rbac.Enforcer 29 29 Engines map[string]models.Engine 30 30 Config *config.Config 31 31 Resolver *idresolver.Resolver
-24
tap/simpleIndexer.go
··· 1 - package tap
2 - 
3 - import "context"
4 - 
5 - type SimpleIndexer struct {
6 - 	EventHandler func(ctx context.Context, evt Event) error
7 - 	ErrorHandler func(ctx context.Context, err error)
8 - }
9 - 
10 - var _ Handler = (*SimpleIndexer)(nil)
11 - 
12 - func (i *SimpleIndexer) OnEvent(ctx context.Context, evt Event) error {
13 - 	if i.EventHandler == nil {
14 - 		return nil
15 - 	}
16 - 	return i.EventHandler(ctx, evt)
17 - }
18 - 
19 - func (i *SimpleIndexer) OnError(ctx context.Context, err error) {
20 - 	if i.ErrorHandler == nil {
21 - 		return
22 - 	}
23 - 	i.ErrorHandler(ctx, err)
24 - }
-169
tap/tap.go
··· 1 - /// heavily inspired by <https://github.com/bluesky-social/atproto/blob/c7f5a868837d3e9b3289f988fee2267789327b06/packages/tap/README.md>
2 - 
3 - package tap
4 - 
5 - import (
6 - 	"bytes"
7 - 	"context"
8 - 	"encoding/json"
9 - 	"fmt"
10 - 	"net/http"
11 - 	"net/url"
12 - 
13 - 	"github.com/bluesky-social/indigo/atproto/syntax"
14 - 	"github.com/gorilla/websocket"
15 - 	"tangled.org/core/log"
16 - )
17 - 
18 - // type WebsocketOptions struct {
19 - // 	maxReconnectSeconds int
20 - // 	heartbeatIntervalMs int
21 - // 	// onReconnectError
22 - // }
23 - 
24 - type Handler interface {
25 - 	OnEvent(ctx context.Context, evt Event) error
26 - 	OnError(ctx context.Context, err error)
27 - }
28 - 
29 - type Client struct {
30 - 	Url string
31 - 	AdminPassword string
32 - 	HTTPClient *http.Client
33 - }
34 - 
35 - func NewClient(url, adminPassword string) Client {
36 - 	return Client{
37 - 		Url: url,
38 - 		AdminPassword: adminPassword,
39 - 		HTTPClient: &http.Client{},
40 - 	}
41 - }
42 - 
43 - func (c *Client) AddRepos(ctx context.Context, dids []syntax.DID) error {
44 - 	body, err := json.Marshal(map[string][]syntax.DID{"dids": dids})
45 - 	if err != nil {
46 - 		return err
47 - 	}
48 - 	req, err := http.NewRequestWithContext(ctx, "POST", c.Url+"/repos/add", bytes.NewReader(body))
49 - 	if err != nil {
50 - 		return err
51 - 	}
52 - 	req.SetBasicAuth("admin", c.AdminPassword)
53 - 	req.Header.Set("Content-Type", "application/json")
54 - 
55 - 	resp, err := c.HTTPClient.Do(req)
56 - 	if err != nil {
57 - 		return err
58 - 	}
59 - 	defer resp.Body.Close()
60 - 	if resp.StatusCode != http.StatusOK {
61 - 		return fmt.Errorf("tap: /repos/add failed with status %d", resp.StatusCode)
62 - 	}
63 - 	return nil
64 - }
65 - 
66 - func (c *Client) RemoveRepos(ctx context.Context, dids []syntax.DID) error {
67 - 	body, err := json.Marshal(map[string][]syntax.DID{"dids": dids})
68 - 	if err != nil {
69 - 		return err
70 - 	}
71 - 	req, err := http.NewRequestWithContext(ctx, "POST", c.Url+"/repos/remove", bytes.NewReader(body))
72 - 	if err != nil {
73 - 		return err
74 - 	}
75 - 	req.SetBasicAuth("admin", c.AdminPassword)
76 - 	req.Header.Set("Content-Type", "application/json")
77 - 
78 - 	resp, err := c.HTTPClient.Do(req)
79 - 	if err != nil {
80 - 		return err
81 - 	}
82 - 	defer resp.Body.Close()
83 - 	if resp.StatusCode != http.StatusOK {
84 - 		return fmt.Errorf("tap: /repos/remove failed with status %d", resp.StatusCode)
85 - 	}
86 - 	return nil
87 - }
88 - 
89 - func (c *Client) Connect(ctx context.Context, handler Handler) error {
90 - 	l := log.FromContext(ctx)
91 - 
92 - 	u, err := url.Parse(c.Url)
93 - 	if err != nil {
94 - 		return err
95 - 	}
96 - 	if u.Scheme == "https" {
97 - 		u.Scheme = "wss"
98 - 	} else {
99 - 		u.Scheme = "ws"
100 - 	}
101 - 	u.Path = "/channel"
102 - 
103 - 	// TODO: set auth on dial
104 - 
105 - 	url := u.String()
106 - 
107 - 	// var backoff int
108 - 	// for {
109 - 	// 	select {
110 - 	// 	case <-ctx.Done():
111 - 	// 		return ctx.Err()
112 - 	// 	default:
113 - 	// 	}
114 - 	//
115 - 	// 	header := http.Header{
116 - 	// 		"Authorization": []string{""},
117 - 	// 	}
118 - 	// 	conn, res, err := websocket.DefaultDialer.DialContext(ctx, url, header)
119 - 	// 	if err != nil {
120 - 	// 		l.Warn("dialing failed", "url", url, "err", err, "backoff", backoff)
121 - 	// 		time.Sleep(time.Duration(5+backoff) * time.Second)
122 - 	// 		backoff++
123 - 	//
124 - 	// 		continue
125 - 	// 	} else {
126 - 	// 		backoff = 0
127 - 	// 	}
128 - 	//
129 - 	// 	l.Info("event subscription response", "code", res.StatusCode)
130 - 	// }
131 - 
132 - 	// TODO: keep websocket connection alive
133 - 	conn, _, err := websocket.DefaultDialer.DialContext(ctx, url, nil)
134 - 	if err != nil {
135 - 		return err
136 - 	}
137 - 	defer conn.Close()
138 - 
139 - 	for {
140 - 		select {
141 - 		case <-ctx.Done():
142 - 			return ctx.Err()
143 - 		default:
144 - 		}
145 - 		_, message, err := conn.ReadMessage()
146 - 		if err != nil {
147 - 			return err
148 - 		}
149 - 
150 - 		var ev Event
151 - 		if err := json.Unmarshal(message, &ev); err != nil {
152 - 			handler.OnError(ctx, fmt.Errorf("failed to parse message: %w", err))
153 - 			continue
154 - 		}
155 - 		if err := handler.OnEvent(ctx, ev); err != nil {
156 - 			handler.OnError(ctx, fmt.Errorf("failed to process event %d: %w", ev.ID, err))
157 - 			continue
158 - 		}
159 - 
160 - 		ack := map[string]any{
161 - 			"type": "ack",
162 - 			"id": ev.ID,
163 - 		}
164 - 		if err := conn.WriteJSON(ack); err != nil {
165 - 			l.Warn("failed to send ack", "err", err)
166 - 			continue
167 - 		}
168 - 	}
169 - }
-62
tap/types.go
··· 1 - package tap
2 - 
3 - import (
4 - 	"encoding/json"
5 - 	"fmt"
6 - 
7 - 	"github.com/bluesky-social/indigo/atproto/syntax"
8 - )
9 - 
10 - type EventType string
11 - 
12 - const (
13 - 	EvtRecord EventType = "record"
14 - 	EvtIdentity EventType = "identity"
15 - )
16 - 
17 - type Event struct {
18 - 	ID int64 `json:"id"`
19 - 	Type EventType `json:"type"`
20 - 	Record *RecordEventData `json:"record,omitempty"`
21 - 	Identity *IdentityEventData `json:"identity,omitempty"`
22 - }
23 - 
24 - type RecordEventData struct {
25 - 	Live bool `json:"live"`
26 - 	Did syntax.DID `json:"did"`
27 - 	Rev string `json:"rev"`
28 - 	Collection syntax.NSID `json:"collection"`
29 - 	Rkey syntax.RecordKey `json:"rkey"`
30 - 	Action RecordAction `json:"action"`
31 - 	Record json.RawMessage `json:"record,omitempty"`
32 - 	CID *syntax.CID `json:"cid,omitempty"`
33 - }
34 - 
35 - func (r *RecordEventData) AtUri() syntax.ATURI {
36 - 	return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", r.Did, r.Collection, r.Rkey))
37 - }
38 - 
39 - type RecordAction string
40 - 
41 - const (
42 - 	RecordCreateAction RecordAction = "create"
43 - 	RecordUpdateAction RecordAction = "update"
44 - 	RecordDeleteAction RecordAction = "delete"
45 - )
46 - 
47 - type IdentityEventData struct {
48 - 	DID syntax.DID `json:"did"`
49 - 	Handle string `json:"handle"`
50 - 	IsActive bool `json:"is_active"`
51 - 	Status RepoStatus `json:"status"`
52 - }
53 - 
54 - type RepoStatus string
55 - 
56 - const (
57 - 	RepoStatusActive RepoStatus = "active"
58 - 	RepoStatusTakendown RepoStatus = "takendown"
59 - 	RepoStatusSuspended RepoStatus = "suspended"
60 - 	RepoStatusDeactivated RepoStatus = "deactivated"
61 - 	RepoStatusDeleted RepoStatus = "deleted"
62 - )
+4 -10
types/repo.go
··· 94 94 	Tags []*TagReference `json:"tags,omitempty"`
95 95 }
96 96 
97 + type RepoTagResponse struct {
98 + 	Tag *TagReference `json:"tag,omitempty"`
99 + }
100 + 
97 101 type RepoBranchesResponse struct {
98 102 	Branches []Branch `json:"branches,omitempty"`
99 103 }
··· 104 108 
105 109 type RepoDefaultBranchResponse struct {
106 110 	Branch string `json:"branch,omitempty"`
107 111 }
108 - 
109 - type RepoBlobResponse struct {
110 - 	Contents string `json:"contents,omitempty"`
111 - 	Ref string `json:"ref,omitempty"`
112 - 	Path string `json:"path,omitempty"`
113 - 	IsBinary bool `json:"is_binary,omitempty"`
114 - 
115 - 	Lines int `json:"lines,omitempty"`
116 - 	SizeHint uint64 `json:"size_hint,omitempty"`
117 - }
118 112 
119 113 type ForkStatus int
+5
types/tree.go
··· 105 105 	Hash plumbing.Hash
106 106 	Message string
107 107 	When time.Time
108 + 	Author struct {
109 + 		Email string
110 + 		Name string
111 + 		When time.Time
112 + 	}
108 113 }
+19
xrpc/blob.go
··· 1 + package xrpc
2 + 
3 + import (
4 + 	"context"
5 + 	"io"
6 + 
7 + 	comatproto "github.com/bluesky-social/indigo/api/atproto"
8 + 	"github.com/bluesky-social/indigo/lex/util"
9 + )
10 + 
11 + // RepoUploadBlob calls the XRPC method "com.atproto.repo.uploadBlob".
12 + func RepoUploadBlob(ctx context.Context, c util.LexClient, input io.Reader, contentType string) (*comatproto.RepoUploadBlob_Output, error) {
13 + 	var out comatproto.RepoUploadBlob_Output
14 + 	if err := c.LexDo(ctx, util.Procedure, contentType, "com.atproto.repo.uploadBlob", nil, input, &out); err != nil {
15 + 		return nil, err
16 + 	}
17 + 
18 + 	return &out, nil
19 + }