+51 .dockerignore
+# Environment files
+*.env*
+!.env.example
+
+# Build artifacts
+**/dist/
+**/.vite/
+**/build/
+**/out/
+
+# Dependencies
+**/node_modules/
+
+# Database files
+**/*.db
+**/*.db-shm
+**/*.db-wal
+
+# Git
+.git/
+.gitignore
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Testing
+**/coverage/
+**/.nyc_output/
+
+# Temporary files
+**/.tmp/
+**/tmp/
+
+# Docker
+Dockerfile*
+docker-compose*
+.dockerignore
+23 api/.sqlx/query-0b88e356f9b4ada616b1398baa792aa2012c613ac39527af939a9fed1999cf91.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT job_id\n FROM job_results\n WHERE user_did = $1\n AND slice_uri = $2\n AND status = 'running'\n AND created_at > NOW() - INTERVAL '10 minutes'\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "job_id",
+        "type_info": "Uuid"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "0b88e356f9b4ada616b1398baa792aa2012c613ac39527af939a9fed1999cf91"
+}
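
The `.sqlx` directory holds offline query metadata generated by `cargo sqlx prepare`: each JSON file caches the column, parameter, and nullability description for one compile-time-checked query so the crate can build without a live database. As a rough illustration (the function and binding names here are invented, not taken from the diff), the metadata above would correspond to a checked call shaped like this:

    // Illustrative sketch, not from the diff: a compile-time checked sqlx
    // call whose describe output matches the cached metadata above
    // (two Text parameters in, one non-null Uuid column out).
    async fn find_recent_running_jobs(
        pool: &sqlx::PgPool,
        user_did: &str,
        slice_uri: &str,
    ) -> Result<Vec<uuid::Uuid>, sqlx::Error> {
        sqlx::query_scalar!(
            r#"
            SELECT job_id
            FROM job_results
            WHERE user_did = $1
              AND slice_uri = $2
              AND status = 'running'
              AND created_at > NOW() - INTERVAL '10 minutes'
            "#,
            user_did,
            slice_uri,
        )
        .fetch_all(pool)
        .await
    }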
+22 api/.sqlx/query-2515ba15c6154c0ebdd44e6580bcf642b6a6ea6a31486db7e64f379e6d0312cb.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT uri\n FROM record\n WHERE collection = 'network.slices.slice'\n AND uri LIKE $1\n LIMIT 1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "uri",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "2515ba15c6154c0ebdd44e6580bcf642b6a6ea6a31486db7e64f379e6d0312cb"
+}
+14 api/.sqlx/query-2e8604e20759bc693d7197d56aa63bd4ec88c627e137338cc00493acad0f4bb5.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n DELETE FROM mq_msgs\n WHERE id = (\n SELECT m.id\n FROM mq_msgs m\n JOIN mq_payloads p ON m.id = p.id\n WHERE m.channel_name = 'sync_queue'\n AND p.payload_json->>'job_id' = $1\n LIMIT 1\n )\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "2e8604e20759bc693d7197d56aa63bd4ec88c627e137338cc00493acad0f4bb5"
+}
+23 api/.sqlx/query-34772b1ea9bbb544f35debd0f4322b8c5fa9b36e89d8037dabea926b41295e1c.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT uri\n FROM record\n WHERE collection = 'network.slices.slice'\n AND json->>'actorHandle' = $1\n AND uri LIKE '%' || $2 || '%'\n LIMIT 1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "uri",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "34772b1ea9bbb544f35debd0f4322b8c5fa9b36e89d8037dabea926b41295e1c"
+}
+22 api/.sqlx/query-3f2817c58f926dd5988eea516a827bae626cec08106fc8863fa29c894ca97ad5.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n DELETE FROM job_results\n WHERE job_id = $1\n RETURNING job_id\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "job_id",
+        "type_info": "Uuid"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "3f2817c58f926dd5988eea516a827bae626cec08106fc8863fa29c894ca97ad5"
+}
-23 api/.sqlx/query-476a222abf10eedfe5cebebf21900bc3bbee11e855aa37f6c1ccc7d9bce5f87a.json
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO job_results (\n job_id, user_did, slice_uri, status, success, total_records,\n collections_synced, repos_processed, message, error_message\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)\n ON CONFLICT (job_id)\n DO UPDATE SET\n status = EXCLUDED.status,\n success = EXCLUDED.success,\n total_records = EXCLUDED.total_records,\n collections_synced = EXCLUDED.collections_synced,\n repos_processed = EXCLUDED.repos_processed,\n message = EXCLUDED.message,\n error_message = EXCLUDED.error_message,\n completed_at = NOW()\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Uuid",
-        "Text",
-        "Text",
-        "Text",
-        "Bool",
-        "Int8",
-        "Jsonb",
-        "Int8",
-        "Text",
-        "Text"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "476a222abf10eedfe5cebebf21900bc3bbee11e855aa37f6c1ccc7d9bce5f87a"
-}
+22 api/.sqlx/query-4d9e6d902bf75d05fb516b6f0e47ad9de6ec13cd621dc2270c64a402616f9afa.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT status\n FROM job_results\n WHERE job_id = $1\n AND status = 'cancelled'\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "status",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "4d9e6d902bf75d05fb516b6f0e47ad9de6ec13cd621dc2270c64a402616f9afa"
+}
+17 api/.sqlx/query-6640b5320abf0a32dde919c3eea5e64ccebc503b2b58269d481c0c87e5a4f239.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n INSERT INTO job_results (job_id, user_did, slice_uri, status, success, message, created_at, completed_at)\n VALUES ($1, $2, $3, 'cancelled', false, 'Job cancelled by user', $4, $4)\n ON CONFLICT (job_id)\n DO UPDATE SET\n status = 'cancelled',\n message = 'Job cancelled by user',\n completed_at = $4\n WHERE job_results.status NOT IN ('completed', 'failed', 'cancelled')\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        "Text",
+        "Text",
+        "Timestamptz"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "6640b5320abf0a32dde919c3eea5e64ccebc503b2b58269d481c0c87e5a4f239"
+}
+22 api/.sqlx/query-7122f93bb3a6a95dce7e4f772e452ffb30fea975bcc7c18ce28e2a9da4cfcbf7.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT p.payload_json\n FROM mq_msgs m\n JOIN mq_payloads p ON m.id = p.id\n WHERE m.channel_name = 'sync_queue'\n AND p.payload_json->>'job_id' = $1\n LIMIT 1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "payload_json",
+        "type_info": "Jsonb"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text"
+      ]
+    },
+    "nullable": [
+      true
+    ]
+  },
+  "hash": "7122f93bb3a6a95dce7e4f772e452ffb30fea975bcc7c18ce28e2a9da4cfcbf7"
+}
+22 api/.sqlx/query-9a01a39a3f03c25fbcc9b30a5920776ca50858c3ef21f8c8f32d978e2c4abe28.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT DISTINCT did\n FROM actor\n WHERE handle = $1\n LIMIT 1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "did",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "9a01a39a3f03c25fbcc9b30a5920776ca50858c3ef21f8c8f32d978e2c4abe28"
+}
+23 api/.sqlx/query-ac23bb6cab6a16529af787c4592f0f83497f1f1be80674f732af9e0aef09c796.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n UPDATE job_results\n SET status = 'cancelled',\n message = 'Job cancelled by user',\n completed_at = $2\n WHERE job_id = $1\n AND status NOT IN ('completed', 'failed', 'cancelled')\n RETURNING job_id\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "job_id",
+        "type_info": "Uuid"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        "Timestamptz"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "ac23bb6cab6a16529af787c4592f0f83497f1f1be80674f732af9e0aef09c796"
+}
+22 api/.sqlx/query-d24f975bc6d56b68b52d0a0cdd9c29ebc6fd519651354701e4b223718734bec6.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT status\n FROM job_results\n WHERE job_id = $1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "status",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "d24f975bc6d56b68b52d0a0cdd9c29ebc6fd519651354701e4b223718734bec6"
+}
+17 api/.sqlx/query-e4cc601916507937a0971d62e90c621b9effeb4d0e783f66ffcc365f5af7f604.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n INSERT INTO job_results (job_id, user_did, slice_uri, status, success, message, created_at)\n VALUES ($1, $2, $3, 'running', false, 'Job is running...', $4)\n ON CONFLICT (job_id) DO NOTHING\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        "Text",
+        "Text",
+        "Timestamptz"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "e4cc601916507937a0971d62e90c621b9effeb4d0e783f66ffcc365f5af7f604"
+}
+23 api/.sqlx/query-e89ac05d0570e29a0c546f0e463d2deffd4db29c4bb05f001e9b7740efca5caf.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n INSERT INTO job_results (\n job_id, user_did, slice_uri, status, success, total_records,\n collections_synced, repos_processed, message, error_message\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)\n ON CONFLICT (job_id)\n DO UPDATE SET\n status = EXCLUDED.status,\n success = EXCLUDED.success,\n total_records = EXCLUDED.total_records,\n collections_synced = EXCLUDED.collections_synced,\n repos_processed = EXCLUDED.repos_processed,\n message = EXCLUDED.message,\n error_message = EXCLUDED.error_message,\n completed_at = NOW()\n WHERE job_results.status != 'cancelled'\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        "Text",
+        "Text",
+        "Text",
+        "Bool",
+        "Int8",
+        "Jsonb",
+        "Int8",
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "e89ac05d0570e29a0c546f0e463d2deffd4db29c4bb05f001e9b7740efca5caf"
+}
+95 api/.sqlx/query-f7d276cab8741971c2f00946867f4ed67fdca9fe35843da890a5273ff3c0d872.json
+{
+  "db_name": "PostgreSQL",
+  "query": "\n -- Completed jobs from job_results\n SELECT\n job_id, user_did, slice_uri, status, success, total_records,\n collections_synced, repos_processed, message, error_message,\n created_at, completed_at,\n 'completed' as job_type\n FROM job_results\n WHERE slice_uri LIKE $1\n\n UNION ALL\n\n -- Pending jobs from message queue\n SELECT\n (p.payload_json->>'job_id')::uuid as job_id,\n p.payload_json->>'user_did' as user_did,\n p.payload_json->>'slice_uri' as slice_uri,\n 'running' as status,\n NULL::boolean as success,\n NULL::bigint as total_records,\n '[]'::jsonb as collections_synced,\n NULL::bigint as repos_processed,\n 'Job in progress...' as message,\n NULL::text as error_message,\n m.created_at,\n NULL::timestamptz as completed_at,\n 'pending' as job_type\n FROM mq_msgs m\n JOIN mq_payloads p ON m.id = p.id\n WHERE m.channel_name = 'sync_queue'\n AND m.id != '00000000-0000-0000-0000-000000000000'\n AND p.payload_json->>'slice_uri' LIKE $1\n AND NOT EXISTS (\n SELECT 1 FROM job_results jr\n WHERE jr.job_id = (p.payload_json->>'job_id')::uuid\n )\n\n ORDER BY created_at DESC\n LIMIT $2\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "job_id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "user_did",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "slice_uri",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "status",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 4,
+        "name": "success",
+        "type_info": "Bool"
+      },
+      {
+        "ordinal": 5,
+        "name": "total_records",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 6,
+        "name": "collections_synced",
+        "type_info": "Jsonb"
+      },
+      {
+        "ordinal": 7,
+        "name": "repos_processed",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 8,
+        "name": "message",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 9,
+        "name": "error_message",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 10,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 11,
+        "name": "completed_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 12,
+        "name": "job_type",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Int8"
+      ]
+    },
+    "nullable": [
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null,
+      null
+    ]
+  },
+  "hash": "f7d276cab8741971c2f00946867f4ed67fdca9fe35843da890a5273ff3c0d872"
+}
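
Note the all-null "nullable" array above: because the UNION ALL mixes real table columns with computed ones, sqlx cannot prove non-nullability, so the Rust side presumably receives every column as an `Option`. A sketch of the implied row shape (the struct name and the chrono types are assumptions, not taken from the diff):

    use chrono::{DateTime, Utc};
    use uuid::Uuid;

    // Every field is Option<_> because sqlx reports unknown nullability
    // for all thirteen columns of the UNION ALL above.
    struct SyncJobRow {
        job_id: Option<Uuid>,
        user_did: Option<String>,
        slice_uri: Option<String>,
        status: Option<String>,
        success: Option<bool>,
        total_records: Option<i64>,
        collections_synced: Option<serde_json::Value>,
        repos_processed: Option<i64>,
        message: Option<String>,
        error_message: Option<String>,
        created_at: Option<DateTime<Utc>>,
        completed_at: Option<DateTime<Utc>>,
        job_type: Option<String>,
    }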
+2 -2 api/Cargo.lock
+3 -3 api/Cargo.toml
···
 [package]
 name = "slices"
-version = "0.1.0"
+version = "0.2.0"
 edition = "2024"

 [dependencies]
···
 sqlxmq = "0.6"
 regex = "1.11.2"

-# Redis for caching
-redis = { version = "0.32", features = ["tokio-comp", "connection-manager"] }
+# Redis for caching and pub/sub
+redis = { version = "0.32", features = ["tokio-comp", "connection-manager", "aio"] }

 # GraphQL server
 async-graphql = { version = "7.0", features = ["dynamic-schema", "dataloader"] }
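
The newly added `aio` feature, together with `connection-manager`, is what provides the async `ConnectionManager` used by the pub/sub code added in api/src/graphql/schema_ext/sync.rs below. A minimal sketch of the pattern, assuming the connection URL comes from a `REDIS_URL` environment variable rather than the project's actual config plumbing:

    use redis::{aio::ConnectionManager, AsyncCommands, Client};

    // Minimal pub/sub sketch; REDIS_URL is an assumed environment variable.
    async fn publish_example() -> redis::RedisResult<()> {
        let url = std::env::var("REDIS_URL").expect("REDIS_URL not set");
        // ConnectionManager is the auto-reconnecting async connection enabled
        // by the "aio" and "connection-manager" features.
        let client = Client::open(url.as_str())?;
        let mut conn = ConnectionManager::new(client).await?;
        // Channel name matches the one used by sync.rs in this diff.
        let _: () = conn.publish("sync_job_updates", r#"{"job_id":"..."}"#).await?;
        Ok(())
    }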
+2 -2 api/flake.nix
···
   commonArgs = {
     inherit src;
-    version = "0.1.0";
+    version = "0.2.0";
     strictDeps = true;
     pname = "slices";
     name = "slices";
···
   ociLabels = {
     "org.opencontainers.image.title" = "slices";
     "org.opencontainers.image.description" = "API service for Slices";
-    "org.opencontainers.image.version" = "0.1.0";
+    "org.opencontainers.image.version" = "0.2.0";
     "org.opencontainers.image.authors" = "Slices Social";
     "org.opencontainers.image.licenses" = "MIT";
   };
+502 -26 api/src/graphql/schema_builder.rs
···
     add_cancel_job_mutation, add_create_oauth_client_mutation, add_delete_job_mutation,
     add_delete_oauth_client_mutation, add_delete_slice_records_mutation, add_get_sync_summary_query,
     add_jetstream_logs_query, add_jetstream_logs_subscription, add_oauth_clients_query,
-    add_slice_records_query, add_sparklines_query, add_sparklines_field_to_slice,
-    add_start_sync_mutation, add_stats_field_to_slice, add_sync_job_logs_query, add_sync_job_query,
-    add_sync_job_subscription, add_sync_jobs_query, add_update_oauth_client_mutation,
-    add_upload_blob_mutation, create_blob_upload_response_type, create_collection_stats_type,
-    create_collection_summary_type, create_delete_slice_records_output_type,
-    create_jetstream_log_entry_type, create_oauth_client_type, create_slice_record_type,
-    create_slice_record_edge_type, create_slice_records_connection_type,
-    create_slice_records_where_input, create_slice_sparkline_type, create_slice_stats_type,
-    create_sparkline_point_type, create_start_sync_output_type, create_sync_job_result_type,
-    create_sync_job_type, create_sync_summary_type,
+    add_oauth_clients_field_to_slice, add_slice_records_query, add_sparklines_query,
+    add_sparklines_field_to_slice, add_start_sync_mutation, add_stats_field_to_slice,
+    add_sync_job_logs_query, add_sync_job_query, add_sync_job_subscription, add_sync_jobs_query,
+    add_update_oauth_client_mutation, add_upload_blob_mutation, create_blob_upload_response_type,
+    create_collection_stats_type, create_collection_summary_type,
+    create_delete_slice_records_output_type, create_jetstream_log_entry_type,
+    create_oauth_client_type, create_slice_record_type, create_slice_record_edge_type,
+    create_slice_records_connection_type, create_slice_records_where_input,
+    create_slice_sparkline_type, create_slice_stats_type, create_sparkline_point_type,
+    create_start_sync_output_type, create_sync_job_result_type, create_sync_job_type,
+    create_sync_summary_type,
 };
 use crate::graphql::types::{extract_collection_fields, extract_record_key, GraphQLField, GraphQLType};
 use crate::graphql::PUBSUB;
···
     at_uri_fields: Vec<String>, // Fields with format "at-uri" for reverse joins
 }

+/// Type registry for tracking generated nested object types
+type TypeRegistry = HashMap<String, Object>;
+
+/// Container for nested object field values
+#[derive(Clone)]
+struct NestedObjectContainer {
+    data: serde_json::Value,
+}
+
+/// Generates a unique type name for a nested object field
+fn generate_nested_type_name(parent_type: &str, field_name: &str) -> String {
+    let mut chars = field_name.chars();
+    let capitalized_field = match chars.next() {
+        None => String::new(),
+        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
+    };
+    format!("{}{}", parent_type, capitalized_field)
+}
+
+/// Resolves a lexicon ref and generates a GraphQL type for it
+/// Returns the generated type name
+fn resolve_lexicon_ref_type(
+    ref_nsid: &str,
+    current_lexicon_nsid: &str,
+    all_lexicons: &[serde_json::Value],
+    type_registry: &mut TypeRegistry,
+    database: &Database,
+) -> String {
+    // Handle different ref formats:
+    // 1. Local ref: #image
+    // 2. External ref with specific def: app.bsky.embed.defs#aspectRatio
+    // 3. External ref to main: community.lexicon.location.hthree
+    let (target_nsid, def_name) = if ref_nsid.starts_with('#') {
+        // Local ref - use current lexicon NSID and the def name without #
+        (current_lexicon_nsid, &ref_nsid[1..])
+    } else if let Some(hash_pos) = ref_nsid.find('#') {
+        // External ref with specific def - split on #
+        (&ref_nsid[..hash_pos], &ref_nsid[hash_pos + 1..])
+    } else {
+        // External ref to main def
+        (ref_nsid, "main")
+    };
+
+    // Generate type name from NSID and def name
+    let type_name = if def_name == "main" {
+        // For refs to main: CommunityLexiconLocationHthree
+        nsid_to_type_name(target_nsid)
+    } else {
+        // For refs to specific def: AppBskyEmbedDefsAspectRatio
+        format!("{}{}", nsid_to_type_name(target_nsid), capitalize_first(def_name))
+    };
+
+    // Check if already generated
+    if type_registry.contains_key(&type_name) {
+        return type_name;
+    }
+
+    // Find the lexicon definition
+    let lexicon = all_lexicons.iter().find(|lex| {
+        lex.get("id").and_then(|id| id.as_str()) == Some(target_nsid)
+    });
+
+    if let Some(lex) = lexicon {
+        // Extract the definition (either "main" or specific def like "image")
+        if let Some(defs) = lex.get("defs") {
+            if let Some(def) = defs.get(def_name) {
+                // Extract fields from this specific definition
+                if let Some(properties) = def.get("properties") {
+                    let fields = extract_fields_from_properties(properties);
+
+                    if !fields.is_empty() {
+                        // Generate the type using existing nested object generator
+                        generate_nested_object_type(&type_name, &fields, type_registry, database);
+                        return type_name;
+                    }
+                }
+            }
+        }
+    }
+
+    // Fallback: couldn't resolve the ref, will use JSON
+    tracing::warn!("Could not resolve lexicon ref: {} (target: {}, def: {})", ref_nsid, target_nsid, def_name);
+    type_name
+}
+
+/// Capitalizes the first character of a string
+fn capitalize_first(s: &str) -> String {
+    let mut chars = s.chars();
+    match chars.next() {
+        None => String::new(),
+        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
+    }
+}
+
+/// Extracts fields from a lexicon properties object
+fn extract_fields_from_properties(properties: &serde_json::Value) -> Vec<GraphQLField> {
+    let mut fields = Vec::new();
+
+    if let Some(props) = properties.as_object() {
+        for (field_name, field_def) in props {
+            let field_type_str = field_def.get("type").and_then(|t| t.as_str()).unwrap_or("unknown");
+            let field_type = crate::graphql::types::map_lexicon_type_to_graphql(field_type_str, field_def);
+
+            // Check if field is required
+            let is_required = false; // We'd need the parent's "required" array to know this
+
+            // Extract format if present
+            let format = field_def.get("format").and_then(|f| f.as_str()).map(|s| s.to_string());
+
+            fields.push(GraphQLField {
+                name: field_name.clone(),
+                field_type,
+                is_required,
+                format,
+            });
+        }
+    }
+
+    fields
+}
+
+/// Recursively generates GraphQL object types for nested objects
+/// Returns the type name of the generated object type
+fn generate_nested_object_type(
+    type_name: &str,
+    fields: &[GraphQLField],
+    type_registry: &mut TypeRegistry,
+    database: &Database,
+) -> String {
+    // Check if type already exists in registry
+    if type_registry.contains_key(type_name) {
+        return type_name.to_string();
+    }
+
+    let mut object = Object::new(type_name);
+
+    // Add fields to the object
+    for field in fields {
+        let field_name = field.name.clone();
+        let field_name_for_field = field_name.clone(); // Clone for Field::new
+        let field_type = field.field_type.clone();
+
+        // Determine the TypeRef for this field
+        let type_ref = match &field.field_type {
+            GraphQLType::Object(nested_fields) => {
+                // Generate nested object type recursively
+                let nested_type_name = generate_nested_type_name(type_name, &field_name);
+                let actual_type_name = generate_nested_object_type(
+                    &nested_type_name,
+                    nested_fields,
+                    type_registry,
+                    database,
+                );
+
+                if field.is_required {
+                    TypeRef::named_nn(actual_type_name)
+                } else {
+                    TypeRef::named(actual_type_name)
+                }
+            }
+            GraphQLType::Array(inner) => {
+                if let GraphQLType::Object(nested_fields) = inner.as_ref() {
+                    // Generate nested object type for array items
+                    let nested_type_name = generate_nested_type_name(type_name, &field_name);
+                    let actual_type_name = generate_nested_object_type(
+                        &nested_type_name,
+                        nested_fields,
+                        type_registry,
+                        database,
+                    );
+
+                    if field.is_required {
+                        TypeRef::named_nn_list(actual_type_name)
+                    } else {
+                        TypeRef::named_list(actual_type_name)
+                    }
+                } else {
+                    // Use standard type ref for arrays of primitives
+                    graphql_type_to_typeref(&field.field_type, field.is_required)
+                }
+            }
+            _ => {
+                // Use standard type ref for other types
+                graphql_type_to_typeref(&field.field_type, field.is_required)
+            }
+        };
+
+        // Add field with resolver
+        object = object.field(Field::new(&field_name_for_field, type_ref, move |ctx| {
+            let field_name = field_name.clone();
+            let field_type = field_type.clone();
+
+            FieldFuture::new(async move {
+                // Get parent container
+                let container = ctx.parent_value.try_downcast_ref::<NestedObjectContainer>()?;
+                let value = container.data.get(&field_name);
+
+                if let Some(val) = value {
+                    if val.is_null() {
+                        return Ok(None);
+                    }
+
+                    // For nested objects, wrap in container
+                    if matches!(field_type, GraphQLType::Object(_)) {
+                        let nested_container = NestedObjectContainer {
+                            data: val.clone(),
+                        };
+                        return Ok(Some(FieldValue::owned_any(nested_container)));
+                    }
+
+                    // For arrays of objects, wrap each item
+                    if let GraphQLType::Array(inner) = &field_type {
+                        if matches!(inner.as_ref(), GraphQLType::Object(_)) {
+                            if let Some(arr) = val.as_array() {
+                                let containers: Vec<FieldValue> = arr
+                                    .iter()
+                                    .map(|item| {
+                                        let nested_container = NestedObjectContainer {
+                                            data: item.clone(),
+                                        };
+                                        FieldValue::owned_any(nested_container)
+                                    })
+                                    .collect();
+                                return Ok(Some(FieldValue::list(containers)));
+                            }
+                            return Ok(Some(FieldValue::list(Vec::<FieldValue>::new())));
+                        }
+                    }
+
+                    // For other types, return the GraphQL value
+                    let graphql_val = json_to_graphql_value(val);
+                    Ok(Some(FieldValue::value(graphql_val)))
+                } else {
+                    Ok(None)
+                }
+            })
+        }));
+    }
+
+    // Store the generated type in registry
+    type_registry.insert(type_name.to_string(), object);
+    type_name.to_string()
+}
+
 /// Builds a dynamic GraphQL schema from lexicons for a given slice
 pub async fn build_graphql_schema(database: Database, slice_uri: String, auth_base_url: String) -> Result<Schema, String> {
     // Fetch all lexicons for this slice
···
         }
     }

+    // Initialize type registry for nested object types
+    let mut type_registry: TypeRegistry = HashMap::new();
+
     // Second pass: create types and queries
     for lexicon in &lexicons {
         // get_lexicons_by_slice returns {lexicon: 1, id: "nsid", defs: {...}}
···
             database.clone(),
             slice_uri.clone(),
             &all_collections,
+            auth_base_url.clone(),
+            &mut type_registry,
+            &lexicons,
+            nsid,
         );

         // Create edge and connection types for this collection (Relay standard)
···
         schema_builder = schema_builder.register(mutation_input);
     }

+    // Register all nested object types from the type registry
+    for (_, nested_type) in type_registry {
+        schema_builder = schema_builder.register(nested_type);
+    }
+
     schema_builder
         .finish()
         .map_err(|e| format!("Schema build error: {:?}", e))
···
 /// Container to hold blob data and DID for URL generation
 #[derive(Clone)]
-struct BlobContainer {
-    blob_ref: String,  // CID reference
-    mime_type: String, // MIME type
-    size: i64,         // Size in bytes
-    did: String,       // DID for CDN URL generation
+pub struct BlobContainer {
+    pub blob_ref: String,  // CID reference
+    pub mime_type: String, // MIME type
+    pub size: i64,         // Size in bytes
+    pub did: String,       // DID for CDN URL generation
 }

 /// Creates a GraphQL Object type for a record collection
···
     database: Database,
     slice_uri: String,
     all_collections: &[CollectionMeta],
+    auth_base_url: String,
+    type_registry: &mut TypeRegistry,
+    all_lexicons: &[serde_json::Value],
+    lexicon_nsid: &str,
 ) -> Object {
     let mut object = Object::new(type_name);
···
         let field_type = field.field_type.clone();
         let db_clone = database.clone();

-        let type_ref = graphql_type_to_typeref(&field.field_type, field.is_required);
+        // Determine type ref - handle nested objects and lexicon refs specially
+        let type_ref = match &field.field_type {
+            GraphQLType::LexiconRef(ref_nsid) => {
+                // Resolve lexicon ref and generate type for it
+                let resolved_type_name = resolve_lexicon_ref_type(
+                    ref_nsid,
+                    lexicon_nsid,
+                    all_lexicons,
+                    type_registry,
+                    &database,
+                );
+
+                if field.is_required {
+                    TypeRef::named_nn(resolved_type_name)
+                } else {
+                    TypeRef::named(resolved_type_name)
+                }
+            }
+            GraphQLType::Object(nested_fields) => {
+                // Generate nested object type
+                let nested_type_name = generate_nested_type_name(type_name, &field_name);
+                let actual_type_name = generate_nested_object_type(
+                    &nested_type_name,
+                    nested_fields,
+                    type_registry,
+                    &database,
+                );
+
+                if field.is_required {
+                    TypeRef::named_nn(actual_type_name)
+                } else {
+                    TypeRef::named(actual_type_name)
+                }
+            }
+            GraphQLType::Array(inner) => {
+                match inner.as_ref() {
+                    GraphQLType::LexiconRef(ref_nsid) => {
+                        // Resolve lexicon ref for array items
+                        let resolved_type_name = resolve_lexicon_ref_type(
+                            ref_nsid,
+                            lexicon_nsid,
+                            all_lexicons,
+                            type_registry,
+                            &database,
+                        );
+
+                        if field.is_required {
+                            TypeRef::named_nn_list(resolved_type_name)
+                        } else {
+                            TypeRef::named_list(resolved_type_name)
+                        }
+                    }
+                    GraphQLType::Object(nested_fields) => {
+                        // Generate nested object type for array items
+                        let nested_type_name = generate_nested_type_name(type_name, &field_name);
+                        let actual_type_name = generate_nested_object_type(
+                            &nested_type_name,
+                            nested_fields,
+                            type_registry,
+                            &database,
+                        );
+
+                        if field.is_required {
+                            TypeRef::named_nn_list(actual_type_name)
+                        } else {
+                            TypeRef::named_list(actual_type_name)
+                        }
+                    }
+                    _ => graphql_type_to_typeref(&field.field_type, field.is_required),
+                }
+            }
+            _ => graphql_type_to_typeref(&field.field_type, field.is_required),
+        };

         object = object.field(Field::new(&field_name_for_field, type_ref, move |ctx| {
             let field_name = field_name.clone();
···
                     }
                 }

+                // Check if this is a lexicon ref field
+                if matches!(field_type, GraphQLType::LexiconRef(_)) {
+                    let nested_container = NestedObjectContainer {
+                        data: val.clone(),
+                    };
+                    return Ok(Some(FieldValue::owned_any(nested_container)));
+                }
+
+                // Check if this is a nested object field
+                if matches!(field_type, GraphQLType::Object(_)) {
+                    let nested_container = NestedObjectContainer {
+                        data: val.clone(),
+                    };
+                    return Ok(Some(FieldValue::owned_any(nested_container)));
+                }
+
+                // Check if this is an array of nested objects or lexicon refs
+                if let GraphQLType::Array(inner) = &field_type {
+                    if matches!(inner.as_ref(), GraphQLType::LexiconRef(_)) || matches!(inner.as_ref(), GraphQLType::Object(_)) {
+                        if let Some(arr) = val.as_array() {
+                            let containers: Vec<FieldValue> = arr
+                                .iter()
+                                .map(|item| {
+                                    let nested_container = NestedObjectContainer {
+                                        data: item.clone(),
+                                    };
+                                    FieldValue::owned_any(nested_container)
+                                })
+                                .collect();
+                            return Ok(Some(FieldValue::list(containers)));
+                        }
+                        return Ok(Some(FieldValue::list(Vec::<FieldValue>::new())));
+                    }
+                }
+
-                // For non-ref fields, return the raw JSON value
+                // For non-ref, non-object fields, return the raw JSON value
                 let graphql_val = json_to_graphql_value(val);
                 Ok(Some(FieldValue::value(graphql_val)))
             } else {
···
         ));
     }

-    // Add sparklines and stats fields for NetworkSlicesSlice type
+    // Add sparklines, stats, and oauth clients fields for NetworkSlicesSlice type
     if type_name == "NetworkSlicesSlice" {
         object = add_sparklines_field_to_slice(object, database.clone());
         object = add_stats_field_to_slice(object, database.clone());
+        object = add_oauth_clients_field_to_slice(object, auth_base_url);
     }

     object
···
             // Always nullable since blob data might be missing or malformed
             TypeRef::named("Blob")
         }
-        GraphQLType::Json | GraphQLType::Ref | GraphQLType::Object(_) | GraphQLType::Union => {
-            // JSON scalar type - linked records and complex objects return as JSON
+        GraphQLType::Json | GraphQLType::Ref | GraphQLType::LexiconRef(_) | GraphQLType::Object(_) | GraphQLType::Union => {
+            // JSON scalar type - linked records, lexicon refs, and complex objects return as JSON (fallback)
             if is_required {
                 TypeRef::named_nn("JSON")
             } else {
···
     let type_name = nsid_to_type_name(nsid);

     // Add create mutation
-    mutation = add_create_mutation(mutation, &type_name, nsid, database.clone(), slice_uri.clone());
+    mutation = add_create_mutation(mutation, &type_name, nsid, &fields, database.clone(), slice_uri.clone());

     // Add update mutation
-    mutation = add_update_mutation(mutation, &type_name, nsid, database.clone(), slice_uri.clone());
+    mutation = add_update_mutation(mutation, &type_name, nsid, &fields, database.clone(), slice_uri.clone());

     // Add delete mutation
     mutation = add_delete_mutation(mutation, &type_name, nsid, database.clone(), slice_uri.clone());
···
     input
 }

+/// Transforms fields in record data from GraphQL format to AT Protocol format
+///
+/// Blob fields:
+/// - GraphQL format: `{ref: "bafyrei...", mimeType: "...", size: 123}`
+/// - AT Protocol format: `{$type: "blob", ref: {$link: "bafyrei..."}, mimeType: "...", size: 123}`
+///
+/// Lexicon ref fields:
+/// - Adds `$type: "{ref_nsid}"` to objects (e.g., `{$type: "community.lexicon.location.hthree#main", ...}`)
+///
+/// Nested objects:
+/// - Recursively processes nested objects and arrays
+fn transform_fields_for_atproto(
+    mut data: serde_json::Value,
+    fields: &[GraphQLField],
+) -> serde_json::Value {
+    if let serde_json::Value::Object(ref mut map) = data {
+        for field in fields {
+            if let Some(field_value) = map.get_mut(&field.name) {
+                match &field.field_type {
+                    GraphQLType::Blob => {
+                        // Transform single blob field
+                        if let Some(blob_obj) = field_value.as_object_mut() {
+                            // Add $type: "blob"
+                            blob_obj.insert("$type".to_string(), serde_json::Value::String("blob".to_string()));
+
+                            // Check if ref is a string (GraphQL format)
+                            if let Some(serde_json::Value::String(cid)) = blob_obj.get("ref") {
+                                // Transform to {$link: "cid"} (AT Protocol format)
+                                let link_obj = serde_json::json!({
+                                    "$link": cid
+                                });
+                                blob_obj.insert("ref".to_string(), link_obj);
+                            }
+                        }
+                    }
+                    GraphQLType::LexiconRef(ref_nsid) => {
+                        // Transform lexicon ref field by adding $type
+                        if let Some(ref_obj) = field_value.as_object_mut() {
+                            ref_obj.insert("$type".to_string(), serde_json::Value::String(ref_nsid.clone()));
+                        }
+                    }
+                    GraphQLType::Object(nested_fields) => {
+                        // Recursively transform nested objects
+                        *field_value = transform_fields_for_atproto(field_value.clone(), nested_fields);
+                    }
+                    GraphQLType::Array(inner) => {
+                        match inner.as_ref() {
+                            GraphQLType::Blob => {
+                                // Transform array of blobs
+                                if let Some(arr) = field_value.as_array_mut() {
+                                    for blob_value in arr {
+                                        if let Some(blob_obj) = blob_value.as_object_mut() {
+                                            // Add $type: "blob"
+                                            blob_obj.insert("$type".to_string(), serde_json::Value::String("blob".to_string()));
+
+                                            if let Some(serde_json::Value::String(cid)) = blob_obj.get("ref") {
+                                                let link_obj = serde_json::json!({
+                                                    "$link": cid
+                                                });
+                                                blob_obj.insert("ref".to_string(), link_obj);
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                            GraphQLType::LexiconRef(ref_nsid) => {
+                                // Transform array of lexicon refs
+                                if let Some(arr) = field_value.as_array_mut() {
+                                    for ref_value in arr {
+                                        if let Some(ref_obj) = ref_value.as_object_mut() {
+                                            ref_obj.insert("$type".to_string(), serde_json::Value::String(ref_nsid.clone()));
+                                        }
+                                    }
+                                }
+                            }
+                            GraphQLType::Object(nested_fields) => {
+                                // Transform array of objects recursively
+                                if let Some(arr) = field_value.as_array_mut() {
+                                    for item in arr {
+                                        *item = transform_fields_for_atproto(item.clone(), nested_fields);
+                                    }
+                                }
+                            }
+                            _ => {} // Other array types don't need transformation
+                        }
+                    }
+                    _ => {} // Other field types don't need transformation
+                }
+            }
+        }
+    }
+
+    data
+}
+
 /// Adds a create mutation for a collection
 fn add_create_mutation(
     mutation: Object,
     type_name: &str,
     nsid: &str,
+    fields: &[GraphQLField],
     database: Database,
     slice_uri: String,
 ) -> Object {
     let mutation_name = format!("create{}", type_name);
     let nsid = nsid.to_string();
     let nsid_clone = nsid.clone();
+    let fields = fields.to_vec();

     mutation.field(
         Field::new(
···
             let db = database.clone();
             let slice = slice_uri.clone();
             let collection = nsid.clone();
+            let fields = fields.clone();

             FieldFuture::new(async move {
                 // Get GraphQL context which contains auth info
···
                 .ok_or_else(|| Error::new("Missing input argument"))?;

                 // Convert GraphQL value to JSON using deserialize
-                let record_data: serde_json::Value = input.deserialize()
+                let mut record_data: serde_json::Value = input.deserialize()
                     .map_err(|e| Error::new(format!("Failed to deserialize input: {:?}", e)))?;
+
+                // Transform fields from GraphQL to AT Protocol format (adds $type, transforms blob refs)
+                record_data = transform_fields_for_atproto(record_data, &fields);

                 // Optional rkey argument
                 let rkey = ctx.args.get("rkey")
···
                 let (uri, cid) = match result {
                     atproto_client::com::atproto::repo::CreateRecordResponse::StrongRef { uri, cid, .. } => (uri, cid),
                     atproto_client::com::atproto::repo::CreateRecordResponse::Error(e) => {
-                        return Err(Error::new(format!("AT Protocol error: {:?}", e)));
+                        return Err(Error::new(format!("AT Protocol error: {} - {}", e.error.unwrap_or_default(), e.message.unwrap_or_default())));
                     }
                 };
···
     mutation: Object,
     type_name: &str,
     nsid: &str,
+    fields: &[GraphQLField],
     database: Database,
     slice_uri: String,
 ) -> Object {
     let mutation_name = format!("update{}", type_name);
     let nsid = nsid.to_string();
     let nsid_clone = nsid.clone();
+    let fields = fields.to_vec();

     mutation.field(
         Field::new(
···
             let db = database.clone();
             let slice = slice_uri.clone();
             let collection = nsid.clone();
+            let fields = fields.clone();

             FieldFuture::new(async move {
                 // Get GraphQL context which contains auth info
···
                 .ok_or_else(|| Error::new("Missing input argument"))?;

                 // Convert GraphQL value to JSON using deserialize
-                let record_data: serde_json::Value = input.deserialize()
+                let mut record_data: serde_json::Value = input.deserialize()
                     .map_err(|e| Error::new(format!("Failed to deserialize input: {:?}", e)))?;
+
+                // Transform fields from GraphQL to AT Protocol format (adds $type, transforms blob refs)
+                record_data = transform_fields_for_atproto(record_data, &fields);

                 // Verify OAuth token and get user info
                 let user_info = crate::auth::verify_oauth_token_cached(
···
                 let (uri, cid) = match result {
                     atproto_client::com::atproto::repo::PutRecordResponse::StrongRef { uri, cid, .. } => (uri, cid),
                     atproto_client::com::atproto::repo::PutRecordResponse::Error(e) => {
-                        return Err(Error::new(format!("AT Protocol error: {:?}", e)));
+                        return Err(Error::new(format!("AT Protocol error: {} - {}", e.error.unwrap_or_default(), e.message.unwrap_or_default())));
                     }
                 };
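
To make the naming scheme above concrete: `generate_nested_type_name` concatenates the parent type name with the capitalized field name. A sketch of a unit test that pins down this behavior (the test is not part of the diff, and "location" is an invented field name):

    #[cfg(test)]
    mod nested_type_name_tests {
        use super::generate_nested_type_name;

        #[test]
        fn concatenates_parent_and_capitalized_field() {
            // "location" is used only for illustration.
            assert_eq!(
                generate_nested_type_name("NetworkSlicesSlice", "location"),
                "NetworkSlicesSliceLocation"
            );
        }
    }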
+29 -18 api/src/graphql/schema_ext/blob_upload.rs
···
 //! GraphQL schema extension for blob uploads

 use async_graphql::dynamic::{Field, FieldFuture, FieldValue, InputValue, Object, TypeRef};
-use async_graphql::{Error, Value as GraphQLValue};
+use async_graphql::Error;
 use base64::engine::general_purpose;
 use base64::Engine;

 use crate::atproto_extensions::upload_blob as atproto_upload_blob;
 use crate::auth;
-
-/// Container for blob upload response
-#[derive(Clone)]
-struct BlobUploadContainer {
-    blob: serde_json::Value,
-}
+use crate::graphql::schema_builder::BlobContainer;

 /// Creates the BlobUploadResponse GraphQL type
 pub fn create_blob_upload_response_type() -> Object {
     let mut response = Object::new("BlobUploadResponse");

-    response = response.field(Field::new("blob", TypeRef::named_nn("JSON"), |ctx| {
+    // Return the Blob type instead of JSON to ensure consistent ref field handling
+    response = response.field(Field::new("blob", TypeRef::named_nn("Blob"), |ctx| {
         FieldFuture::new(async move {
-            let container = ctx.parent_value.try_downcast_ref::<BlobUploadContainer>()?;
-            // Convert serde_json::Value to async_graphql::Value
-            let graphql_value: GraphQLValue = serde_json::from_value(container.blob.clone())
-                .map_err(|e| async_graphql::Error::new(format!("Failed to convert blob to GraphQL value: {}", e)))?;
-            Ok(Some(graphql_value))
+            // The BlobContainer is passed through from the mutation resolver
+            // The Blob type resolver will handle extracting the fields
+            let container = ctx.parent_value.try_downcast_ref::<BlobContainer>()?;
+            Ok(Some(FieldValue::owned_any(container.clone())))
         })
     }));

···
                 .decode(data_base64)
                 .map_err(|e| Error::new(format!("Invalid base64 data: {}", e)))?;

+            // Verify OAuth token to get user info (needed for DID)
+            let user_info = auth::verify_oauth_token_cached(
+                token,
+                &auth_base,
+                gql_ctx.auth_cache.clone(),
+            )
+            .await
+            .map_err(|e| Error::new(format!("Invalid token: {}", e)))?;
+
             // Get ATProto DPoP auth and PDS URL for this user
             let (dpop_auth, pds_url) = auth::get_atproto_auth_for_user_cached(
                 token,
···
             .await
             .map_err(|e| Error::new(format!("Failed to upload blob: {}", e)))?;

-            // Convert blob to JSON value
-            let blob_json = serde_json::to_value(&upload_result.blob)
-                .map_err(|e| Error::new(format!("Failed to serialize blob: {}", e)))?;
-
-            let container = BlobUploadContainer { blob: blob_json };
-            Ok(Some(FieldValue::owned_any(container)))
+            // Extract the DID from user info
+            let did = user_info.did.unwrap_or(user_info.sub);
+
+            // Create BlobContainer with flattened ref field (CID string)
+            // This ensures the GraphQL Blob type returns ref as a String, not an object
+            let blob_container = BlobContainer {
+                blob_ref: upload_result.blob.r#ref.link.clone(), // Extract CID from ref.$link
+                mime_type: upload_result.blob.mime_type.clone(),
+                size: upload_result.blob.size as i64,
+                did,
+            };
+
+            Ok(Some(FieldValue::owned_any(blob_container)))
             })
         },
     )
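
On the caller's side, the uploadBlob mutation takes the file contents as a base64 string. A minimal sketch of preparing that input with the same `base64` crate this file imports (the helper name is invented):

    use base64::engine::general_purpose;
    use base64::Engine;

    // Encode raw file bytes into the base64 string the uploadBlob mutation expects.
    fn encode_for_upload(bytes: &[u8]) -> String {
        general_purpose::STANDARD.encode(bytes)
    }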
+1 api/src/graphql/schema_ext/mod.rs
+107 api/src/graphql/schema_ext/oauth.rs
···
     oauth_client
 }

+/// Add oauthClients field to NetworkSlicesSlice type
+pub fn add_oauth_clients_field_to_slice(
+    object: Object,
+    auth_base_url: String,
+) -> Object {
+    use crate::graphql::schema_builder::RecordContainer;
+
+    let base_url_for_oauth = auth_base_url.clone();
+
+    object.field(
+        Field::new(
+            "oauthClients",
+            TypeRef::named_nn_list_nn("OAuthClient"),
+            move |ctx| {
+                let base_url = base_url_for_oauth.clone();
+
+                FieldFuture::new(async move {
+                    let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
+                    let slice_uri = &container.record.uri;
+
+                    // Get pool from context and create database instance
+                    let pool = ctx.data::<sqlx::PgPool>()
+                        .map_err(|_| Error::new("Database pool not found in context"))?;
+                    let database = crate::database::Database::new(pool.clone());
+
+                    // Fetch OAuth clients from database
+                    let clients = database
+                        .get_oauth_clients_for_slice(slice_uri)
+                        .await
+                        .map_err(|e| Error::new(format!("Failed to fetch OAuth clients: {}", e)))?;
+
+                    if clients.is_empty() {
+                        return Ok(Some(FieldValue::list(Vec::<FieldValue<'_>>::new())));
+                    }
+
+                    // Fetch details from AIP server
+                    let http_client = Client::new();
+                    let mut client_data_list = Vec::new();
+
+                    for oauth_client in clients {
+                        let aip_url = format!("{}/oauth/clients/{}", base_url, oauth_client.client_id);
+                        let mut request_builder = http_client.get(&aip_url);
+
+                        if let Some(token) = &oauth_client.registration_access_token {
+                            request_builder = request_builder.bearer_auth(token);
+                        }
+
+                        match request_builder.send().await {
+                            Ok(response) if response.status().is_success() => {
+                                if let Ok(response_text) = response.text().await {
+                                    if let Ok(aip_client) = serde_json::from_str::<AipClientResponse>(&response_text) {
+                                        client_data_list.push(OAuthClientData {
+                                            client_id: aip_client.client_id,
+                                            client_secret: aip_client.client_secret,
+                                            client_name: aip_client.client_name,
+                                            redirect_uris: aip_client.redirect_uris,
+                                            grant_types: aip_client.grant_types,
+                                            response_types: aip_client.response_types,
+                                            scope: aip_client.scope,
+                                            client_uri: aip_client.client_uri,
+                                            logo_uri: aip_client.logo_uri,
+                                            tos_uri: aip_client.tos_uri,
+                                            policy_uri: aip_client.policy_uri,
+                                            created_at: oauth_client.created_at,
+                                            created_by_did: oauth_client.created_by_did,
+                                        });
+                                    }
+                                }
+                            }
+                            _ => {
+                                // Fallback for clients we can't fetch details for
+                                client_data_list.push(OAuthClientData {
+                                    client_id: oauth_client.client_id,
+                                    client_secret: None,
+                                    client_name: "Unknown".to_string(),
+                                    redirect_uris: vec![],
+                                    grant_types: vec!["authorization_code".to_string()],
+                                    response_types: vec!["code".to_string()],
+                                    scope: None,
+                                    client_uri: None,
+                                    logo_uri: None,
+                                    tos_uri: None,
+                                    policy_uri: None,
+                                    created_at: oauth_client.created_at,
+                                    created_by_did: oauth_client.created_by_did,
+                                });
+                            }
+                        }
+                    }
+
+                    // Convert to GraphQL values
+                    let field_values: Vec<FieldValue<'_>> = client_data_list
+                        .into_iter()
+                        .map(|client_data| {
+                            let container = OAuthClientContainer { client: client_data };
+                            FieldValue::owned_any(container)
+                        })
+                        .collect();
+
+                    Ok(Some(FieldValue::list(field_values)))
+                })
+            },
+        )
+        .description("Get all OAuth clients for this slice")
+    )
+}
+
 /// Add oauthClients query to the Query type
 pub fn add_oauth_clients_query(query: Object, slice_uri: String, auth_base_url: String) -> Object {
     query.field(
+143 -4 api/src/graphql/schema_ext/sync.rs
···
 use uuid::Uuid;
 use base64::engine::general_purpose;
 use base64::Engine;
+use redis::aio::ConnectionManager;
+use redis::{Client, AsyncCommands};
+use futures_util::StreamExt;

 /// Global broadcast channel for sync job status updates
 /// This allows real-time job status streaming to GraphQL subscriptions
 static JOB_CHANNEL: OnceLock<Arc<Mutex<broadcast::Sender<JobStatus>>>> = OnceLock::new();
+
+/// Global Redis client for cross-process pub/sub (optional)
+static REDIS_CLIENT: OnceLock<Option<Client>> = OnceLock::new();

 /// Initialize or get the global job channel
 fn get_job_channel() -> Arc<Mutex<broadcast::Sender<JobStatus>>> {
···
 /// Publish a sync job status update to subscribers
 pub async fn publish_sync_job_update(job_status: JobStatus) {
+    // Publish to in-memory broadcast channel (for same-process subscribers)
     let sender = get_job_channel();
     let sender_lock = sender.lock().await;
-    let _ = sender_lock.send(job_status); // Ignore errors if no subscribers
+    let _ = sender_lock.send(job_status.clone()); // Ignore errors if no subscribers
+    drop(sender_lock);
+
+    // Also publish to Redis for cross-process communication (if Redis is configured)
+    if let Some(Some(client)) = REDIS_CLIENT.get() {
+        if let Err(e) = publish_to_redis(client, &job_status).await {
+            tracing::warn!("Failed to publish job status to Redis: {}", e);
+        }
+    }
 }
+
+/// Publish job status to Redis for cross-process communication
+async fn publish_to_redis(client: &Client, job_status: &JobStatus) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+    let mut conn = ConnectionManager::new(client.clone()).await?;
+    let payload = serde_json::to_string(job_status)?;
+    let _: () = conn.publish("sync_job_updates", payload).await?;
+    Ok(())
+}

 /// Container for JobStatus to implement Any trait for GraphQL
···
         FieldFuture::new(async move {
             let container = ctx.parent_value.try_downcast_ref::<JobStatusContainer>()?;
             Ok(Some(GraphQLValue::from(container.status.job_id.to_string())))
         })
     }));

+    job = job.field(Field::new("sliceUri", TypeRef::named_nn(TypeRef::STRING), |ctx| {
+        FieldFuture::new(async move {
+            let container = ctx.parent_value.try_downcast_ref::<JobStatusContainer>()?;
+            Ok(Some(GraphQLValue::from(container.status.slice_uri.clone())))
+        })
+    }));
···
     let mut receiver = sender_lock.subscribe();
     drop(sender_lock); // Release lock

+    // Get optional slice filter from arguments
+    let slice_filter: Option<String> = ctx.args.get("slice")
+        .and_then(|val| val.string().ok())
+        .map(|s| s.to_string());
+
     let stream = async_stream::stream! {
         while let Ok(job_status) = receiver.recv().await {
             // Filter by job_id if provided
···
                 }
             }

-            // Filter by slice_uri if provided (need to query for slice_uri)
-            // For now, skip slice filtering since JobStatus doesn't include slice_uri
-            // TODO: Add slice_uri to JobStatus or query it separately
+            // Filter by slice_uri if provided
+            if let Some(ref filter_slice) = slice_filter {
+                if &job_status.slice_uri != filter_slice {
+                    continue;
+                }
+            }

             // Convert to GraphQL value and yield
             let container = JobStatusContainer { status: job_status };
···
         .description("Delete a sync job from the database")
     )
 }
+
+/// Initialize Redis pub/sub for sync job updates
+///
+/// This function should be called once at application startup.
+/// It initializes the Redis client and starts a background task to listen for
+/// job updates from other processes (e.g., worker processes).
+///
+/// # Arguments
+/// * `redis_url` - Optional Redis connection URL. If None, Redis pub/sub is disabled.
+pub fn initialize_redis_pubsub(redis_url: Option<String>) {
+    // Initialize Redis client (or None if not configured)
+    let client = redis_url.and_then(|url| {
+        match Client::open(url.as_str()) {
+            Ok(client) => {
+                tracing::info!("Initialized Redis client for sync job pub/sub");
+                Some(client)
+            }
+            Err(e) => {
+                tracing::error!("Failed to create Redis client for sync job pub/sub: {}", e);
+                None
+            }
+        }
+    });
+
+    let has_redis = client.is_some();
+    REDIS_CLIENT.get_or_init(|| client);
+
+    // Start Redis subscription listener task if Redis is available
+    if has_redis {
+        start_redis_listener();
+    } else {
+        tracing::info!("Redis not configured - sync job updates will use in-memory broadcast only");
+    }
+}
+
+/// Start a background task that subscribes to Redis and forwards messages to the in-memory broadcast channel
+fn start_redis_listener() {
+    tokio::spawn(async {
+        tracing::info!("Starting Redis subscription listener for sync job updates");
+
+        loop {
+            // Get Redis client
+            let client = match REDIS_CLIENT.get() {
906
+
Some(Some(client)) => client,
907
+
_ => {
908
+
tracing::error!("Redis client not available for subscription");
909
+
return;
910
+
}
911
+
};
912
+
913
+
// Connect and subscribe
914
+
match subscribe_to_redis(client).await {
915
+
Ok(_) => {
916
+
tracing::warn!("Redis subscription ended, reconnecting in 5 seconds...");
917
+
}
918
+
Err(e) => {
919
+
tracing::error!("Redis subscription error: {}, reconnecting in 5 seconds...", e);
920
+
}
921
+
}
922
+
923
+
// Wait before reconnecting
924
+
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
925
+
}
926
+
});
927
+
}
928
+
929
+
/// Subscribe to Redis channel and forward messages to in-memory broadcast
930
+
async fn subscribe_to_redis(client: &Client) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
931
+
// Create a pub/sub connection from the client
932
+
let mut pubsub = client.get_async_pubsub().await?;
933
+
934
+
pubsub.subscribe("sync_job_updates").await?;
935
+
tracing::info!("Subscribed to Redis channel: sync_job_updates");
936
+
937
+
// Get the in-memory broadcast sender
938
+
let sender = get_job_channel();
939
+
940
+
loop {
941
+
let msg = pubsub.on_message().next().await;
942
+
// Stream ended (connection closed) - return Ok so the listener task logs and reconnects
let Some(msg) = msg else { return Ok(()); };
943
+
let payload: String = msg.get_payload()?;
944
+
945
+
// Deserialize JobStatus from JSON
946
+
match serde_json::from_str::<JobStatus>(&payload) {
947
+
Ok(job_status) => {
948
+
// Forward to in-memory broadcast channel
949
+
let sender_lock = sender.lock().await;
950
+
if let Err(e) = sender_lock.send(job_status.clone()) {
951
+
tracing::debug!("No local subscribers for job update: {}", e);
952
+
}
953
+
drop(sender_lock);
954
+
955
+
tracing::debug!("Forwarded job update from Redis: job_id={}", job_status.job_id);
956
+
}
957
+
Err(e) => {
958
+
tracing::warn!("Failed to deserialize job status from Redis: {}", e);
959
+
}
960
+
}
962
+
}
963
+
}
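The pattern above fans each update out twice: once on the in-process `broadcast` channel, and once on a Redis channel that the background listener folds back into the local broadcast. A minimal self-contained sketch of that round trip, assuming `redis` with the `tokio-comp` feature, `tokio`, `serde`, `serde_json`, and `futures_util`; the `JobStatus` here is simplified (a `String` id, no timestamps) for illustration:

```rust
use futures_util::StreamExt;
use redis::AsyncCommands;
use serde::{Deserialize, Serialize};
use tokio::sync::broadcast;

// Simplified stand-in for the real JobStatus (String id, no timestamps).
#[derive(Clone, Serialize, Deserialize)]
struct JobStatus {
    job_id: String,
    slice_uri: String,
    status: String,
}

async fn fan_out(redis_url: &str) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let (tx, mut rx) = broadcast::channel::<JobStatus>(64);
    let client = redis::Client::open(redis_url)?;

    // Subscribe before publishing so the message below is not missed.
    let mut pubsub = client.get_async_pubsub().await?;
    pubsub.subscribe("sync_job_updates").await?;

    // Publisher half: local broadcast first, then Redis for other processes.
    let status = JobStatus {
        job_id: "demo-job".into(),
        slice_uri: "at://did:plc:example/network.slices.slice/abc".into(),
        status: "running".into(),
    };
    let _ = tx.send(status.clone()); // ignore "no receivers" errors
    let mut conn = client.get_multiplexed_async_connection().await?;
    let _: () = conn
        .publish("sync_job_updates", serde_json::to_string(&status)?)
        .await?;

    // Listener half: forward the Redis copy into the local broadcast,
    // which is what the spawned listener task does for remote processes.
    if let Some(msg) = pubsub.on_message().next().await {
        let payload: String = msg.get_payload()?;
        let remote: JobStatus = serde_json::from_str(&payload)?;
        let _ = tx.send(remote);
    }

    // A local subscriber now sees both the direct and the forwarded update.
    while let Ok(update) = rx.try_recv() {
        println!("job {} is {}", update.job_id, update.status);
    }
    Ok(())
}
```

In the real code the subscriber runs in a spawned task that reconnects on failure; the sketch does one publish/receive cycle inline to keep the flow visible.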
+6
-1
api/src/graphql/types.rs
···
20
Float,
21
/// Reference to another record (for strongRef)
22
Ref,
23
/// Array of a type
24
Array(Box<GraphQLType>),
25
/// Object with nested fields
···
45
"unknown" => GraphQLType::Json,
46
"null" => GraphQLType::Json,
47
"ref" => {
48
-
// Check if this is a strongRef (link to another record)
49
let ref_name = lexicon_def
50
.get("ref")
51
.and_then(|r| r.as_str())
···
53
54
if ref_name == "com.atproto.repo.strongRef" {
55
GraphQLType::Ref
56
} else {
57
GraphQLType::Json
58
}
···
20
Float,
21
/// Reference to another record (for strongRef)
22
Ref,
23
+
/// Reference to a lexicon type definition (e.g., community.lexicon.location.hthree)
24
+
LexiconRef(String),
25
/// Array of a type
26
Array(Box<GraphQLType>),
27
/// Object with nested fields
···
47
"unknown" => GraphQLType::Json,
48
"null" => GraphQLType::Json,
49
"ref" => {
50
+
// Check if this is a strongRef (link to another record) or a lexicon type ref
51
let ref_name = lexicon_def
52
.get("ref")
53
.and_then(|r| r.as_str())
···
55
56
if ref_name == "com.atproto.repo.strongRef" {
57
GraphQLType::Ref
58
+
} else if !ref_name.is_empty() {
59
+
// This is a reference to a lexicon type definition
60
+
GraphQLType::LexiconRef(ref_name.to_string())
61
} else {
62
GraphQLType::Json
63
}
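For reference, the new three-way split on `"type": "ref"` can be exercised in isolation. A small sketch mirroring the match above with `serde_json` (the `community.lexicon.location.hthree` NSID is the example from the doc comment):

```rust
use serde_json::{json, Value};

#[derive(Debug, PartialEq)]
enum GraphQLType {
    Ref,                // strongRef: link to another record
    LexiconRef(String), // reference to a lexicon type definition
    Json,               // fallback when no usable ref target is present
}

fn classify_ref(lexicon_def: &Value) -> GraphQLType {
    let ref_name = lexicon_def
        .get("ref")
        .and_then(|r| r.as_str())
        .unwrap_or("");

    if ref_name == "com.atproto.repo.strongRef" {
        GraphQLType::Ref
    } else if !ref_name.is_empty() {
        GraphQLType::LexiconRef(ref_name.to_string())
    } else {
        GraphQLType::Json
    }
}

fn main() {
    assert_eq!(
        classify_ref(&json!({"type": "ref", "ref": "com.atproto.repo.strongRef"})),
        GraphQLType::Ref
    );
    assert_eq!(
        classify_ref(&json!({"type": "ref", "ref": "community.lexicon.location.hthree"})),
        GraphQLType::LexiconRef("community.lexicon.location.hthree".into())
    );
    assert_eq!(classify_ref(&json!({"type": "ref"})), GraphQLType::Json);
}
```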
+19
api/src/jobs.rs
···
148
// Publish job running status to subscribers
149
let running_status = JobStatus {
150
job_id: payload.job_id,
151
status: "running".to_string(),
152
created_at: now,
153
started_at: Some(now),
···
260
// Publish job status update to GraphQL subscribers
261
let job_status = JobStatus {
262
job_id: payload.job_id,
263
status: "completed".to_string(),
264
created_at: chrono::Utc::now(),
265
started_at: Some(chrono::Utc::now()),
···
337
// Publish job status update to GraphQL subscribers
338
let job_status = JobStatus {
339
job_id: payload.job_id,
340
status: "failed".to_string(),
341
created_at: chrono::Utc::now(),
342
started_at: Some(chrono::Utc::now()),
···
536
// Publish job creation event to subscribers
537
let job_status = JobStatus {
538
job_id,
539
status: "pending".to_string(),
540
created_at: chrono::Utc::now(),
541
started_at: None,
···
559
pub struct JobStatus {
560
/// Unique identifier for the job
561
pub job_id: Uuid,
562
/// Current status: "pending", "running", "completed", or "failed"
563
pub status: String,
564
/// Timestamp when job was enqueued
···
611
612
return Ok(Some(JobStatus {
613
job_id,
614
status: result.status,
615
created_at: result.created_at,
616
started_at: Some(result.created_at),
···
647
648
match queue_row {
649
Some(row) => {
650
// Determine status based on attempt_at timestamp
651
let status = if row.attempt_at.is_none() {
652
"completed".to_string()
···
662
663
Ok(Some(JobStatus {
664
job_id,
665
status: status.clone(),
666
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
667
started_at: if status == "running" || status == "completed" {
···
790
791
results.push(JobStatus {
792
job_id: row.job_id.unwrap_or_else(Uuid::new_v4),
793
status: row.status.unwrap_or_default(),
794
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
795
started_at: row.created_at,
···
902
903
results.push(JobStatus {
904
job_id: row.job_id.unwrap_or_else(Uuid::new_v4),
905
status: row.status.unwrap_or_default(),
906
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
907
started_at: row.created_at,
···
1055
// Publish job status update for subscribers
1056
let job_status = JobStatus {
1057
job_id,
1058
status: "cancelled".to_string(),
1059
created_at: now,
1060
started_at: None,
···
148
// Publish job running status to subscribers
149
let running_status = JobStatus {
150
job_id: payload.job_id,
151
+
slice_uri: payload.slice_uri.clone(),
152
status: "running".to_string(),
153
created_at: now,
154
started_at: Some(now),
···
261
// Publish job status update to GraphQL subscribers
262
let job_status = JobStatus {
263
job_id: payload.job_id,
264
+
slice_uri: payload.slice_uri.clone(),
265
status: "completed".to_string(),
266
created_at: chrono::Utc::now(),
267
started_at: Some(chrono::Utc::now()),
···
339
// Publish job status update to GraphQL subscribers
340
let job_status = JobStatus {
341
job_id: payload.job_id,
342
+
slice_uri: payload.slice_uri.clone(),
343
status: "failed".to_string(),
344
created_at: chrono::Utc::now(),
345
started_at: Some(chrono::Utc::now()),
···
539
// Publish job creation event to subscribers
540
let job_status = JobStatus {
541
job_id,
542
+
slice_uri: slice_uri.clone(),
543
status: "pending".to_string(),
544
created_at: chrono::Utc::now(),
545
started_at: None,
···
563
pub struct JobStatus {
564
/// Unique identifier for the job
565
pub job_id: Uuid,
566
+
/// Slice URI this job belongs to
567
+
pub slice_uri: String,
568
/// Current status: "pending", "running", "completed", or "failed"
569
pub status: String,
570
/// Timestamp when job was enqueued
···
617
618
return Ok(Some(JobStatus {
619
job_id,
620
+
slice_uri: result.slice_uri,
621
status: result.status,
622
created_at: result.created_at,
623
started_at: Some(result.created_at),
···
654
655
match queue_row {
656
Some(row) => {
657
+
// Extract slice_uri from payload JSON
658
+
let slice_uri = row.payload_json
659
+
.as_ref()
660
+
.and_then(|json| json.get("slice_uri"))
661
+
.and_then(|v| v.as_str())
662
+
.unwrap_or_default()
663
+
.to_string();
664
+
665
// Determine status based on attempt_at timestamp
666
let status = if row.attempt_at.is_none() {
667
"completed".to_string()
···
677
678
Ok(Some(JobStatus {
679
job_id,
680
+
slice_uri,
681
status: status.clone(),
682
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
683
started_at: if status == "running" || status == "completed" {
···
806
807
results.push(JobStatus {
808
job_id: row.job_id.unwrap_or_else(Uuid::new_v4),
809
+
slice_uri: row.slice_uri.clone().unwrap_or_default(),
810
status: row.status.unwrap_or_default(),
811
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
812
started_at: row.created_at,
···
919
920
results.push(JobStatus {
921
job_id: row.job_id.unwrap_or_else(Uuid::new_v4),
922
+
slice_uri: row.slice_uri.clone().unwrap_or_default(),
923
status: row.status.unwrap_or_default(),
924
created_at: row.created_at.unwrap_or_else(chrono::Utc::now),
925
started_at: row.created_at,
···
1073
// Publish job status update for subscribers
1074
let job_status = JobStatus {
1075
job_id,
1076
+
slice_uri,
1077
status: "cancelled".to_string(),
1078
created_at: now,
1079
started_at: None,
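Queue rows carry no `slice_uri` column of their own, so the diff recovers it from the message payload and falls back to an empty string. A tiny sketch of that extraction with `serde_json`:

```rust
use serde_json::{json, Value};

// Empty string when the payload or key is absent, matching the diff's fallback.
fn slice_uri_from_payload(payload_json: Option<&Value>) -> String {
    payload_json
        .and_then(|json| json.get("slice_uri"))
        .and_then(|v| v.as_str())
        .unwrap_or_default()
        .to_string()
}

fn main() {
    let payload = json!({
        "job_id": "demo-job",
        "slice_uri": "at://did:plc:example/network.slices.slice/xyz"
    });
    assert_eq!(
        slice_uri_from_payload(Some(&payload)),
        "at://did:plc:example/network.slices.slice/xyz"
    );
    assert_eq!(slice_uri_from_payload(None), "");
}
```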
+10
-2
api/src/main.rs
···
109
// Start GraphQL PubSub cleanup task
110
graphql::pubsub::start_cleanup_task();
111
112
// Detect process type from environment (supports both PROCESS_TYPE and FLY_PROCESS_GROUP)
113
let process_type = env::var("PROCESS_TYPE")
114
.or_else(|_| env::var("FLY_PROCESS_GROUP"))
···
183
if now.duration_since(window_start) >= RECONNECT_WINDOW {
184
reconnect_count = 0;
185
window_start = now;
186
}
187
188
// Check rate limit
···
198
}
199
200
reconnect_count += 1;
201
202
// Read cursor position from database
203
let initial_cursor =
···
261
let cancellation_token = atproto_jetstream::CancellationToken::new();
262
match consumer_arc.start_consuming(cancellation_token).await {
263
Ok(_) => {
264
-
tracing::info!("Jetstream consumer shut down normally");
265
jetstream_connected_clone
266
.store(false, std::sync::atomic::Ordering::Relaxed);
267
}
268
Err(e) => {
269
-
tracing::error!("Jetstream consumer failed: {} - will reconnect", e);
270
jetstream_connected_clone
271
.store(false, std::sync::atomic::Ordering::Relaxed);
272
tokio::time::sleep(retry_delay).await;
···
109
// Start GraphQL PubSub cleanup task
110
graphql::pubsub::start_cleanup_task();
111
112
+
// Initialize Redis pub/sub for cross-process sync job updates
113
+
let redis_url = env::var("REDIS_URL").ok();
114
+
graphql::schema_ext::sync::initialize_redis_pubsub(redis_url);
115
+
116
// Detect process type from environment (supports both PROCESS_TYPE and FLY_PROCESS_GROUP)
117
let process_type = env::var("PROCESS_TYPE")
118
.or_else(|_| env::var("FLY_PROCESS_GROUP"))
···
187
if now.duration_since(window_start) >= RECONNECT_WINDOW {
188
reconnect_count = 0;
189
window_start = now;
190
+
retry_delay = tokio::time::Duration::from_secs(5); // Reset delay after window passes
191
}
192
193
// Check rate limit
···
203
}
204
205
reconnect_count += 1;
206
+
tracing::info!("Jetstream connection attempt #{} (retry delay: {:?})", reconnect_count, retry_delay);
207
208
// Read cursor position from database
209
let initial_cursor =
···
267
let cancellation_token = atproto_jetstream::CancellationToken::new();
268
match consumer_arc.start_consuming(cancellation_token).await {
269
Ok(_) => {
270
+
tracing::info!("Jetstream consumer shut down normally - reconnecting in {:?}", retry_delay);
271
jetstream_connected_clone
272
.store(false, std::sync::atomic::Ordering::Relaxed);
273
+
tokio::time::sleep(retry_delay).await;
274
+
retry_delay = std::cmp::min(retry_delay * 2, MAX_RETRY_DELAY);
275
}
276
Err(e) => {
277
+
tracing::error!("Jetstream consumer failed: {} - reconnecting in {:?}", e, retry_delay);
278
jetstream_connected_clone
279
.store(false, std::sync::atomic::Ordering::Relaxed);
280
tokio::time::sleep(retry_delay).await;
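The reconnect loop now pairs exponential doubling with a cap and a reset once the rate-limit window elapses. A compact sketch of that policy; the 60-second cap and 5-minute window here are assumptions standing in for the `MAX_RETRY_DELAY` and `RECONNECT_WINDOW` constants defined elsewhere in `main.rs`:

```rust
use std::time::{Duration, Instant};

const BASE_DELAY: Duration = Duration::from_secs(5);
const MAX_RETRY_DELAY: Duration = Duration::from_secs(60); // assumed cap
const RECONNECT_WINDOW: Duration = Duration::from_secs(300); // assumed window

struct Backoff {
    delay: Duration,
    window_start: Instant,
}

impl Backoff {
    fn new() -> Self {
        Self { delay: BASE_DELAY, window_start: Instant::now() }
    }

    /// Delay to sleep before the next reconnect attempt.
    fn next_delay(&mut self) -> Duration {
        // Reset to the base delay once the window has elapsed,
        // matching the new `retry_delay = from_secs(5)` line in the diff.
        if self.window_start.elapsed() >= RECONNECT_WINDOW {
            self.delay = BASE_DELAY;
            self.window_start = Instant::now();
        }
        let current = self.delay;
        // Double for the next attempt, capped at the maximum.
        self.delay = std::cmp::min(self.delay * 2, MAX_RETRY_DELAY);
        current
    }
}

fn main() {
    let mut backoff = Backoff::new();
    // Prints 5s, 10s, 20s, 40s, 60s, 60s.
    for _ in 0..6 {
        println!("sleep {:?}", backoff.next_delay());
    }
}
```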
+1
-1
crates/slices-lexicon/Cargo.lock
+2
-1
crates/slices-lexicon/Cargo.toml
···
1
[package]
2
name = "slices-lexicon"
3
-
version = "0.2.2"
4
edition = "2021"
5
description = "AT Protocol lexicon validation library for Slices"
6
license = "MIT"
7
documentation = "https://docs.rs/slices-lexicon"
8
readme = "README.md"
9
keywords = ["atproto", "lexicon", "validation", "wasm", "bluesky"]
···
1
[package]
2
name = "slices-lexicon"
3
+
version = "0.3.0"
4
edition = "2021"
5
description = "AT Protocol lexicon validation library for Slices"
6
license = "MIT"
7
+
repository = "https://tangled.org/@slices.network/slices/tree/main/crates/slices-lexicon"
8
documentation = "https://docs.rs/slices-lexicon"
9
readme = "README.md"
10
keywords = ["atproto", "lexicon", "validation", "wasm", "bluesky"]
+1
-1
crates/slices-lexicon/README.md
+114
-2
crates/slices-lexicon/src/validation/context.rs
···
187
/// Creates a new context with the reference added to the resolution stack
188
///
189
/// This is used during recursive reference resolution to detect circular references.
190
///
191
/// # Arguments
192
///
···
194
///
195
/// # Returns
196
///
197
-
/// A new `ValidationContext` with the reference added to the stack
198
pub fn with_reference(&self, reference: &str) -> Self {
199
let mut reference_stack = self.reference_stack.clone();
200
reference_stack.insert(reference.to_string());
201
202
Self {
203
lexicons: self.lexicons.clone(),
204
path: self.path.clone(),
205
strict_mode: self.strict_mode,
206
-
current_lexicon_id: self.current_lexicon_id.clone(),
207
reference_stack,
208
}
209
}
···
721
let image_schema = ctx.resolve_reference("com.example.media#image").unwrap();
722
assert_eq!(image_schema.get("type").and_then(|t| t.as_str()), Some("object"));
723
assert!(image_schema.get("required").is_some());
724
}
725
726
}
···
187
/// Creates a new context with the reference added to the resolution stack
188
///
189
/// This is used during recursive reference resolution to detect circular references.
190
+
/// Also updates the current lexicon context to the referenced lexicon, ensuring that
191
+
/// local references within the resolved schema are resolved in the correct lexicon.
192
///
193
/// # Arguments
194
///
···
196
///
197
/// # Returns
198
///
199
+
/// A new `ValidationContext` with the reference added to the stack and current lexicon updated
200
pub fn with_reference(&self, reference: &str) -> Self {
201
let mut reference_stack = self.reference_stack.clone();
202
reference_stack.insert(reference.to_string());
203
204
+
// Parse the reference to extract the target lexicon ID
205
+
// This ensures local references in the resolved schema are resolved in the correct context
206
+
let new_current_lexicon_id = if let Ok((lexicon_id, _def_name)) = self.parse_reference(reference) {
207
+
Some(lexicon_id)
208
+
} else {
209
+
// If parsing fails, keep the current lexicon ID
210
+
self.current_lexicon_id.clone()
211
+
};
212
+
213
Self {
214
lexicons: self.lexicons.clone(),
215
path: self.path.clone(),
216
strict_mode: self.strict_mode,
217
+
current_lexicon_id: new_current_lexicon_id,
218
reference_stack,
219
}
220
}
···
732
let image_schema = ctx.resolve_reference("com.example.media#image").unwrap();
733
assert_eq!(image_schema.get("type").and_then(|t| t.as_str()), Some("object"));
734
assert!(image_schema.get("required").is_some());
735
+
}
736
+
737
+
#[test]
738
+
fn test_cross_lexicon_union_with_local_refs() {
739
+
// Regression test for bug where local references in a union weren't resolved
740
+
// in the correct lexicon context after a cross-lexicon reference.
741
+
// This mirrors the pub.leaflet.blocks.text -> pub.leaflet.richtext.facet scenario
742
+
743
+
let ctx = ValidationContext::builder()
744
+
.with_lexicons(vec![
745
+
json!({
746
+
"lexicon": 1,
747
+
"id": "pub.leaflet.blocks.text",
748
+
"defs": {
749
+
"main": {
750
+
"type": "object",
751
+
"required": ["plaintext"],
752
+
"properties": {
753
+
"plaintext": {"type": "string"},
754
+
"facets": {
755
+
"type": "array",
756
+
"items": {
757
+
"type": "ref",
758
+
"ref": "pub.leaflet.richtext.facet"
759
+
}
760
+
}
761
+
}
762
+
}
763
+
}
764
+
}),
765
+
json!({
766
+
"lexicon": 1,
767
+
"id": "pub.leaflet.richtext.facet",
768
+
"defs": {
769
+
"main": {
770
+
"type": "object",
771
+
"required": ["index", "features"],
772
+
"properties": {
773
+
"index": {"type": "ref", "ref": "#byteSlice"},
774
+
"features": {
775
+
"type": "array",
776
+
"items": {
777
+
"type": "union",
778
+
"refs": ["#bold", "#italic", "#link"]
779
+
}
780
+
}
781
+
}
782
+
},
783
+
"byteSlice": {
784
+
"type": "object",
785
+
"required": ["byteStart", "byteEnd"],
786
+
"properties": {
787
+
"byteStart": {"type": "integer", "minimum": 0},
788
+
"byteEnd": {"type": "integer", "minimum": 0}
789
+
}
790
+
},
791
+
"bold": {
792
+
"type": "object",
793
+
"description": "Facet feature for bold text",
794
+
"properties": {}
795
+
},
796
+
"italic": {
797
+
"type": "object",
798
+
"description": "Facet feature for italic text",
799
+
"properties": {}
800
+
},
801
+
"link": {
802
+
"type": "object",
803
+
"required": ["uri"],
804
+
"properties": {
805
+
"uri": {"type": "string", "format": "uri"}
806
+
}
807
+
}
808
+
}
809
+
})
810
+
])
811
+
.unwrap()
812
+
.build()
813
+
.unwrap()
814
+
.with_current_lexicon("pub.leaflet.blocks.text");
815
+
816
+
// Test 1: Verify we can resolve the cross-lexicon reference
817
+
let facet_schema = ctx.resolve_reference("pub.leaflet.richtext.facet").unwrap();
818
+
assert_eq!(facet_schema.get("type").and_then(|t| t.as_str()), Some("object"));
819
+
820
+
// Test 2: Verify that with_reference updates the current lexicon context correctly
821
+
let ref_ctx = ctx.with_reference("pub.leaflet.richtext.facet");
822
+
assert_eq!(ref_ctx.current_lexicon_id(), Some("pub.leaflet.richtext.facet"));
823
+
824
+
// Test 3: Most importantly - verify local references resolve in the TARGET lexicon
825
+
// This is the bug we fixed: #bold should resolve in pub.leaflet.richtext.facet, not pub.leaflet.blocks.text
826
+
let bold_schema = ref_ctx.resolve_reference("#bold").unwrap();
827
+
assert_eq!(bold_schema.get("type").and_then(|t| t.as_str()), Some("object"));
828
+
assert_eq!(bold_schema.get("description").and_then(|d| d.as_str()), Some("Facet feature for bold text"));
829
+
830
+
// Test 4: Verify other local references in the union also work
831
+
let italic_schema = ref_ctx.resolve_reference("#italic").unwrap();
832
+
assert_eq!(italic_schema.get("description").and_then(|d| d.as_str()), Some("Facet feature for italic text"));
833
+
834
+
let link_schema = ref_ctx.resolve_reference("#link").unwrap();
835
+
assert!(link_schema.get("required").is_some());
836
}
837
838
}
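The fix hinges on `parse_reference` yielding the target lexicon ID so `with_reference` can re-anchor `current_lexicon_id`. A hypothetical sketch of the three reference shapes involved (`nsid`, `nsid#def`, `#localDef`); the crate's actual `parse_reference` may differ in signature, ownership, and error handling:

```rust
/// Hypothetical sketch; the real `parse_reference` returns a Result and
/// may validate NSIDs rather than returning borrowed slices.
fn parse_reference<'a>(
    reference: &'a str,
    current_lexicon_id: Option<&'a str>,
) -> Option<(&'a str, &'a str)> {
    match reference.split_once('#') {
        // "#byteSlice": local ref, resolved against the current lexicon
        Some(("", def)) => current_lexicon_id.map(|id| (id, def)),
        // "pub.leaflet.richtext.facet#bold": fully qualified ref
        Some((lexicon_id, def)) => Some((lexicon_id, def)),
        // "pub.leaflet.richtext.facet": whole-lexicon ref targets `main`
        None => Some((reference, "main")),
    }
}

fn main() {
    // After following the cross-lexicon ref, "#bold" must resolve in the
    // *target* lexicon - the exact bug the new regression test covers.
    assert_eq!(
        parse_reference("pub.leaflet.richtext.facet", Some("pub.leaflet.blocks.text")),
        Some(("pub.leaflet.richtext.facet", "main"))
    );
    assert_eq!(
        parse_reference("#bold", Some("pub.leaflet.richtext.facet")),
        Some(("pub.leaflet.richtext.facet", "bold"))
    );
}
```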
+2
-1
crates/slices-lexicon/src/validation/field/array.rs
+7
-2
crates/slices-lexicon/src/validation/field/union.rs
···
358
.with_lexicons(vec![json!({
359
"lexicon": 1,
360
"id": "com.example.test",
361
+
"defs": {
362
+
"main": union_def.clone(),
363
+
"post": { "type": "object", "properties": {} },
364
+
"repost": { "type": "object", "properties": {} }
365
+
}
366
})])
367
.unwrap()
368
.build()
369
+
.unwrap()
370
+
.with_current_lexicon("com.example.test");
371
372
let validator = UnionValidator;
373
assert!(validator.validate(&union_def, &ctx).is_ok());
+32
-3
crates/slices-lexicon/src/validation/primary/record.rs
···
277
///
278
/// - `tid`: Record key is a Timestamp Identifier (auto-generated)
279
/// - `any`: Record key can be any valid record key format
280
/// - `literal:self`: Record key must be exactly "self"
281
///
282
/// # Arguments
···
291
/// - The key is not one of the valid types
292
fn validate_key(def_name: &str, key_value: &Value) -> Result<(), ValidationError> {
293
match key_value.as_str() {
294
-
Some("tid") | Some("any") => Ok(()),
295
Some(k) if k.starts_with("literal:") => Ok(()),
296
Some(invalid) => Err(ValidationError::InvalidSchema(format!(
297
-
"Record '{}' has invalid key type '{}'. Must be 'tid', 'any', or 'literal:*'",
298
def_name, invalid
299
))),
300
None => Err(ValidationError::InvalidSchema(format!(
···
609
}
610
611
#[test]
612
fn test_invalid_key_type() {
613
let record = json!({
614
"type": "record",
···
849
})])
850
.unwrap()
851
.build()
852
-
.unwrap();
853
854
let validator = RecordValidator;
855
assert!(validator.validate(&schema, &ctx).is_ok());
···
277
///
278
/// - `tid`: Record key is a Timestamp Identifier (auto-generated)
279
/// - `any`: Record key can be any valid record key format
280
+
/// - `nsid`: Record key must be a valid NSID
281
/// - `literal:self`: Record key must be exactly "self"
282
///
283
/// # Arguments
···
292
/// - The key is not one of the valid types
293
fn validate_key(def_name: &str, key_value: &Value) -> Result<(), ValidationError> {
294
match key_value.as_str() {
295
+
Some("tid") | Some("any") | Some("nsid") => Ok(()),
296
Some(k) if k.starts_with("literal:") => Ok(()),
297
Some(invalid) => Err(ValidationError::InvalidSchema(format!(
298
+
"Record '{}' has invalid key type '{}'. Must be 'tid', 'any', 'nsid', or 'literal:*'",
299
def_name, invalid
300
))),
301
None => Err(ValidationError::InvalidSchema(format!(
···
610
}
611
612
#[test]
613
+
fn test_valid_record_nsid() {
614
+
let record = json!({
615
+
"type": "record",
616
+
"key": "nsid",
617
+
"record": {
618
+
"type": "object",
619
+
"properties": {
620
+
"text": { "type": "string" }
621
+
}
622
+
}
623
+
});
624
+
625
+
let ctx = ValidationContext::builder()
626
+
.with_lexicons(vec![json!({
627
+
"lexicon": 1,
628
+
"id": "com.example.test",
629
+
"defs": { "main": record.clone() }
630
+
})])
631
+
.unwrap()
632
+
.build()
633
+
.unwrap();
634
+
635
+
let validator = RecordValidator;
636
+
assert!(validator.validate(&record, &ctx).is_ok());
637
+
}
638
+
639
+
#[test]
640
fn test_invalid_key_type() {
641
let record = json!({
642
"type": "record",
···
877
})])
878
.unwrap()
879
.build()
880
+
.unwrap()
881
+
.with_current_lexicon("com.example.test");
882
883
let validator = RecordValidator;
884
assert!(validator.validate(&schema, &ctx).is_ok());
+7
-2
crates/slices-lexicon/src/validation/primary/subscription.rs
···
471
.with_lexicons(vec![json!({
472
"lexicon": 1,
473
"id": "com.example.test",
474
+
"defs": {
475
+
"main": subscription.clone(),
476
+
"post": { "type": "object", "properties": {} },
477
+
"like": { "type": "object", "properties": {} }
478
+
}
479
})])
480
.unwrap()
481
.build()
482
+
.unwrap()
483
+
.with_current_lexicon("com.example.test");
484
485
let validator = SubscriptionValidator;
486
assert!(validator.validate(&subscription, &ctx).is_ok());
+24
-11
crates/slices-lexicon/src/validation/primitive/string.rs
···
577
578
/// Validates TID (Timestamp Identifier) format
579
///
580
-
/// TID format: 13-character base32-encoded timestamp + random bits
581
-
/// Uses Crockford base32 alphabet: 0123456789ABCDEFGHJKMNPQRSTVWXYZ (case-insensitive)
582
pub fn is_valid_tid(&self, value: &str) -> bool {
583
use regex::Regex;
584
···
586
return false;
587
}
588
589
-
// TID uses Crockford base32 (case-insensitive, excludes I, L, O, U)
590
-
let tid_regex = Regex::new(r"^[0-9A-HJKMNP-TV-Z]{13}$").unwrap();
591
-
let uppercase_value = value.to_uppercase();
592
593
-
tid_regex.is_match(&uppercase_value)
594
}
595
596
/// Validates Record Key format
···
1096
1097
let validator = StringValidator;
1098
1099
-
// Valid TIDs (13 characters, Crockford base32)
1100
-
assert!(validator.validate_data(&json!("3JZFKJT0000ZZ"), &schema, &ctx).is_ok());
1101
-
assert!(validator.validate_data(&json!("3jzfkjt0000zz"), &schema, &ctx).is_ok()); // case insensitive
1102
1103
-
// Invalid TIDs
1104
assert!(validator.validate_data(&json!("too-short"), &schema, &ctx).is_err());
1105
assert!(validator.validate_data(&json!("too-long-string"), &schema, &ctx).is_err());
1106
assert!(validator.validate_data(&json!("invalid-chars!"), &schema, &ctx).is_err());
1107
-
assert!(validator.validate_data(&json!("invalid-ILOU0"), &schema, &ctx).is_err()); // invalid chars (I, L, O, U)
1108
}
1109
1110
#[test]
···
577
578
/// Validates TID (Timestamp Identifier) format
579
///
580
+
/// TID format: 13-character base32-sortable encoded timestamp + random bits
581
+
/// Uses ATProto base32-sortable alphabet: 234567abcdefghijklmnopqrstuvwxyz (lowercase only)
582
pub fn is_valid_tid(&self, value: &str) -> bool {
583
use regex::Regex;
584
···
586
return false;
587
}
588
589
+
// TID uses base32-sortable (s32) - lowercase only
590
+
// First character must be from limited set (ensures top bit is 0)
591
+
// Remaining 12 characters from full base32-sortable alphabet
592
+
let tid_regex = Regex::new(r"^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$").unwrap();
593
594
+
tid_regex.is_match(value)
595
}
596
597
/// Validates Record Key format
···
1097
1098
let validator = StringValidator;
1099
1100
+
// Valid TIDs (base32-sortable, 13 chars, lowercase)
1101
+
assert!(validator.validate_data(&json!("3m3zm7eurxk26"), &schema, &ctx).is_ok());
1102
+
assert!(validator.validate_data(&json!("2222222222222"), &schema, &ctx).is_ok()); // minimum TID
1103
+
assert!(validator.validate_data(&json!("a222222222222"), &schema, &ctx).is_ok()); // leading 'a' (lower bound)
1104
+
assert!(validator.validate_data(&json!("j234567abcdef"), &schema, &ctx).is_ok()); // leading 'j' (upper bound)
1105
+
1106
1107
+
// Invalid TIDs - uppercase not allowed (charset is lowercase only)
1108
+
assert!(validator.validate_data(&json!("3m3zM7eurxk26"), &schema, &ctx).is_err()); // mixed case
1109
+
1110
+
// Invalid TIDs - wrong length
1111
assert!(validator.validate_data(&json!("too-short"), &schema, &ctx).is_err());
1112
assert!(validator.validate_data(&json!("too-long-string"), &schema, &ctx).is_err());
1113
+
1114
+
// Invalid TIDs - invalid characters (hyphen/punct rejected; digits 0,1,8,9 not allowed)
1115
assert!(validator.validate_data(&json!("invalid-chars!"), &schema, &ctx).is_err());
1116
+
assert!(validator.validate_data(&json!("xyz1234567890"), &schema, &ctx).is_err()); // has 0,1,8,9
1117
+
1118
+
// Invalid TIDs - first character must be one of 234567abcdefghij
1119
+
assert!(validator.validate_data(&json!("k222222222222"), &schema, &ctx).is_err()); // leading 'k' forbidden
1120
+
assert!(validator.validate_data(&json!("z234567abcdef"), &schema, &ctx).is_err()); // leading 'z' forbidden
1121
}
1122
1123
#[test]
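The corrected charset can be checked standalone. A sketch of the same check with the `regex` crate, using the diff's pattern verbatim (a production version would compile the regex once, e.g. behind a `OnceLock`, instead of per call):

```rust
use regex::Regex;

// TIDs are 13 chars of base32-sortable ("234567abcdefghijklmnopqrstuvwxyz");
// the first char is limited to 234567abcdefghij so the top bit stays zero.
fn is_valid_tid(value: &str) -> bool {
    let re = Regex::new(r"^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$").unwrap();
    re.is_match(value)
}

fn main() {
    assert!(is_valid_tid("3m3zm7eurxk26"));
    assert!(is_valid_tid("2222222222222")); // minimum TID
    assert!(!is_valid_tid("3m3zM7eurxk26")); // uppercase rejected
    assert!(!is_valid_tid("k222222222222")); // first char out of range
    assert!(!is_valid_tid("xyz1234567890")); // 0, 1, 8, 9 not in the alphabet
}
```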
+3
-1
deno.json
···
28
"@std/fs": "jsr:@std/fs@^1.0.4",
29
"@std/fmt": "jsr:@std/fmt@^1.0.2"
30
},
31
+
"unstable": ["sloppy-imports"],
32
"exclude": [
33
"packages/cli/src/templates/deno-ssr/",
34
"packages/lexicon-intellisense/",
35
+
"frontend-v2/src/**/__generated__/**",
36
+
"**/.vite/**"
37
],
38
"nodeModulesDir": "auto"
39
}
+203
-12
deno.lock
···
2
"version": "5",
3
"specifiers": {
4
"jsr:@shikijs/shiki@^3.7.0": "3.7.0",
5
"jsr:@std/assert@^1.0.14": "1.0.14",
6
"jsr:@std/cli@^1.0.21": "1.0.22",
7
"jsr:@std/cli@^1.0.22": "1.0.22",
···
20
"jsr:@std/path@^1.0.6": "1.1.2",
21
"jsr:@std/path@^1.1.1": "1.1.2",
22
"jsr:@std/streams@^1.0.10": "1.0.12",
23
-
"npm:@deno/vite-plugin@^1.0.5": "1.0.5_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@types+node@24.7.2",
24
"npm:@shikijs/core@^3.7.0": "3.13.0",
25
"npm:@shikijs/engine-oniguruma@^3.7.0": "3.13.0",
26
"npm:@shikijs/types@^3.7.0": "3.13.0",
···
29
"npm:@takumi-rs/core@~0.36.2": "0.36.2",
30
"npm:@takumi-rs/helpers@~0.29.8": "0.29.8",
31
"npm:@takumi-rs/helpers@~0.36.2": "0.36.2",
32
"npm:@types/node@^24.6.0": "24.7.2",
33
"npm:@types/react-dom@^19.1.9": "19.2.0_@types+react@19.2.0",
34
"npm:@types/react-relay@^18.2.1": "18.2.1",
35
"npm:@types/react@^19.1.16": "19.2.0",
36
"npm:@types/relay-runtime@^19.0.3": "19.0.3",
37
-
"npm:@vitejs/plugin-react@^5.0.4": "5.0.4_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@babel+core@7.28.4_@types+node@24.7.2",
38
"npm:autoprefixer@^10.4.21": "10.4.21_postcss@8.5.6",
39
"npm:babel-plugin-relay@^20.1.1": "20.1.1",
40
"npm:clsx@^2.1.1": "2.1.1",
···
63
"npm:tailwindcss@^4.1.14": "4.1.14",
64
"npm:ts-morph@26.0.0": "26.0.0",
65
"npm:typed-htmx@~0.3.1": "0.3.1",
66
-
"npm:vite@*": "7.1.10_@types+node@24.7.2_picomatch@4.0.3",
67
-
"npm:vite@^7.1.7": "7.1.10_@types+node@24.7.2_picomatch@4.0.3"
68
},
69
"jsr": {
70
"@shikijs/shiki@3.7.0": {
···
281
"@deno/vite-plugin@1.0.5_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@types+node@24.7.2": {
282
"integrity": "sha512-tLja5n4dyMhcze1NzvSs2iiriBymfBlDCZIrjMTxb9O2ru0gvmV6mn5oBD2teNw5Sd92cj3YJzKwsAs8tMJXlg==",
283
"dependencies": [
284
-
"vite"
285
]
286
},
287
"@emnapi/core@1.5.0": {
···
475
"@jridgewell/sourcemap-codec"
476
]
477
},
478
"@napi-rs/wasm-runtime@1.0.7": {
479
"integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==",
480
"dependencies": [
···
482
"@emnapi/runtime",
483
"@tybys/wasm-util"
484
]
485
},
486
"@nodelib/fs.scandir@2.1.5": {
487
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
···
741
"@tailwindcss/oxide@4.1.14": {
742
"integrity": "sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==",
743
"dependencies": [
744
-
"detect-libc",
745
"tar"
746
],
747
"optionalDependencies": [
···
934
"@types/unist"
935
]
936
},
937
"@types/node@24.7.2": {
938
"integrity": "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==",
939
"dependencies": [
940
-
"undici-types"
941
]
942
},
943
"@types/parse-json@4.0.2": {
···
968
"@types/unist@3.0.3": {
969
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="
970
},
971
"@ungap/structured-clone@1.3.0": {
972
"integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="
973
},
···
980
"@rolldown/pluginutils",
981
"@types/babel__core",
982
"react-refresh",
983
-
"vite"
984
]
985
},
986
"argparse@1.0.10": {
···
1112
"cross-fetch@3.2.0": {
1113
"integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==",
1114
"dependencies": [
1115
-
"node-fetch"
1116
]
1117
},
1118
"csstype@3.1.3": {
1119
"integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="
1120
},
1121
"debug@4.4.3": {
1122
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
···
1126
},
1127
"dequal@2.0.3": {
1128
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="
1129
},
1130
"detect-libc@2.1.2": {
1131
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="
···
1232
"picomatch@4.0.3"
1233
]
1234
},
1235
"fill-range@7.1.1": {
1236
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
1237
"dependencies": [
1238
"to-regex-range"
1239
]
1240
},
1241
"fraction.js@4.3.7": {
···
1355
"integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==",
1356
"bin": true
1357
},
1358
"js-tokens@4.0.0": {
1359
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
1360
},
···
1380
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
1381
"bin": true
1382
},
1383
"lightningcss-darwin-arm64@1.30.1": {
1384
"integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==",
1385
"os": ["darwin"],
···
1433
"lightningcss@1.30.1": {
1434
"integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==",
1435
"dependencies": [
1436
-
"detect-libc"
1437
],
1438
"optionalDependencies": [
1439
"lightningcss-darwin-arm64",
···
1562
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
1563
"bin": true
1564
},
1565
"node-fetch@2.7.0": {
1566
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
1567
"dependencies": [
1568
"whatwg-url"
1569
]
1570
},
1571
"node-releases@2.0.23": {
···
1961
"integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==",
1962
"bin": true
1963
},
1964
"undici-types@7.14.0": {
1965
"integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA=="
1966
},
···
2023
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3": {
2024
"integrity": "sha512-CmuvUBzVJ/e3HGxhg6cYk88NGgTnBoOo7ogtfJJ0fefUWAxN/WDSUa50o+oVBxuIhO8FoEZW0j2eW7sfjs5EtA==",
2025
"dependencies": [
2026
-
"@types/node",
2027
"esbuild",
2028
"fdir",
2029
"picomatch@4.0.3",
···
2035
"fsevents"
2036
],
2037
"optionalPeers": [
2038
-
"@types/node"
2039
],
2040
"bin": true
2041
},
2042
"webidl-conversions@3.0.1": {
2043
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
2044
},
···
2048
"tr46",
2049
"webidl-conversions"
2050
]
2051
},
2052
"xtend@4.0.2": {
2053
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
···
2140
"npm:ts-morph@26.0.0"
2141
]
2142
},
2143
"packages/session": {
2144
"dependencies": [
2145
"npm:pg@^8.16.3"
2146
]
2147
}
···
2
"version": "5",
3
"specifiers": {
4
"jsr:@shikijs/shiki@^3.7.0": "3.7.0",
5
+
"jsr:@std/assert@*": "1.0.14",
6
"jsr:@std/assert@^1.0.14": "1.0.14",
7
"jsr:@std/cli@^1.0.21": "1.0.22",
8
"jsr:@std/cli@^1.0.22": "1.0.22",
···
21
"jsr:@std/path@^1.0.6": "1.1.2",
22
"jsr:@std/path@^1.1.1": "1.1.2",
23
"jsr:@std/streams@^1.0.10": "1.0.12",
24
+
"npm:@deno/vite-plugin@^1.0.5": "1.0.5_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@types+node@24.7.2_@types+node@24.2.0",
25
+
"npm:@libsql/client@0.6.0": "0.6.0",
26
"npm:@shikijs/core@^3.7.0": "3.13.0",
27
"npm:@shikijs/engine-oniguruma@^3.7.0": "3.13.0",
28
"npm:@shikijs/types@^3.7.0": "3.13.0",
···
31
"npm:@takumi-rs/core@~0.36.2": "0.36.2",
32
"npm:@takumi-rs/helpers@~0.29.8": "0.29.8",
33
"npm:@takumi-rs/helpers@~0.36.2": "0.36.2",
34
+
"npm:@types/node@*": "24.2.0",
35
"npm:@types/node@^24.6.0": "24.7.2",
36
"npm:@types/react-dom@^19.1.9": "19.2.0_@types+react@19.2.0",
37
"npm:@types/react-relay@^18.2.1": "18.2.1",
38
"npm:@types/react@^19.1.16": "19.2.0",
39
"npm:@types/relay-runtime@^19.0.3": "19.0.3",
40
+
"npm:@vitejs/plugin-react@^5.0.4": "5.0.4_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@babel+core@7.28.4_@types+node@24.7.2_@types+node@24.2.0",
41
"npm:autoprefixer@^10.4.21": "10.4.21_postcss@8.5.6",
42
"npm:babel-plugin-relay@^20.1.1": "20.1.1",
43
"npm:clsx@^2.1.1": "2.1.1",
···
66
"npm:tailwindcss@^4.1.14": "4.1.14",
67
"npm:ts-morph@26.0.0": "26.0.0",
68
"npm:typed-htmx@~0.3.1": "0.3.1",
69
+
"npm:vite@*": "7.1.10_@types+node@24.7.2_picomatch@4.0.3_@types+node@24.2.0",
70
+
"npm:vite@^7.1.7": "7.1.10_@types+node@24.7.2_picomatch@4.0.3_@types+node@24.2.0"
71
},
72
"jsr": {
73
"@shikijs/shiki@3.7.0": {
···
284
"@deno/vite-plugin@1.0.5_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@types+node@24.7.2": {
285
"integrity": "sha512-tLja5n4dyMhcze1NzvSs2iiriBymfBlDCZIrjMTxb9O2ru0gvmV6mn5oBD2teNw5Sd92cj3YJzKwsAs8tMJXlg==",
286
"dependencies": [
287
+
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3"
288
+
]
289
+
},
290
+
"@deno/vite-plugin@1.0.5_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@types+node@24.7.2_@types+node@24.2.0": {
291
+
"integrity": "sha512-tLja5n4dyMhcze1NzvSs2iiriBymfBlDCZIrjMTxb9O2ru0gvmV6mn5oBD2teNw5Sd92cj3YJzKwsAs8tMJXlg==",
292
+
"dependencies": [
293
+
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3_@types+node@24.2.0"
294
]
295
},
296
"@emnapi/core@1.5.0": {
···
484
"@jridgewell/sourcemap-codec"
485
]
486
},
487
+
"@libsql/client@0.6.0": {
488
+
"integrity": "sha512-qhQzTG/y2IEVbL3+9PULDvlQFWJ/RnjFXECr/Nc3nRngGiiMysDaOV5VUzYk7DulUX98EA4wi+z3FspKrUplUA==",
489
+
"dependencies": [
490
+
"@libsql/core",
491
+
"@libsql/hrana-client",
492
+
"js-base64",
493
+
"libsql"
494
+
]
495
+
},
496
+
"@libsql/core@0.6.2": {
497
+
"integrity": "sha512-c2P4M+4u/4b2L02A0KjggO3UW51rGkhxr/7fzJO0fEAqsqrWGxuNj2YtRkina/oxfYvAof6xjp8RucNoIV/Odw==",
498
+
"dependencies": [
499
+
"js-base64"
500
+
]
501
+
},
502
+
"@libsql/darwin-arm64@0.3.19": {
503
+
"integrity": "sha512-rmOqsLcDI65zzxlUOoEiPJLhqmbFsZF6p4UJQ2kMqB+Kc0Rt5/A1OAdOZ/Wo8fQfJWjR1IbkbpEINFioyKf+nQ==",
504
+
"os": ["darwin"],
505
+
"cpu": ["arm64"]
506
+
},
507
+
"@libsql/darwin-x64@0.3.19": {
508
+
"integrity": "sha512-q9O55B646zU+644SMmOQL3FIfpmEvdWpRpzubwFc2trsa+zoBlSkHuzU9v/C+UNoPHQVRMP7KQctJ455I/h/xw==",
509
+
"os": ["darwin"],
510
+
"cpu": ["x64"]
511
+
},
512
+
"@libsql/hrana-client@0.6.2": {
513
+
"integrity": "sha512-MWxgD7mXLNf9FXXiM0bc90wCjZSpErWKr5mGza7ERy2FJNNMXd7JIOv+DepBA1FQTIfI8TFO4/QDYgaQC0goNw==",
514
+
"dependencies": [
515
+
"@libsql/isomorphic-fetch",
516
+
"@libsql/isomorphic-ws",
517
+
"js-base64",
518
+
"node-fetch@3.3.2"
519
+
]
520
+
},
521
+
"@libsql/isomorphic-fetch@0.2.5": {
522
+
"integrity": "sha512-8s/B2TClEHms2yb+JGpsVRTPBfy1ih/Pq6h6gvyaNcYnMVJvgQRY7wAa8U2nD0dppbCuDU5evTNMEhrQ17ZKKg=="
523
+
},
524
+
"@libsql/isomorphic-ws@0.1.5": {
525
+
"integrity": "sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==",
526
+
"dependencies": [
527
+
"@types/ws",
528
+
"ws"
529
+
]
530
+
},
531
+
"@libsql/linux-arm64-gnu@0.3.19": {
532
+
"integrity": "sha512-mgeAUU1oqqh57k7I3cQyU6Trpdsdt607eFyEmH5QO7dv303ti+LjUvh1pp21QWV6WX7wZyjeJV1/VzEImB+jRg==",
533
+
"os": ["linux"],
534
+
"cpu": ["arm64"]
535
+
},
536
+
"@libsql/linux-arm64-musl@0.3.19": {
537
+
"integrity": "sha512-VEZtxghyK6zwGzU9PHohvNxthruSxBEnRrX7BSL5jQ62tN4n2JNepJ6SdzXp70pdzTfwroOj/eMwiPt94gkVRg==",
538
+
"os": ["linux"],
539
+
"cpu": ["arm64"]
540
+
},
541
+
"@libsql/linux-x64-gnu@0.3.19": {
542
+
"integrity": "sha512-2t/J7LD5w2f63wGihEO+0GxfTyYIyLGEvTFEsMO16XI5o7IS9vcSHrxsvAJs4w2Pf907uDjmc7fUfMg6L82BrQ==",
543
+
"os": ["linux"],
544
+
"cpu": ["x64"]
545
+
},
546
+
"@libsql/linux-x64-musl@0.3.19": {
547
+
"integrity": "sha512-BLsXyJaL8gZD8+3W2LU08lDEd9MIgGds0yPy5iNPp8tfhXx3pV/Fge2GErN0FC+nzt4DYQtjL+A9GUMglQefXQ==",
548
+
"os": ["linux"],
549
+
"cpu": ["x64"]
550
+
},
551
+
"@libsql/win32-x64-msvc@0.3.19": {
552
+
"integrity": "sha512-ay1X9AobE4BpzG0XPw1gplyLZPGHIgJOovvW23gUrukRegiUP62uzhpRbKNogLlUOynyXeq//prHgPXiebUfWg==",
553
+
"os": ["win32"],
554
+
"cpu": ["x64"]
555
+
},
556
"@napi-rs/wasm-runtime@1.0.7": {
557
"integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==",
558
"dependencies": [
···
560
"@emnapi/runtime",
561
"@tybys/wasm-util"
562
]
563
+
},
564
+
"@neon-rs/load@0.0.4": {
565
+
"integrity": "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw=="
566
},
567
"@nodelib/fs.scandir@2.1.5": {
568
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
···
822
"@tailwindcss/oxide@4.1.14": {
823
"integrity": "sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==",
824
"dependencies": [
825
+
"detect-libc@2.1.2",
826
"tar"
827
],
828
"optionalDependencies": [
···
1015
"@types/unist"
1016
]
1017
},
1018
+
"@types/node@24.2.0": {
1019
+
"integrity": "sha512-3xyG3pMCq3oYCNg7/ZP+E1ooTaGB4cG8JWRsqqOYQdbWNY4zbaV0Ennrd7stjiJEFZCaybcIgpTjJWHRfBSIDw==",
1020
+
"dependencies": [
1021
+
"undici-types@7.10.0"
1022
+
]
1023
+
},
1024
"@types/node@24.7.2": {
1025
"integrity": "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==",
1026
"dependencies": [
1027
+
"undici-types@7.14.0"
1028
]
1029
},
1030
"@types/parse-json@4.0.2": {
···
1055
"@types/unist@3.0.3": {
1056
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="
1057
},
1058
+
"@types/ws@8.18.1": {
1059
+
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
1060
+
"dependencies": [
1061
+
"@types/node@24.2.0"
1062
+
]
1063
+
},
1064
"@ungap/structured-clone@1.3.0": {
1065
"integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="
1066
},
···
1073
"@rolldown/pluginutils",
1074
"@types/babel__core",
1075
"react-refresh",
1076
+
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3"
1077
+
]
1078
+
},
1079
+
"@vitejs/plugin-react@5.0.4_vite@7.1.10__@types+node@24.7.2__picomatch@4.0.3_@babel+core@7.28.4_@types+node@24.7.2_@types+node@24.2.0": {
1080
+
"integrity": "sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==",
1081
+
"dependencies": [
1082
+
"@babel/core",
1083
+
"@babel/plugin-transform-react-jsx-self",
1084
+
"@babel/plugin-transform-react-jsx-source",
1085
+
"@rolldown/pluginutils",
1086
+
"@types/babel__core",
1087
+
"react-refresh",
1088
+
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3_@types+node@24.2.0"
1089
]
1090
},
1091
"argparse@1.0.10": {
···
1217
"cross-fetch@3.2.0": {
1218
"integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==",
1219
"dependencies": [
1220
+
"node-fetch@2.7.0"
1221
]
1222
},
1223
"csstype@3.1.3": {
1224
"integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="
1225
+
},
1226
+
"data-uri-to-buffer@4.0.1": {
1227
+
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="
1228
},
1229
"debug@4.4.3": {
1230
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
···
1234
},
1235
"dequal@2.0.3": {
1236
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="
1237
+
},
1238
+
"detect-libc@2.0.2": {
1239
+
"integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw=="
1240
},
1241
"detect-libc@2.1.2": {
1242
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="
···
1343
"picomatch@4.0.3"
1344
]
1345
},
1346
+
"fetch-blob@3.2.0": {
1347
+
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
1348
+
"dependencies": [
1349
+
"node-domexception",
1350
+
"web-streams-polyfill"
1351
+
]
1352
+
},
1353
"fill-range@7.1.1": {
1354
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
1355
"dependencies": [
1356
"to-regex-range"
1357
+
]
1358
+
},
1359
+
"formdata-polyfill@4.0.10": {
1360
+
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
1361
+
"dependencies": [
1362
+
"fetch-blob"
1363
]
1364
},
1365
"fraction.js@4.3.7": {
···
1479
"integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==",
1480
"bin": true
1481
},
1482
+
"js-base64@3.7.8": {
1483
+
"integrity": "sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow=="
1484
+
},
1485
"js-tokens@4.0.0": {
1486
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
1487
},
···
1507
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
1508
"bin": true
1509
},
1510
+
"libsql@0.3.19": {
1511
+
"integrity": "sha512-Aj5cQ5uk/6fHdmeW0TiXK42FqUlwx7ytmMLPSaUQPin5HKKKuUPD62MAbN4OEweGBBI7q1BekoEN4gPUEL6MZA==",
1512
+
"dependencies": [
1513
+
"@neon-rs/load",
1514
+
"detect-libc@2.0.2"
1515
+
],
1516
+
"optionalDependencies": [
1517
+
"@libsql/darwin-arm64",
1518
+
"@libsql/darwin-x64",
1519
+
"@libsql/linux-arm64-gnu",
1520
+
"@libsql/linux-arm64-musl",
1521
+
"@libsql/linux-x64-gnu",
1522
+
"@libsql/linux-x64-musl",
1523
+
"@libsql/win32-x64-msvc"
1524
+
],
1525
+
"os": ["darwin", "linux", "win32"],
1526
+
"cpu": ["x64", "arm64", "wasm32"]
1527
+
},
1528
"lightningcss-darwin-arm64@1.30.1": {
1529
"integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==",
1530
"os": ["darwin"],
···
1578
"lightningcss@1.30.1": {
1579
"integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==",
1580
"dependencies": [
1581
+
"detect-libc@2.1.2"
1582
],
1583
"optionalDependencies": [
1584
"lightningcss-darwin-arm64",
···
1707
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
1708
"bin": true
1709
},
1710
+
"node-domexception@1.0.0": {
1711
+
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
1712
+
"deprecated": true
1713
+
},
1714
"node-fetch@2.7.0": {
1715
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
1716
"dependencies": [
1717
"whatwg-url"
1718
+
]
1719
+
},
1720
+
"node-fetch@3.3.2": {
1721
+
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
1722
+
"dependencies": [
1723
+
"data-uri-to-buffer",
1724
+
"fetch-blob",
1725
+
"formdata-polyfill"
1726
]
1727
},
1728
"node-releases@2.0.23": {
···
2118
"integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==",
2119
"bin": true
2120
},
2121
+
"undici-types@7.10.0": {
2122
+
"integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="
2123
+
},
2124
"undici-types@7.14.0": {
2125
"integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA=="
2126
},
···
2183
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3": {
2184
"integrity": "sha512-CmuvUBzVJ/e3HGxhg6cYk88NGgTnBoOo7ogtfJJ0fefUWAxN/WDSUa50o+oVBxuIhO8FoEZW0j2eW7sfjs5EtA==",
2185
"dependencies": [
2186
+
"@types/node@24.7.2",
2187
+
"esbuild",
2188
+
"fdir",
2189
+
"picomatch@4.0.3",
2190
+
"postcss",
2191
+
"rollup",
2192
+
"tinyglobby"
2193
+
],
2194
+
"optionalDependencies": [
2195
+
"fsevents"
2196
+
],
2197
+
"optionalPeers": [
2198
+
"@types/node@24.7.2"
2199
+
],
2200
+
"bin": true
2201
+
},
2202
+
"vite@7.1.10_@types+node@24.7.2_picomatch@4.0.3_@types+node@24.2.0": {
2203
+
"integrity": "sha512-CmuvUBzVJ/e3HGxhg6cYk88NGgTnBoOo7ogtfJJ0fefUWAxN/WDSUa50o+oVBxuIhO8FoEZW0j2eW7sfjs5EtA==",
2204
+
"dependencies": [
2205
+
"@types/node@24.2.0",
2206
"esbuild",
2207
"fdir",
2208
"picomatch@4.0.3",
···
2214
"fsevents"
2215
],
2216
"optionalPeers": [
2217
+
"@types/node@24.2.0"
2218
],
2219
"bin": true
2220
},
2221
+
"web-streams-polyfill@3.3.3": {
2222
+
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="
2223
+
},
2224
"webidl-conversions@3.0.1": {
2225
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
2226
},
···
2230
"tr46",
2231
"webidl-conversions"
2232
]
2233
+
},
2234
+
"ws@8.18.3": {
2235
+
"integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="
2236
},
2237
"xtend@4.0.2": {
2238
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
···
2325
"npm:ts-morph@26.0.0"
2326
]
2327
},
2328
+
"packages/oauth": {
2329
+
"dependencies": [
2330
+
"npm:@libsql/client@0.6.0"
2331
+
]
2332
+
},
2333
"packages/session": {
2334
"dependencies": [
2335
+
"npm:@libsql/client@0.6.0",
2336
"npm:pg@^8.16.3"
2337
]
2338
}
+20
-7
docs/graphql-api.md
···
881
882
**Returns:**
883
884
-
- `blob`: A JSON blob object containing:
885
-
- `ref`: The CID (content identifier) reference for the blob
886
-
- `mimeType`: The MIME type of the uploaded blob
887
-
- `size`: The size of the blob in bytes
888
889
**Example with Variables:**
890
···
897
898
**Usage in Records:**
899
900
-
After uploading a blob, use the returned blob object in your record mutations:
901
902
```graphql
903
mutation UpdateProfile($avatar: JSON) {
···
905
rkey: "self"
906
input: {
907
displayName: "My Name"
908
-
avatar: $avatar # Use the blob object from uploadBlob
909
}
910
) {
911
uri
912
displayName
913
avatar {
914
-
ref
915
mimeType
916
size
917
url(preset: "avatar")
···
919
}
920
}
921
```
922
923
### Create Records
924
···
881
882
**Returns:**
883
884
+
- `blob`: A Blob object containing:
885
+
- `ref` (String): The CID (content identifier) reference for the blob
886
+
- `mimeType` (String): The MIME type of the uploaded blob
887
+
- `size` (Int): The size of the blob in bytes
888
+
- `url` (String): CDN URL for the blob (supports presets)
889
890
**Example with Variables:**
891
···
898
899
**Usage in Records:**
900
901
+
After uploading a blob, use the returned blob object in your record mutations. You can provide the blob as a complete object with `ref` as a String:
902
903
```graphql
904
mutation UpdateProfile($avatar: JSON) {
···
906
rkey: "self"
907
input: {
908
displayName: "My Name"
909
+
avatar: $avatar # Blob object with ref as String (CID)
910
}
911
) {
912
uri
913
displayName
914
avatar {
915
+
ref # Returns as String (CID)
916
mimeType
917
size
918
url(preset: "avatar")
···
920
}
921
}
922
```
923
+
924
+
**Example blob object for mutations:**
925
+
926
+
```json
927
+
{
928
+
"ref": "bafyreigbtj4x7ip5legnfznufuopl4sg4knzc2cof6duas4b3q2fy6swua",
929
+
"mimeType": "image/jpeg",
930
+
"size": 245678
931
+
}
932
+
```
933
+
934
+
**Note:** The GraphQL API automatically handles the conversion between the GraphQL format (where `ref` is a String containing the CID) and the AT Protocol format (where `ref` is an object `{$link: "cid"}`). You always work with `ref` as a simple String in GraphQL queries and mutations.
935
936
### Create Records
937
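To make the conversion described in the note concrete, here is a sketch of both directions with `serde_json`. This is illustrative only; the API's actual conversion code is not shown in this diff:

```rust
use serde_json::{json, Value};

/// GraphQL shape -> AT Protocol shape: wrap the CID string in `{"$link": cid}`.
fn to_atproto(mut blob: Value) -> Value {
    if let Some(cid) = blob.get("ref").and_then(|r| r.as_str()).map(str::to_owned) {
        blob["ref"] = json!({ "$link": cid });
    }
    blob
}

/// AT Protocol shape -> GraphQL shape: flatten `{"$link": cid}` back to a string.
fn to_graphql(mut blob: Value) -> Value {
    if let Some(cid) = blob
        .pointer("/ref/$link")
        .and_then(|l| l.as_str())
        .map(str::to_owned)
    {
        blob["ref"] = json!(cid);
    }
    blob
}

fn main() {
    let gql = json!({
        "ref": "bafyreigbtj4x7ip5legnfznufuopl4sg4knzc2cof6duas4b3q2fy6swua",
        "mimeType": "image/jpeg",
        "size": 245678
    });
    let at = to_atproto(gql.clone());
    assert_eq!(at["ref"]["$link"], gql["ref"]);
    assert_eq!(to_graphql(at), gql);
}
```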
+15
-15
frontend/src/features/docs/handlers.tsx
···
224
return `<p class="mb-4 leading-relaxed text-zinc-700 dark:text-zinc-300">${text}</p>`;
225
};
226
227
-
// Custom list renderers
228
-
renderer.list = function (token: Tokens.List) {
229
-
const ordered = token.ordered;
230
-
const body = token.items
231
-
.map((item: Tokens.ListItem) => {
232
-
const text = this.parser.parseInline(item.tokens);
233
-
return `<li class="mb-1">${text}</li>`;
234
-
})
235
-
.join("");
236
-
const tag = ordered ? "ol" : "ul";
237
-
const listStyle = ordered ? "list-decimal" : "list-disc";
238
-
return `<${tag} class="${listStyle} list-inside my-4 text-zinc-700 dark:text-zinc-300">${body}</${tag}>`;
239
};
240
241
-
renderer.listitem = function (token: Tokens.ListItem) {
242
-
const text = this.parser.parseInline(token.tokens);
243
-
return `<li class="mb-1">${text}</li>`;
244
};
245
246
// Custom strong/bold renderer
···
224
return `<p class="mb-4 leading-relaxed text-zinc-700 dark:text-zinc-300">${text}</p>`;
225
};
226
227
+
// Custom list renderer - wrap with classes
228
+
const originalList = renderer.list.bind(renderer);
229
+
renderer.list = function(token: Tokens.List) {
230
+
const html = originalList(token);
231
+
const type = token.ordered ? 'ol' : 'ul';
232
+
const classNames = token.ordered
233
+
? 'list-decimal list-inside my-4 text-zinc-700 dark:text-zinc-300'
234
+
: 'list-disc list-inside my-4 text-zinc-700 dark:text-zinc-300';
235
+
236
+
return html.replace(new RegExp(`<${type}>`), `<${type} class="${classNames}">`);
237
};
238
239
+
// Custom list item renderer
240
+
const originalListitem = renderer.listitem.bind(renderer);
241
+
renderer.listitem = function(token: Tokens.ListItem) {
242
+
const html = originalListitem(token);
243
+
return html.replace('<li>', '<li class="mb-1">');
244
};
245
246
// Custom strong/bold renderer
+1
frontend/src/features/landing/handlers.tsx
+11
-1
frontend-v2/Dockerfile
···
1
FROM denoland/deno:2.5.1
2
3
# Install sqlite3
4
RUN apt-get update && apt-get install -y sqlite3 && rm -rf /var/lib/apt/lists/*
5
···
29
WORKDIR /app/frontend-v2
30
RUN deno cache ./server/main.ts
31
32
-
# Build the Vite frontend (fs.close bug fixed in Deno 2.5.1)
33
RUN deno task build
34
35
# Expose port
···
1
FROM denoland/deno:2.5.1
2
3
+
# Build arguments for Vite environment variables (embedded at build time)
4
+
ARG VITE_API_URL
5
+
ARG VITE_SLICE_URI
6
+
ARG VITE_ADMIN_DIDS
7
+
8
# Install sqlite3
9
RUN apt-get update && apt-get install -y sqlite3 && rm -rf /var/lib/apt/lists/*
10
···
34
WORKDIR /app/frontend-v2
35
RUN deno cache ./server/main.ts
36
37
+
# Set Vite environment variables for build (these get embedded in the bundle)
38
+
ENV VITE_API_URL=$VITE_API_URL
39
+
ENV VITE_SLICE_URI=$VITE_SLICE_URI
40
+
ENV VITE_ADMIN_DIDS=$VITE_ADMIN_DIDS
41
+
42
+
# Build the Vite frontend
43
RUN deno task build
44
45
# Expose port
+1
-1
frontend-v2/deno.json
+5
-1
frontend-v2/schema.graphql
···
931
}
932
933
type BlobUploadResponse {
934
-
blob: JSON!
935
}
936
937
type CollectionStats {
···
1495
Get statistics for this slice including collection counts, record counts, and actor counts
1496
"""
1497
stats: SliceStats!
1498
}
1499
1500
type NetworkSlicesSliceAggregated {
···
2105
type SyncJob {
2106
id: ID!
2107
jobId: String!
2108
status: String!
2109
createdAt: String!
2110
startedAt: String
···
931
}
932
933
type BlobUploadResponse {
934
+
blob: Blob!
935
}
936
937
type CollectionStats {
···
1495
Get statistics for this slice including collection counts, record counts, and actor counts
1496
"""
1497
stats: SliceStats!
1498
+
1499
+
"""Get all OAuth clients for this slice"""
1500
+
oauthClients: [OAuthClient!]!
1501
}
1502
1503
type NetworkSlicesSliceAggregated {
···
2108
type SyncJob {
2109
id: ID!
2110
jobId: String!
2111
+
sliceUri: String!
2112
status: String!
2113
createdAt: String!
2114
startedAt: String
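The practical effect of moving `oauthClients` onto the slice type is that a slice and its OAuth clients can be fetched in one nested selection, which is exactly what `OAuthClients.tsx` does later in this diff. A minimal illustration of the new shape (the query name is hypothetical):

```ts
// Minimal sketch: oauthClients is now selected through the slice node,
// not as a root field taking a separate $slice argument.
const sliceWithClientsQuery = graphql`
  query SliceWithClientsQuery($where: NetworkSlicesSliceWhereInput) {
    networkSlicesSlices(first: 1, where: $where) {
      edges {
        node {
          uri
          oauthClients {
            clientId
            clientName
          }
        }
      }
    }
  }
`;
```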
**frontend-v2/server/profile-init.ts** (+8 -7)

```diff
 export async function initializeUserProfile(
   userDid: string,
   userHandle: string,
-  tokens: TokenInfo
+  tokens: TokenInfo,
 ): Promise<void> {
   if (!API_URL || !SLICE_URI) {
     console.error("Missing API_URL or VITE_SLICE_URI environment variables");
@@ ... @@
   try {
-    const graphqlUrl = `${API_URL}/graphql?slice=${encodeURIComponent(SLICE_URI)}`;
+    const graphqlUrl = `${API_URL}/graphql?slice=${
+      encodeURIComponent(SLICE_URI)
+    }`;
     const authHeader = `${tokens.tokenType} ${tokens.accessToken}`;

     // 1. Check if profile already exists
@@ ... @@
   if (!bskyResponse.ok) {
-    throw new Error(`Fetch Bluesky profile failed: ${bskyResponse.statusText}`);
+    throw new Error(
+      `Fetch Bluesky profile failed: ${bskyResponse.statusText}`,
+    );
   }

   const bskyData = await bskyResponse.json();
@@ ... @@
     // Reconstruct blob format for AT Protocol
     profileInput.avatar = {
-      $type: "blob",
-      ref: {
-        $link: bskyProfile.avatar.ref,
-      },
+      ref: bskyProfile.avatar.ref,
       mimeType: bskyProfile.avatar.mimeType,
       size: bskyProfile.avatar.size,
     };
```
**frontend-v2/server/routes.ts** (+18 -29)

```diff
       console.error("OAuth authorize error:", error);

       return Response.redirect(
-        new URL(
-          "/login?error=" +
-            encodeURIComponent("Please check your handle and try again."),
-          req.url,
-        ),
+        "/login?error=" +
+          encodeURIComponent("Please check your handle and try again."),
         302,
       );
     }
@@ ... @@
     if (!code || !state) {
       return Response.redirect(
-        new URL(
-          "/login?error=" + encodeURIComponent("Invalid OAuth callback"),
-          req.url,
-        ),
+        "/login?error=" + encodeURIComponent("Invalid OAuth callback"),
         302,
       );
     }
@@ ... @@
     if (!sessionId) {
       return Response.redirect(
-        new URL(
-          "/login?error=" + encodeURIComponent("Failed to create session"),
-          req.url,
-        ),
+        "/login?error=" + encodeURIComponent("Failed to create session"),
         302,
       );
     }
@@ ... @@
         ? "already_on_waitlist"
         : "invite_required";
       return Response.redirect(
-        new URL(`/waitlist?error=${errorCode}`, req.url),
+        `/waitlist?error=${errorCode}`,
         302,
       );
     }
@@ ... @@
       return new Response(null, {
         status: 302,
         headers: {
-          Location: new URL("/", req.url).toString(),
+          Location: "/",
           "Set-Cookie": sessionCookie,
         },
       });
     } catch (error) {
       console.error("OAuth callback error:", error);
       return Response.redirect(
-        new URL(
-          "/login?error=" + encodeURIComponent("Authentication failed"),
-          req.url,
-        ),
+        "/login?error=" + encodeURIComponent("Authentication failed"),
         302,
       );
     }
@@ ... @@
     return new Response(null, {
       status: 302,
       headers: {
-        Location: new URL("/login", req.url).toString(),
+        Location: "/login",
         "Set-Cookie": clearCookie,
       },
     });
@@ ... @@
     } catch (error) {
       console.error("Waitlist initiate error:", error);
       return Response.redirect(
-        new URL("/waitlist?error=authorization_failed", req.url),
+        "/waitlist?error=authorization_failed",
         302,
       );
     }
@@ ... @@
     if (!code || !state) {
       return Response.redirect(
-        new URL("/waitlist?error=invalid_callback", req.url),
+        "/waitlist?error=invalid_callback",
         302,
       );
     }
@@ ... @@
     if (!userInfo) {
       return Response.redirect(
-        new URL("/waitlist?error=no_user_info", req.url),
+        "/waitlist?error=no_user_info",
         302,
       );
     }
@@ ... @@
     if (!sliceUri) {
       console.error("Missing VITE_SLICE_URI environment variable");
       return Response.redirect(
-        new URL("/waitlist?error=waitlist_failed", req.url),
+        "/waitlist?error=waitlist_failed",
         302,
       );
     }
@@ ... @@
     // Redirect back to waitlist page with success parameter
     const handle = userInfo.name || waitlistData.handle || "user";
-    const redirectUrl = new URL("/waitlist", req.url);
-    redirectUrl.searchParams.set("waitlist", "success");
-    redirectUrl.searchParams.set("handle", handle);
-    return Response.redirect(redirectUrl.toString(), 302);
+    const params = new URLSearchParams({
+      waitlist: "success",
+      handle,
+    });
+    return Response.redirect(`/waitlist?${params.toString()}`, 302);
   } catch (error) {
     console.error("Waitlist callback error:", error);
     return Response.redirect(
-      new URL("/waitlist?error=waitlist_failed", req.url),
+      "/waitlist?error=waitlist_failed",
       302,
     );
   }
```
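The pattern repeated throughout this file — a 302 to a relative path, sometimes with a cookie — could be factored into one small helper. A hypothetical sketch, not part of the diff:

```ts
// Hypothetical helper consolidating the redirect pattern used above.
// Relative paths are passed through unchanged, matching the new behavior.
function redirect(location: string, cookie?: string): Response {
  const headers = new Headers({ Location: location });
  if (cookie) headers.set("Set-Cookie", cookie);
  return new Response(null, { status: 302, headers });
}

// e.g.:
// return redirect("/login?error=" + encodeURIComponent("Authentication failed"));
// return redirect("/", sessionCookie);
```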
**frontend-v2/src/__generated__/OAuthClientsQuery.graphql.ts** (+66 -68)

```diff
 /**
- * @generated SignedSource<<4c24e57ecdb7163fc62f4069422aac37>>
+ * @generated SignedSource<<4bc4595b4bf2e4b263476f66a31ccca4>>
  * @lightSyntaxTransform
  * @nogrep
  */
@@ ... @@
   lte?: string | null | undefined;
 };
 export type OAuthClientsQuery$variables = {
-  slice: string;
   where?: NetworkSlicesSliceWhereInput | null | undefined;
 };
 export type OAuthClientsQuery$data = {
@@ ... @@
       readonly actorHandle: string | null | undefined;
       readonly did: string;
       readonly name: string;
+      readonly oauthClients: ReadonlyArray<{
+        readonly clientId: string;
+        readonly clientName: string;
+        readonly clientSecret: string | null | undefined;
+        readonly clientUri: string | null | undefined;
+        readonly createdAt: string;
+        readonly createdByDid: string;
+        readonly logoUri: string | null | undefined;
+        readonly policyUri: string | null | undefined;
+        readonly redirectUris: ReadonlyArray<string>;
+        readonly scope: string | null | undefined;
+        readonly tosUri: string | null | undefined;
+      }>;
+      readonly uri: string;
     };
   }>;
 };
-  readonly oauthClients: ReadonlyArray<{
-    readonly clientId: string;
-    readonly clientName: string;
-    readonly clientSecret: string | null | undefined;
-    readonly clientUri: string | null | undefined;
-    readonly createdAt: string;
-    readonly createdByDid: string;
-    readonly logoUri: string | null | undefined;
-    readonly policyUri: string | null | undefined;
-    readonly redirectUris: ReadonlyArray<string>;
-    readonly scope: string | null | undefined;
-    readonly tosUri: string | null | undefined;
-  }>;
 export type OAuthClientsQuery = {
   response: OAuthClientsQuery$data;
@@ ... @@
   {
     "defaultValue": null,
     "kind": "LocalArgument",
-    "name": "slice"
-  },
-  {
-    "defaultValue": null,
-    "kind": "LocalArgument",
     "name": "where"
   }
 ],
-v1 = {
-  "alias": null,
-  "args": [
-    {
-      "kind": "Variable",
-      "name": "slice",
-      "variableName": "slice"
-    }
-  ],
+v1 = [
+  {
+    "kind": "Literal",
+    "name": "first",
+    "value": 1
+  },
+  {
+    "kind": "Variable",
+    "name": "where",
+    "variableName": "where"
+  }
+],
+v2 = {
+  "alias": null,
+  "args": null,
+  "kind": "ScalarField",
+  "name": "name",
+  "storageKey": null
+},
+v3 = {
+  "alias": null,
+  "args": null,
+  "kind": "ScalarField",
+  "name": "did",
+  "storageKey": null
+},
+v4 = {
+  "alias": null,
+  "args": null,
+  "kind": "ScalarField",
+  "name": "actorHandle",
+  "storageKey": null
+},
+v5 = {
+  "alias": null,
+  "args": null,
+  "kind": "ScalarField",
+  "name": "uri",
+  "storageKey": null
+},
+v6 = {
+  "alias": null,
+  "args": null,
   "concreteType": "OAuthClient",
   "kind": "LinkedField",
   "name": "oauthClients",
@@ ... @@
   ],
   "storageKey": null
-},
-v2 = [
-  {
-    "kind": "Literal",
-    "name": "first",
-    "value": 1
-  },
-  {
-    "kind": "Variable",
-    "name": "where",
-    "variableName": "where"
-  }
-],
-v3 = {
-  "alias": null,
-  "args": null,
-  "kind": "ScalarField",
-  "name": "name",
-  "storageKey": null
-},
-v4 = {
-  "alias": null,
-  "args": null,
-  "kind": "ScalarField",
-  "name": "did",
-  "storageKey": null
-},
-v5 = {
-  "alias": null,
-  "args": null,
-  "kind": "ScalarField",
-  "name": "actorHandle",
-  "storageKey": null
 };
 return {
   "fragment": {
@@ ... @@
     "metadata": null,
     "name": "OAuthClientsQuery",
     "selections": [
-      (v1/*: any*/),
       {
         "alias": null,
-        "args": (v2/*: any*/),
+        "args": (v1/*: any*/),
         "concreteType": "NetworkSlicesSliceConnection",
         "kind": "LinkedField",
         "name": "networkSlicesSlices",
@@ ... @@
             "name": "node",
             "plural": false,
             "selections": [
+              (v2/*: any*/),
               (v3/*: any*/),
               (v4/*: any*/),
-              (v5/*: any*/)
+              (v5/*: any*/),
+              (v6/*: any*/)
             ],
             "storageKey": null
           }
@@ ... @@
   "kind": "Operation",
   "name": "OAuthClientsQuery",
   "selections": [
-    (v1/*: any*/),
     {
       "alias": null,
-      "args": (v2/*: any*/),
+      "args": (v1/*: any*/),
       "concreteType": "NetworkSlicesSliceConnection",
       "kind": "LinkedField",
       "name": "networkSlicesSlices",
@@ ... @@
           "name": "node",
           "plural": false,
           "selections": [
+            (v2/*: any*/),
             (v3/*: any*/),
             (v4/*: any*/),
             (v5/*: any*/),
+            (v6/*: any*/),
             {
               "alias": null,
               "args": null,
@@ ... @@
   ]
 },
 "params": {
-  "cacheID": "20abc4b49d5c52da4a3ad1935662056a",
+  "cacheID": "953a2b7074ba3074cca3f11991af440e",
   "id": null,
   "metadata": {},
   "name": "OAuthClientsQuery",
   "operationKind": "query",
-  "text": "query OAuthClientsQuery(\n $slice: String!\n $where: NetworkSlicesSliceWhereInput\n) {\n oauthClients(slice: $slice) {\n clientId\n clientSecret\n clientName\n redirectUris\n scope\n clientUri\n logoUri\n tosUri\n policyUri\n createdAt\n createdByDid\n }\n networkSlicesSlices(first: 1, where: $where) {\n edges {\n node {\n name\n did\n actorHandle\n id\n }\n }\n }\n}\n"
+  "text": "query OAuthClientsQuery(\n $where: NetworkSlicesSliceWhereInput\n) {\n networkSlicesSlices(first: 1, where: $where) {\n edges {\n node {\n name\n did\n actorHandle\n uri\n oauthClients {\n clientId\n clientSecret\n clientName\n redirectUris\n scope\n clientUri\n logoUri\n tosUri\n policyUri\n createdAt\n createdByDid\n }\n id\n }\n }\n }\n}\n"
 }
 };
 })();

-(node as any).hash = "b3eda4c7e0bda285a5261efa81e7b5cd";
+(node as any).hash = "4c0e3d21f0879129255130f260edcb75";

 export default node;
```
**frontend-v2/src/__generated__/ProfileSettingsUploadBlobMutation.graphql.ts** (+35 -6)

```diff
 /**
- * @generated SignedSource<<a2334c7e93bb6d5b4748df1211a418ae>>
+ * @generated SignedSource<<728b9a3525f975b6c58a5cdcd323f89e>>
  * @lightSyntaxTransform
  * @nogrep
  */
@@ ... @@
 };
 export type ProfileSettingsUploadBlobMutation$data = {
   readonly uploadBlob: {
-    readonly blob: any;
+    readonly blob: {
+      readonly mimeType: string;
+      readonly ref: string;
+      readonly size: number;
+    };
   };
 };
 export type ProfileSettingsUploadBlobMutation = {
@@ ... @@
     {
       "alias": null,
       "args": null,
-      "kind": "ScalarField",
+      "concreteType": "Blob",
+      "kind": "LinkedField",
       "name": "blob",
+      "plural": false,
+      "selections": [
+        {
+          "alias": null,
+          "args": null,
+          "kind": "ScalarField",
+          "name": "ref",
+          "storageKey": null
+        },
+        {
+          "alias": null,
+          "args": null,
+          "kind": "ScalarField",
+          "name": "mimeType",
+          "storageKey": null
+        },
+        {
+          "alias": null,
+          "args": null,
+          "kind": "ScalarField",
+          "name": "size",
+          "storageKey": null
+        }
+      ],
       "storageKey": null
     }
   ],
@@ ... @@
     "selections": (v1/*: any*/)
   },
   "params": {
-    "cacheID": "3a4a6b19d2898f14635b098941614cab",
+    "cacheID": "afd8db2ee7590308e81afc0b0e5c86dd",
     "id": null,
     "metadata": {},
     "name": "ProfileSettingsUploadBlobMutation",
     "operationKind": "mutation",
-    "text": "mutation ProfileSettingsUploadBlobMutation(\n $data: String!\n $mimeType: String!\n) {\n uploadBlob(data: $data, mimeType: $mimeType) {\n blob\n }\n}\n"
+    "text": "mutation ProfileSettingsUploadBlobMutation(\n $data: String!\n $mimeType: String!\n) {\n uploadBlob(data: $data, mimeType: $mimeType) {\n blob {\n ref\n mimeType\n size\n }\n }\n}\n"
   }
 };
 })();

-(node as any).hash = "76da65b07a282ed7f2dee12b4cac82d6";
+(node as any).hash = "74a3a8bf43181cd62d2e81c45be384e5";

 export default node;
```
**frontend-v2/src/__generated__/PublishedLexiconsListQuery.graphql.ts** (new file, +173)

```ts
/**
 * @generated SignedSource<<af4ff104aee13ec3df07677ea9ef9ce7>>
 * @lightSyntaxTransform
 * @nogrep
 */

/* tslint:disable */
/* eslint-disable */
// @ts-nocheck

import { ConcreteRequest } from 'relay-runtime';
export type SliceRecordsWhereInput = {
  cid?: StringFilter | null | undefined;
  collection?: StringFilter | null | undefined;
  did?: StringFilter | null | undefined;
  indexedAt?: DateTimeFilter | null | undefined;
  json?: StringFilter | null | undefined;
  or?: ReadonlyArray<SliceRecordsWhereInput | null | undefined> | null | undefined;
  uri?: StringFilter | null | undefined;
};
export type StringFilter = {
  contains?: string | null | undefined;
  eq?: string | null | undefined;
  fuzzy?: string | null | undefined;
  gt?: string | null | undefined;
  gte?: string | null | undefined;
  in?: ReadonlyArray<string | null | undefined> | null | undefined;
  lt?: string | null | undefined;
  lte?: string | null | undefined;
};
export type DateTimeFilter = {
  eq?: string | null | undefined;
  gt?: string | null | undefined;
  gte?: string | null | undefined;
  lt?: string | null | undefined;
  lte?: string | null | undefined;
};
export type PublishedLexiconsListQuery$variables = {
  sliceUri: string;
  where?: SliceRecordsWhereInput | null | undefined;
};
export type PublishedLexiconsListQuery$data = {
  readonly sliceRecords: {
    readonly edges: ReadonlyArray<{
      readonly node: {
        readonly collection: string;
        readonly uri: string;
        readonly value: string;
      };
    }>;
  };
};
export type PublishedLexiconsListQuery = {
  response: PublishedLexiconsListQuery$data;
  variables: PublishedLexiconsListQuery$variables;
};

const node: ConcreteRequest = (function(){
var v0 = [
  {
    "defaultValue": null,
    "kind": "LocalArgument",
    "name": "sliceUri"
  },
  {
    "defaultValue": null,
    "kind": "LocalArgument",
    "name": "where"
  }
],
v1 = [
  {
    "alias": null,
    "args": [
      {
        "kind": "Literal",
        "name": "first",
        "value": 1000
      },
      {
        "kind": "Variable",
        "name": "sliceUri",
        "variableName": "sliceUri"
      },
      {
        "kind": "Variable",
        "name": "where",
        "variableName": "where"
      }
    ],
    "concreteType": "SliceRecordsConnection",
    "kind": "LinkedField",
    "name": "sliceRecords",
    "plural": false,
    "selections": [
      {
        "alias": null,
        "args": null,
        "concreteType": "SliceRecordEdge",
        "kind": "LinkedField",
        "name": "edges",
        "plural": true,
        "selections": [
          {
            "alias": null,
            "args": null,
            "concreteType": "SliceRecord",
            "kind": "LinkedField",
            "name": "node",
            "plural": false,
            "selections": [
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "uri",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "collection",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "value",
                "storageKey": null
              }
            ],
            "storageKey": null
          }
        ],
        "storageKey": null
      }
    ],
    "storageKey": null
  }
];
return {
  "fragment": {
    "argumentDefinitions": (v0/*: any*/),
    "kind": "Fragment",
    "metadata": null,
    "name": "PublishedLexiconsListQuery",
    "selections": (v1/*: any*/),
    "type": "Query",
    "abstractKey": null
  },
  "kind": "Request",
  "operation": {
    "argumentDefinitions": (v0/*: any*/),
    "kind": "Operation",
    "name": "PublishedLexiconsListQuery",
    "selections": (v1/*: any*/)
  },
  "params": {
    "cacheID": "9376c8881ba959a67c32afef675e6baa",
    "id": null,
    "metadata": {},
    "name": "PublishedLexiconsListQuery",
    "operationKind": "query",
    "text": "query PublishedLexiconsListQuery(\n $sliceUri: String!\n $where: SliceRecordsWhereInput\n) {\n sliceRecords(sliceUri: $sliceUri, first: 1000, where: $where) {\n edges {\n node {\n uri\n collection\n value\n }\n }\n }\n}\n"
  }
};
})();

(node as any).hash = "c51dfb1274d633dc4db99d903954a58e";

export default node;
```
**frontend-v2/src/components/CopyableField.tsx** (+25 -1)

```diff
 interface CopyableFieldProps {
   value: string;
   label: string;
+  variant?: "input" | "inline";
 }

-export function CopyableField({ value, label }: CopyableFieldProps) {
+export function CopyableField({ value, label, variant = "input" }: CopyableFieldProps) {
   const [copied, setCopied] = useState(false);

   const handleCopy = async () => {
@@ ... @@
       console.error("Failed to copy:", err);
     }
   };
+
+  if (variant === "inline") {
+    return (
+      <div className="flex items-center gap-2 group">
+        <span className="text-xs text-zinc-500">{label}:</span>
+        <span className="font-mono text-xs text-zinc-600 break-all">
+          {value}
+        </span>
+        <button
+          type="button"
+          onClick={handleCopy}
+          className="p-1 text-zinc-400 hover:text-zinc-300 transition-colors rounded hover:bg-zinc-700/50 flex-shrink-0 opacity-0 group-hover:opacity-100"
+          title="Copy to clipboard"
+        >
+          {copied ? (
+            <Check size={14} className="text-green-400" />
+          ) : (
+            <Copy size={14} />
+          )}
+        </button>
+      </div>
+    );
+  }

   return (
     <div>
```
**frontend-v2/src/components/CreateLexiconDialog.tsx** (+183 -38)

```diff
 import { FormControl } from "./FormControl.tsx";
 import { Textarea } from "./Textarea.tsx";
 import { Button } from "./Button.tsx";
+import { PublishedLexiconsList } from "./PublishedLexiconsList.tsx";
 import type { CreateLexiconDialogMutation } from "../__generated__/CreateLexiconDialogMutation.graphql.ts";
 import "../components/LexiconTree.tsx"; // Import for fragment
@@ ... @@
   sliceUri: string;
   existingNsids: string[];
 }
+
+type SourceType = 'published' | 'new' | null;

 export function CreateLexiconDialog({
   open,
@@ ... @@
   sliceUri,
   existingNsids,
 }: CreateLexiconDialogProps) {
+  const [step, setStep] = useState<1 | 2>(1);
+  const [sourceType, setSourceType] = useState<SourceType>(null);
   const [lexiconJson, setLexiconJson] = useState("");
   const [error, setError] = useState("");
   const [isValidating, setIsValidating] = useState(false);
@@ ... @@
     if (isValidating) {
       return; // Prevent closing while validation is in progress
     }
+    setStep(1);
+    setSourceType(null);
     setLexiconJson("");
     setError("");
     setIsValidating(false);
     onClose();
   };

+  const handleSourceSelect = (type: SourceType) => {
+    setSourceType(type);
+    setStep(2);
+    setError("");
+  };
+
+  const handleBack = () => {
+    setStep(1);
+    setSourceType(null);
+    setLexiconJson("");
+    setError("");
+  };
+
   return (
     <Dialog
       open={open}
       onClose={handleClose}
-      title="Add Lexicon Definition"
+      title={step === 1 ? "Add Lexicon Definition" : sourceType === 'published' ? "Select Published Lexicon" : "Create New Lexicon"}
       maxWidth="xl"
     >
       {error && (
@@ ... @@
         </div>
       )}

-      <form className="space-y-4">
-        <FormControl label="Lexicon JSON">
-          <Textarea
-            value={lexiconJson}
-            onChange={(e) => setLexiconJson(e.target.value)}
-            rows={16}
-            className="font-mono"
-            placeholder={`{
+      {step === 1 ? (
+        <div className="space-y-4">
+          <p className="text-sm text-zinc-400 mb-4">
+            Choose how you'd like to add a lexicon:
+          </p>
+
+          <div className="space-y-3">
+            <button
+              type="button"
+              onClick={() => handleSourceSelect('published')}
+              className="w-full text-left p-4 bg-zinc-900/50 hover:bg-zinc-800/50 border border-zinc-800 hover:border-zinc-700 rounded transition-colors"
+            >
+              <h3 className="text-sm font-medium text-zinc-200 mb-1">
+                Add from Published Lexicons
+              </h3>
+              <p className="text-xs text-zinc-500">
+                Browse and select from community-published AT Protocol lexicons
+              </p>
+            </button>
+
+            <button
+              type="button"
+              onClick={() => handleSourceSelect('new')}
+              className="w-full text-left p-4 bg-zinc-900/50 hover:bg-zinc-800/50 border border-zinc-800 hover:border-zinc-700 rounded transition-colors"
+            >
+              <h3 className="text-sm font-medium text-zinc-200 mb-1">
+                Create New Lexicon
+              </h3>
+              <p className="text-xs text-zinc-500">
+                Write a custom lexicon definition from scratch
+              </p>
+            </button>
+          </div>
+
+          <div className="flex justify-end gap-3 pt-4">
+            <Button
+              type="button"
+              variant="default"
+              onClick={handleClose}
+            >
+              Cancel
+            </Button>
+          </div>
+        </div>
+      ) : sourceType === 'new' ? (
+        <form className="space-y-4">
+          <FormControl label="Lexicon JSON">
+            <Textarea
+              value={lexiconJson}
+              onChange={(e) => setLexiconJson(e.target.value)}
+              rows={16}
+              className="font-mono"
+              placeholder={`{
   "lexicon": 1,
   "id": "network.slices.example",
   "description": "Example record type",
@@ ... @@
     }
   }
 }`}
-            disabled={isMutationInFlight}
-          />
-          <p className="mt-1 text-xs text-zinc-500">
-            Paste a valid AT Protocol lexicon definition in JSON format
-          </p>
-        </FormControl>
-
-        <div className="flex justify-end gap-3 pt-4">
-          <Button
-            type="button"
-            variant="default"
-            onClick={handleClose}
-            disabled={isMutationInFlight}
-          >
-            Cancel
-          </Button>
-          <Button
-            type="button"
-            variant="primary"
-            onClick={(e) => {
-              e.preventDefault();
-              e.stopPropagation();
-              handleSubmit(e);
-            }}
-            disabled={isMutationInFlight || isValidating}
-          >
-            {isMutationInFlight ? "Adding..." : "Add Lexicon"}
-          </Button>
-        </div>
-      </form>
+              disabled={isMutationInFlight}
+            />
+            <p className="mt-1 text-xs text-zinc-500">
+              Paste a valid AT Protocol lexicon definition in JSON format
+            </p>
+          </FormControl>
+
+          <div className="flex justify-between gap-3 pt-4">
+            <Button
+              type="button"
+              variant="default"
+              onClick={handleBack}
+              disabled={isMutationInFlight}
+            >
+              Back
+            </Button>
+            <div className="flex gap-3">
+              <Button
+                type="button"
+                variant="default"
+                onClick={handleClose}
+                disabled={isMutationInFlight}
+              >
+                Cancel
+              </Button>
+              <Button
+                type="button"
+                variant="primary"
+                onClick={(e) => {
+                  e.preventDefault();
+                  e.stopPropagation();
+                  handleSubmit(e);
+                }}
+                disabled={isMutationInFlight || isValidating}
+              >
+                {isMutationInFlight ? "Adding..." : "Add Lexicon"}
+              </Button>
+            </div>
+          </div>
+        </form>
+      ) : (
+        <PublishedLexiconsList
+          existingNsids={existingNsids}
+          onSelect={(lexicons) => {
+            // Add all lexicons directly without going to JSON editor
+            lexicons.forEach((lexicon) => {
+              const lexiconData = lexicon.data as Record<string, unknown>;
+              const defs = lexiconData.defs || lexiconData.definitions;
+              const nsid = lexicon.nsid;
+              const definitionsString = JSON.stringify(defs);
+
+              commitMutation({
+                variables: {
+                  input: {
+                    nsid,
+                    description: (lexiconData.description as string) || "",
+                    definitions: definitionsString,
+                    slice: sliceUri,
+                    createdAt: new Date().toISOString(),
+                    excludedFromSync: false,
+                  },
+                },
+                onCompleted: () => {
+                  // Only close dialog after all mutations complete
+                  // (This will be called for each lexicon)
+                },
+                onError: (err) => {
+                  setError(err.message || "Failed to create lexicon");
+                },
+                updater: (store) => {
+                  const newLexicon = store.getRootField("createNetworkSlicesLexicon");
+                  if (!newLexicon) return;
+
+                  // Extract the rkey from the slice URI (e.g., "at://did/collection/rkey" -> "rkey")
+                  const sliceRkey = sliceUri.split("/").pop();
+                  if (!sliceRkey) return;
+
+                  // Use ConnectionHandler to get the connection
+                  const root = store.getRoot();
+                  const connection = ConnectionHandler.getConnection(
+                    root,
+                    "SliceOverview_networkSlicesLexicons",
+                    {
+                      where: {
+                        slice: { contains: sliceRkey }
+                      }
+                    }
+                  );
+
+                  if (connection) {
+                    // Create and insert a new edge
+                    const newEdge = ConnectionHandler.createEdge(
+                      store,
+                      connection,
+                      newLexicon,
+                      "NetworkSlicesLexiconEdge"
+                    );
+                    ConnectionHandler.insertEdgeAfter(connection, newEdge);
+                  }
+                },
+              });
+            });
+
+            // Close dialog after submitting all mutations
+            handleClose();
+          }}
+          onBack={handleBack}
+          onCancel={handleClose}
+        />
+      )}
     </Dialog>
   );
 }
```
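One caveat visible in the diff: `handleClose()` runs immediately after the `forEach`, while each `commitMutation` completes asynchronously, so failures surface only through `setError` after the dialog is gone. If closing should instead wait for every mutation, one hedged alternative is to wrap each commit in a Promise; `variablesFor` below is a hypothetical helper standing in for the `variables` object built in the diff:

```ts
// Hedged sketch: wait for all lexicon mutations before closing the dialog.
const commits = lexicons.map((lexicon) =>
  new Promise<void>((resolve, reject) => {
    commitMutation({
      variables: variablesFor(lexicon), // hypothetical: same input as above
      onCompleted: () => resolve(),
      onError: (err) => reject(err),
    });
  })
);

Promise.all(commits)
  .then(() => handleClose())
  .catch((err) => setError(err.message || "Failed to create lexicon"));
```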
**frontend-v2/src/components/LexiconDependencyConfirmationDialog.tsx** (new file, +74)

```tsx
import { Dialog } from "./Dialog.tsx";
import { Button } from "./Button.tsx";

interface LexiconDependencyConfirmationDialogProps {
  open: boolean;
  mainLexiconNsid: string;
  dependencies: string[];
  onConfirm: () => void;
  onCancel: () => void;
}

export function LexiconDependencyConfirmationDialog({
  open,
  mainLexiconNsid,
  dependencies,
  onConfirm,
  onCancel,
}: LexiconDependencyConfirmationDialogProps) {
  const totalCount = 1 + dependencies.length;

  return (
    <Dialog
      open={open}
      onClose={onCancel}
      title="Add Lexicon with Dependencies"
      maxWidth="md"
    >
      <div className="space-y-4">
        <p className="text-sm text-zinc-400">
          This lexicon requires {dependencies.length} {dependencies.length === 1 ? "dependency" : "dependencies"}.
          All {totalCount} lexicons will be added to your slice.
        </p>

        <div className="space-y-3">
          <div>
            <h3 className="text-xs font-medium text-zinc-500 uppercase tracking-wider mb-2">
              Selected Lexicon
            </h3>
            <div className="font-mono text-sm text-zinc-200">
              {mainLexiconNsid}
            </div>
          </div>

          {dependencies.length > 0 && (
            <div>
              <h3 className="text-xs font-medium text-zinc-500 uppercase tracking-wider mb-2">
                Dependencies ({dependencies.length})
              </h3>
              <div className="space-y-1">
                {dependencies.map((nsid) => (
                  <div
                    key={nsid}
                    className="font-mono text-sm text-zinc-400 pl-4"
                  >
                    {nsid}
                  </div>
                ))}
              </div>
            </div>
          )}
        </div>

        <div className="flex justify-end gap-3 pt-4">
          <Button type="button" variant="default" onClick={onCancel}>
            Cancel
          </Button>
          <Button type="button" variant="primary" onClick={onConfirm}>
            Add All ({totalCount})
          </Button>
        </div>
      </div>
    </Dialog>
  );
}
```
**frontend-v2/src/components/PublishedLexiconsList.tsx** (new file, +266)

```tsx
import { useState } from "react";
import { graphql, useLazyLoadQuery } from "react-relay";
import { FormControl } from "./FormControl.tsx";
import { Input } from "./Input.tsx";
import { Button } from "./Button.tsx";
import { LexiconDependencyConfirmationDialog } from "./LexiconDependencyConfirmationDialog.tsx";
import { resolveDependencies } from "../utils/lexiconDependencies.ts";
import type { PublishedLexiconsListQuery } from "../__generated__/PublishedLexiconsListQuery.graphql.ts";

interface PublishedLexicon {
  uri: string;
  nsid: string;
  description?: string;
  defs: unknown;
  fullData: unknown;
}

interface LexiconWithData {
  nsid: string;
  data: unknown;
}

interface PublishedLexiconsListProps {
  existingNsids: string[];
  onSelect: (lexicons: LexiconWithData[]) => void;
  onBack: () => void;
  onCancel: () => void;
}

const PUBLISHED_LEXICONS_SLICE_URI = "at://did:plc:dzmqinfp7efnofbqg5npjmth/network.slices.slice/3m3fsrppc3p2h";

export function PublishedLexiconsList({
  existingNsids,
  onSelect,
  onBack,
  onCancel,
}: PublishedLexiconsListProps) {
  const [searchQuery, setSearchQuery] = useState("");
  const [showDepsDialog, setShowDepsDialog] = useState(false);
  const [selectedLexicon, setSelectedLexicon] = useState<LexiconWithData | null>(null);
  const [resolvedDeps, setResolvedDeps] = useState<LexiconWithData[]>([]);

  const data = useLazyLoadQuery<PublishedLexiconsListQuery>(
    graphql`
      query PublishedLexiconsListQuery(
        $sliceUri: String!
        $where: SliceRecordsWhereInput
      ) {
        sliceRecords(sliceUri: $sliceUri, first: 1000, where: $where) {
          edges {
            node {
              uri
              collection
              value
            }
          }
        }
      }
    `,
    {
      sliceUri: PUBLISHED_LEXICONS_SLICE_URI,
      where: {
        collection: { eq: "com.atproto.lexicon.schema" },
      },
    },
    {
      fetchPolicy: "store-and-network",
    }
  );

  // Parse and filter published lexicons
  const publishedLexicons = data.sliceRecords.edges
    .map((edge) => {
      try {
        const lexiconData = JSON.parse(edge.node.value);
        const nsid = lexiconData.id || lexiconData.nsid;
        const defs = lexiconData.defs || lexiconData.definitions;

        if (!nsid || !defs) return null;

        return {
          uri: edge.node.uri,
          nsid,
          description: lexiconData.description,
          defs,
          fullData: lexiconData,
        } as PublishedLexicon;
      } catch {
        return null;
      }
    })
    .filter((lex): lex is PublishedLexicon => lex !== null);

  // Filter by search query
  const filteredLexicons = publishedLexicons.filter((lex) => {
    if (!searchQuery) return true;
    const query = searchQuery.toLowerCase();
    return (
      lex.nsid.toLowerCase().includes(query) ||
      lex.description?.toLowerCase().includes(query)
    );
  });

  // Check if lexicon already exists in slice
  const isAlreadyAdded = (nsid: string) => existingNsids.includes(nsid);

  // Handle lexicon selection with dependency resolution
  const handleLexiconClick = (lexicon: PublishedLexicon) => {
    if (isAlreadyAdded(lexicon.nsid)) return;

    // Convert to LexiconWithData format
    const mainLexicon: LexiconWithData = {
      nsid: lexicon.nsid,
      data: lexicon.fullData,
    };

    // Convert all published lexicons to LexiconWithData format
    const allLexicons: LexiconWithData[] = publishedLexicons.map(lex => ({
      nsid: lex.nsid,
      data: lex.fullData,
    }));

    // Resolve dependencies
    const dependencies = resolveDependencies(mainLexicon, allLexicons, existingNsids);

    // If there are dependencies, show confirmation dialog
    if (dependencies.length > 0) {
      setSelectedLexicon(mainLexicon);
      setResolvedDeps(dependencies);
      setShowDepsDialog(true);
    } else {
      // No dependencies, add directly
      onSelect([mainLexicon]);
    }
  };

  // Handle confirmation dialog confirmation
  const handleConfirmDeps = () => {
    if (selectedLexicon) {
      onSelect([selectedLexicon, ...resolvedDeps]);
    }
    setShowDepsDialog(false);
    setSelectedLexicon(null);
    setResolvedDeps([]);
  };

  // Handle confirmation dialog cancellation
  const handleCancelDeps = () => {
    setShowDepsDialog(false);
    setSelectedLexicon(null);
    setResolvedDeps([]);
  };

  return (
    <div className="space-y-4">
      <FormControl label="Search Lexicons" htmlFor="search">
        <Input
          id="search"
          type="text"
          value={searchQuery}
          onChange={(e) => setSearchQuery(e.target.value)}
          placeholder="Filter by NSID or description..."
        />
      </FormControl>

      <div className="h-96 overflow-y-auto">
        {filteredLexicons.length === 0 ? (
          <div className="text-center py-8 text-sm text-zinc-500">
            {searchQuery ? "No lexicons match your search" : "No published lexicons found"}
          </div>
        ) : (
          filteredLexicons.map((lexicon) => {
            const alreadyAdded = isAlreadyAdded(lexicon.nsid);
            const parts = lexicon.nsid.split(".");
            const authority = parts.length >= 2 ? `${parts[0]}.${parts[1]}` : parts[0];
            const rest = parts.length >= 2 ? parts.slice(2).join(".") : "";

            // Check if this is a record type lexicon
            let isRecordType = false;
            try {
              const defs = lexicon.defs as Record<string, { type?: string }> | undefined;
              isRecordType = defs?.main?.type === "record";
            } catch {
              // ignore
            }

            // Split the rest into middle and last part if it's a record type
            let middle = rest;
            let lastPart = "";
            if (isRecordType && rest) {
              const restParts = rest.split(".");
              if (restParts.length > 1) {
                lastPart = restParts[restParts.length - 1];
                middle = restParts.slice(0, -1).join(".");
              } else {
                lastPart = rest;
                middle = "";
              }
            }

            return (
              <button
                key={lexicon.uri}
                type="button"
                onClick={() => handleLexiconClick(lexicon)}
                disabled={alreadyAdded}
                className={`w-full text-left py-1 rounded group transition-colors ${
                  alreadyAdded
                    ? "opacity-50 cursor-not-allowed"
                    : "hover:bg-zinc-900/50 cursor-pointer"
                }`}
              >
                <div className="flex items-center gap-2">
                  <span className="text-sm font-medium font-mono">
                    <span className="text-zinc-200">{authority}</span>
                    {isRecordType ? (
                      <>
                        {middle && <span className="text-zinc-400">.{middle}</span>}
                        {lastPart && (
                          <>
                            <span className="text-zinc-400">.</span>
                            <span className="text-cyan-400">{lastPart}</span>
                          </>
                        )}
                      </>
                    ) : (
                      rest && <span className="text-zinc-400">.{rest}</span>
                    )}
                  </span>
                  {alreadyAdded && (
                    <span className="text-xs text-zinc-600">
                      (added)
                    </span>
                  )}
                  {lexicon.description && (
                    <span className="text-xs text-zinc-600 truncate">
                      {lexicon.description}
                    </span>
                  )}
                </div>
              </button>
            );
          })
        )}
      </div>

      <div className="flex justify-between gap-3 pt-4">
        <Button type="button" variant="default" onClick={onBack}>
          Back
        </Button>
        <Button type="button" variant="default" onClick={onCancel}>
          Cancel
        </Button>
      </div>

      {/* Dependency confirmation dialog */}
      <LexiconDependencyConfirmationDialog
        open={showDepsDialog}
        mainLexiconNsid={selectedLexicon?.nsid || ""}
        dependencies={resolvedDeps.map(dep => dep.nsid)}
        onConfirm={handleConfirmDeps}
        onCancel={handleCancelDeps}
      />
    </div>
  );
}
```
**frontend-v2/src/config/auth.ts** (deleted, -21)

```ts
/**
 * OAuth configuration for the application.
 *
 * Environment variables should be defined in .env file:
 * - VITE_AUTH_BASE_URL: Base URL of the OAuth server
 * - VITE_OAUTH_CLIENT_ID: OAuth client ID
 * - VITE_OAUTH_CLIENT_SECRET: OAuth client secret (optional for public clients)
 */

export const oauthConfig = {
  clientId: import.meta.env.VITE_OAUTH_CLIENT_ID || "",
  clientSecret: import.meta.env.VITE_OAUTH_CLIENT_SECRET || "",
  authBaseUrl: import.meta.env.VITE_AUTH_BASE_URL || "http://localhost:8081",
  redirectUri:
    import.meta.env.VITE_OAUTH_REDIRECT_URI ||
    `${globalThis.location.origin}/oauth/callback`,
  silentRedirectUri:
    import.meta.env.VITE_OAUTH_SILENT_REDIRECT_URI ||
    `${globalThis.location.origin}/silent-refresh`,
  scopes: ["openid", "profile", "atproto", "repo:*"],
};
```
**frontend-v2/src/generateChartData.ts** (+5 -2)
**frontend-v2/src/pages/OAuthClients.tsx** (+19 -23)

```diff
 export default function OAuthClients() {
   const { handle, rkey } = useParams<{ handle: string; rkey: string }>();

-  // Build slice URI from params
-  const sliceUri =
-    `at://did:placeholder/${handle}/network.slices.slice/${rkey}`;
-
   return (
     <Suspense
       fallback={
@@ ... @@
         </Layout>
       }
     >
-      <OAuthClientsWrapper sliceUri={sliceUri} handle={handle!} rkey={rkey!} />
+      <OAuthClientsWrapper handle={handle!} rkey={rkey!} />
     </Suspense>
   );
 }

 function OAuthClientsWrapper(
-  { sliceUri, handle, rkey }: {
-    sliceUri: string;
+  { handle, rkey }: {
     handle: string;
     rkey: string;
   },
 ) {
   const { session } = useSessionContext();
+
   const data = useLazyLoadQuery<OAuthClientsQuery>(
     graphql`
       query OAuthClientsQuery(
-        $slice: String!
         $where: NetworkSlicesSliceWhereInput
       ) {
-        oauthClients(slice: $slice) {
-          clientId
-          clientSecret
-          clientName
-          redirectUris
-          scope
-          clientUri
-          logoUri
-          tosUri
-          policyUri
-          createdAt
-          createdByDid
-        }
         networkSlicesSlices(first: 1, where: $where) {
           edges {
             node {
               name
               did
               actorHandle
+              uri
+              oauthClients {
+                clientId
+                clientSecret
+                clientName
+                redirectUris
+                scope
+                clientUri
+                logoUri
+                tosUri
+                policyUri
+                createdAt
+                createdByDid
+              }
             }
           }
         }
       }
     `,
     {
-      slice: sliceUri,
       where: {
         actorHandle: { eq: handle },
         uri: { contains: rkey },
@@ ... @@
   const slice = data.networkSlicesSlices.edges[0]?.node;
   const sliceName = slice?.name;
+  const sliceUri = slice?.uri || `at://${slice?.did}/network.slices.slice/${rkey}`;

   // Check if current user is the slice owner or admin
   const isOwner = isSliceOwner(slice, session);
@@ ... @@
     }
   >
     <OAuthClientsContent
-      clients={data.oauthClients || []}
+      clients={slice?.oauthClients || []}
       sliceUri={sliceUri}
     />
   </Layout>
```
**frontend-v2/src/pages/ProfileSettings.tsx** (+18 -13)

```diff
-import { useParams, Link } from "react-router-dom";
+import { Link, useParams } from "react-router-dom";
 import { useState } from "react";
 import { graphql, useLazyLoadQuery, useMutation } from "react-relay";
 import type { ProfileSettingsQuery } from "../__generated__/ProfileSettingsQuery.graphql.ts";
@@ ... @@
       where: {
         actorHandle: { eq: handle },
       },
-    }
+    },
   );

   const profile = data.networkSlicesActorProfiles.edges[0]?.node;
@@ ... @@
     graphql`
       mutation ProfileSettingsUploadBlobMutation($data: String!, $mimeType: String!) {
         uploadBlob(data: $data, mimeType: $mimeType) {
-          blob
+          blob {
+            ref
+            mimeType
+            size
+          }
         }
       }
-    `
+    `,
   );

   const [commitUpdateProfile, isUpdatingProfile] = useMutation(
@@ ... @@
         }
       }
     }
-    `
+    `,
   );

   const [commitCreateProfile, isCreatingProfile] = useMutation(
@@ ... @@
         }
       }
     }
-    `
+    `,
   );

   // Helper to convert File to base64
@@ ... @@
     reader.onload = () => {
       const arrayBuffer = reader.result as ArrayBuffer;
       const bytes = new Uint8Array(arrayBuffer);
-      const binary = Array.from(bytes).map(b => String.fromCharCode(b)).join('');
+      const binary = Array.from(bytes).map((b) => String.fromCharCode(b))
+        .join("");
       resolve(btoa(binary));
     };
     reader.onerror = reject;
@@ ... @@
       // Upload new avatar
       const base64Data = await fileToBase64(avatarFile);

-      const uploadResult = await new Promise<{ uploadBlob: { blob: unknown } }>((resolve, reject) => {
+      const uploadResult = await new Promise<
+        { uploadBlob: { blob: unknown } }
+      >((resolve, reject) => {
         commitUploadBlob({
           variables: {
             data: base64Data,
             mimeType: avatarFile.type,
           },
-          onCompleted: (data) => resolve(data as { uploadBlob: { blob: unknown } }),
+          onCompleted: (data) =>
+            resolve(data as { uploadBlob: { blob: unknown } }),
           onError: (error) => reject(error),
         });
       });
@@ ... @@
     } else if (profile?.avatar) {
       // Keep existing avatar - reconstruct blob with $type field for AT Protocol
       avatarBlob = {
-        $type: "blob",
-        ref: {
-          $link: profile.avatar.ref,
-        },
+        ref: profile.avatar.ref,
         mimeType: profile.avatar.mimeType,
         size: profile.avatar.size,
       };
```
**frontend-v2/src/pages/SliceOverview.tsx** (+5 -1)

```diff
 import { useSessionContext } from "../lib/useSession.ts";
 import { isSliceOwner } from "../lib/permissions.ts";
 import { Plus } from "lucide-react";
+import { CopyableField } from "../components/CopyableField.tsx";

 export default function SliceOverview() {
   const { handle, rkey } = useParams<{ handle: string; rkey: string }>();
@@ ... @@
       }
     >
       <div className="mb-8">
-        <div className="flex items-center gap-3">
+        <div className="flex items-center gap-3 mb-4">
           <Avatar
             src={slice?.networkSlicesActorProfile?.avatar?.url}
             alt={`${handle} avatar`}
@@ ... @@
             </p>
           </div>
         </div>
+        {slice?.uri && (
+          <CopyableField value={slice.uri} label="Slice URI" variant="inline" />
+        )}
       </div>

       {/* Stats Section */}
```
**frontend-v2/src/utils/lexiconDependencies.ts** (new file, +106)

```ts
/**
 * Extracts all external lexicon references from a lexicon's defs
 * Returns unique NSIDs (without #defName fragments)
 */
export function extractExternalRefs(defs: unknown): string[] {
  const refs = new Set<string>();

  function traverse(obj: unknown): void {
    if (Array.isArray(obj)) {
      obj.forEach(traverse);
    } else if (obj !== null && typeof obj === "object") {
      const objRecord = obj as Record<string, unknown>;

      // Check if this is a ref object (single ref)
      if (objRecord.type === "ref" && typeof objRecord.ref === "string") {
        const ref = objRecord.ref;

        // Only include external refs (not starting with #)
        if (!ref.startsWith("#")) {
          // Strip #defName suffix if present
          const nsid = ref.split("#")[0];
          refs.add(nsid);
        }
      }

      // Check if this is a union type with multiple refs
      if (objRecord.type === "union" && Array.isArray(objRecord.refs)) {
        for (const ref of objRecord.refs) {
          if (typeof ref === "string" && !ref.startsWith("#")) {
            // Strip #defName suffix if present
            const nsid = ref.split("#")[0];
            refs.add(nsid);
          }
        }
      }

      // Recursively traverse all properties
      for (const value of Object.values(objRecord)) {
        traverse(value);
      }
    }
  }

  traverse(defs);
  return Array.from(refs);
}

interface LexiconWithData {
  nsid: string;
  data: unknown;
}

/**
 * Resolves all transitive dependencies for a lexicon
 * Uses BFS to find all required lexicons, avoiding circular references
 */
export function resolveDependencies(
  mainLexicon: LexiconWithData,
  allAvailableLexicons: LexiconWithData[],
  existingNsids: string[]
): LexiconWithData[] {
  const dependencies: LexiconWithData[] = [];
  const visited = new Set<string>();
  const queue: LexiconWithData[] = [mainLexicon];

  // Create a map for quick lookups
  const lexiconMap = new Map<string, LexiconWithData>();
  for (const lex of allAvailableLexicons) {
    lexiconMap.set(lex.nsid, lex);
  }

  while (queue.length > 0) {
    const current = queue.shift()!;

    // Skip if already visited or already in user's slice
    if (visited.has(current.nsid) || existingNsids.includes(current.nsid)) {
      continue;
    }

    visited.add(current.nsid);

    // Add to dependencies (except for the main lexicon)
    if (current.nsid !== mainLexicon.nsid) {
      dependencies.push(current);
    }

    // Extract refs from this lexicon's defs
    try {
      const lexData = current.data as Record<string, unknown>;
      const defs = lexData.defs || lexData.definitions;
      const refs = extractExternalRefs(defs);

      // Queue up any dependencies we haven't visited yet
      for (const refNsid of refs) {
        if (!visited.has(refNsid) && lexiconMap.has(refNsid)) {
          queue.push(lexiconMap.get(refNsid)!);
        }
      }
    } catch {
      // If we can't parse the lexicon, skip it
      continue;
    }
  }

  return dependencies;
}
```
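A small usage sketch for the two exports above; the lexicon shapes are hypothetical but follow the `defs`/`ref` structure the traversal expects:

```ts
// Hedged usage sketch for extractExternalRefs / resolveDependencies.
const postLexicon = {
  nsid: "com.example.post",
  data: {
    id: "com.example.post",
    defs: {
      main: {
        type: "record",
        record: {
          type: "object",
          properties: {
            author: { type: "ref", ref: "com.example.actor#profileView" },
          },
        },
      },
    },
  },
};

const actorLexicon = {
  nsid: "com.example.actor",
  data: { id: "com.example.actor", defs: { profileView: { type: "object" } } },
};

// "#profileView" is stripped, leaving the bare NSID:
console.log(extractExternalRefs(postLexicon.data.defs));
// -> ["com.example.actor"]

// actor is pulled in as a dependency because it is not yet in the slice:
console.log(resolveDependencies(postLexicon, [postLexicon, actorLexicon], []));
// -> [actorLexicon]
```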
**frontend-v2.fly.toml** (+7 -1)

```diff
 [build]
 dockerfile = './frontend-v2/Dockerfile'
+[build.args]
+VITE_API_URL = 'https://slices-api.fly.dev'
+VITE_SLICE_URI = 'at://did:plc:bcgltzqazw5tb6k2g3ttenbj/network.slices.slice/3lymhd4jhrd2z'
+VITE_ADMIN_DIDS = 'did:plc:bcgltzqazw5tb6k2g3ttenbj'

 [env]
-ADMIN_DID = 'did:plc:bcgltzqazw5tb6k2g3ttenbj'
 API_URL = 'https://slices-api.fly.dev'
 DATABASE_URL = '/data/slices.db'
 DENO_ENV = 'production'
 DOCS_PATH = '/app/docs'
 PORT = '8080'
 SLICE_URI = 'at://did:plc:bcgltzqazw5tb6k2g3ttenbj/network.slices.slice/3lymhd4jhrd2z'
+VITE_API_URL = 'https://slices-api.fly.dev'
+VITE_SLICE_URI = 'at://did:plc:bcgltzqazw5tb6k2g3ttenbj/network.slices.slice/3lymhd4jhrd2z'
+VITE_ADMIN_DIDS = 'did:plc:bcgltzqazw5tb6k2g3ttenbj'

 [[mounts]]
 source = 'frontend_data'
```
**packages/lexicon/deno.json** (+1 -1)
**packages/lexicon/wasm/README.md** (+40 -20)

````diff
-# lexicon-rs
+# slices-lexicon

 Rust implementation of AT Protocol lexicon validation.

 ## Overview

-This validation engine can be used in any project that needs AT Protocol lexicon validation. It provides high-performance, spec-compliant validation of AT Protocol lexicon documents and data records. It can also be compiled to WebAssembly for use in JavaScript/TypeScript environments.
+This validation engine can be used in any project that needs AT Protocol lexicon
+validation. It provides high-performance, spec-compliant validation of AT
+Protocol lexicon documents and data records. It can also be compiled to
+WebAssembly for use in JavaScript/TypeScript environments.

 ## Architecture

-This package serves as the core validation engine and is typically consumed by higher-level packages:
+This package serves as the core validation engine and is typically consumed by
+higher-level packages:

 - **`@slices/lexicon`** - TypeScript/Deno package with ergonomic APIs
+- **`@slices/cli`** - Deno command-line tool for lexicon/appview management
 - **`lexicon-intellisense`** - VS Code extension for lexicon development
-- **Slices CLI** - Command-line tooling for lexicon management

 ## Features
@@ ... @@
 ```toml
 [dependencies]
-slices-lexicon = "0.1"
+slices-lexicon = "0.2"
 ```

 Basic validation:
@@ ... @@
 Use in JavaScript environments:

 ```javascript
-import init, {
-  WasmLexiconValidator,
-  validate_lexicons_and_get_errors,
-  is_valid_nsid
-} from './pkg/slices_lexicon.js';
+import init, { WasmLexiconValidator } from "./pkg/slices_lexicon.js";

 await init();

 // Validate lexicons
 const lexicons = [{
-  id: "com.example.post",
-  lexicon: 1,
-  defs: { /* ... */ }
+  id: "com.example.post",
+  lexicon: 1,
+  defs: {
+    main: {
+      type: "record",
+      key: "tid",
+      record: {
+        type: "object",
+        required: ["text"],
+        properties: {
+          text: { type: "string", maxLength: 300 },
+        },
+      },
+    },
+  },
 }];

-const errors = validate_lexicons_and_get_errors(JSON.stringify(lexicons));
-console.log('Validation errors:', JSON.parse(errors));
+const validator = new WasmLexiconValidator(JSON.stringify(lexicons));
+const errorsJson = validator.validate_lexicons();
+const errors = JSON.parse(errorsJson);

-// Validate NSID format
-const isValid = is_valid_nsid("com.example.post");
+if (Object.keys(errors).length > 0) {
+  console.log("Validation errors:", errors);
+} else {
+  console.log("All lexicons valid");
+}
+
+validator.free(); // Clean up WASM resources
 ```

 ## JavaScript/TypeScript Usage

-If you're using JavaScript or TypeScript, use the higher-level packages instead of consuming this library directly:
+If you're using JavaScript or TypeScript, use the higher-level packages instead
+of consuming this library directly:

-- **TypeScript/JavaScript**: Use `@slices/lexicon` for ergonomic APIs with automatic resource management
+- **TypeScript/JavaScript**: Use `@slices/lexicon` for ergonomic APIs with
 - **VS Code Development**: Install the `lexicon-intellisense` extension
 - **CLI Tools**: Use the Slices CLI for lexicon management tasks
@@ ... @@
 ## License

-MIT
````
140
+
automatic resource management
141
- **VS Code Development**: Install the `lexicon-intellisense` extension
142
- **CLI Tools**: Use the Slices CLI for lexicon management tasks
143
···
171
172
## License
173
174
+
MIT
+5 -1 packages/lexicon/wasm/package.json
···
   "name": "slices-lexicon",
   "type": "module",
   "description": "AT Protocol lexicon validation library for Slices",
+  "version": "0.3.0",
   "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://tangled.org/@slices.network/slices/tree/main/crates/slices-lexicon"
+  },
   "files": [
     "slices_lexicon_bg.wasm",
     "slices_lexicon.js",

packages/lexicon/wasm/slices_lexicon_bg.wasm
This is a binary file and will not be displayed.
+1 -1 packages/lexicon-intellisense/package.json

+40 -20 packages/lexicon-intellisense/wasm/README.md
···
-# lexicon-rs
+# slices-lexicon

 Rust implementation of AT Protocol lexicon validation.

 ## Overview

-This validation engine can be used in any project that needs AT Protocol lexicon validation. It provides high-performance, spec-compliant validation of AT Protocol lexicon documents and data records. It can also be compiled to WebAssembly for use in JavaScript/TypeScript environments.
+This validation engine can be used in any project that needs AT Protocol lexicon
+validation. It provides high-performance, spec-compliant validation of AT
+Protocol lexicon documents and data records. It can also be compiled to
+WebAssembly for use in JavaScript/TypeScript environments.

 ## Architecture

-This package serves as the core validation engine and is typically consumed by higher-level packages:
+This package serves as the core validation engine and is typically consumed by
+higher-level packages:

 - **`@slices/lexicon`** - TypeScript/Deno package with ergonomic APIs
+- **`@slices/cli`** - Deno command-line tool for lexicon/appview management
 - **`lexicon-intellisense`** - VS Code extension for lexicon development
-- **Slices CLI** - Command-line tooling for lexicon management

 ## Features
···
 ```toml
 [dependencies]
-slices-lexicon = "0.1"
+slices-lexicon = "0.2"
 ```

 Basic validation:
···
 Use in JavaScript environments:

 ```javascript
-import init, {
-  WasmLexiconValidator,
-  validate_lexicons_and_get_errors,
-  is_valid_nsid
-} from './pkg/slices_lexicon.js';
+import init, { WasmLexiconValidator } from "./pkg/slices_lexicon.js";

 await init();

 // Validate lexicons
 const lexicons = [{
-  id: "com.example.post",
-  lexicon: 1,
-  defs: { /* ... */ }
+  id: "com.example.post",
+  lexicon: 1,
+  defs: {
+    main: {
+      type: "record",
+      key: "tid",
+      record: {
+        type: "object",
+        required: ["text"],
+        properties: {
+          text: { type: "string", maxLength: 300 },
+        },
+      },
+    },
+  },
 }];

-const errors = validate_lexicons_and_get_errors(JSON.stringify(lexicons));
-console.log('Validation errors:', JSON.parse(errors));
+const validator = new WasmLexiconValidator(JSON.stringify(lexicons));
+const errorsJson = validator.validate_lexicons();
+const errors = JSON.parse(errorsJson);

-// Validate NSID format
-const isValid = is_valid_nsid("com.example.post");
+if (Object.keys(errors).length > 0) {
+  console.log("Validation errors:", errors);
+} else {
+  console.log("All lexicons valid");
+}
+
+validator.free(); // Clean up WASM resources
 ```

 ## JavaScript/TypeScript Usage

-If you're using JavaScript or TypeScript, use the higher-level packages instead of consuming this library directly:
+If you're using JavaScript or TypeScript, use the higher-level packages instead
+of consuming this library directly:

-- **TypeScript/JavaScript**: Use `@slices/lexicon` for ergonomic APIs with automatic resource management
+- **TypeScript/JavaScript**: Use `@slices/lexicon` for ergonomic APIs with
+  automatic resource management
 - **VS Code Development**: Install the `lexicon-intellisense` extension
 - **CLI Tools**: Use the Slices CLI for lexicon management tasks
···
 ## License

-MIT
+MIT
+5 -1 packages/lexicon-intellisense/wasm/package.json
···
   "name": "slices-lexicon",
   "type": "module",
   "description": "AT Protocol lexicon validation library for Slices",
+  "version": "0.3.0",
   "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://tangled.org/@slices.network/slices/tree/main/crates/slices-lexicon"
+  },
   "files": [
     "slices_lexicon_bg.wasm",
     "slices_lexicon.js",

packages/lexicon-intellisense/wasm/slices_lexicon_bg.wasm
This is a binary file and will not be displayed.
+5 -2 packages/oauth/deno.json
···
 {
   "name": "@slices/oauth",
+  "version": "0.7.0-alpha.4",
   "exports": {
     ".": "./mod.ts"
   },
+  "imports": {
+    "@libsql/client": "npm:@libsql/client@0.6.0"
+  },
   "compilerOptions": {
+    "lib": ["deno.ns", "deno.unstable", "dom", "dom.iterable", "esnext"]
   },
   "tasks": {
     "test": "deno test"
+1 packages/oauth/mod.ts
···
 export { DeviceFlowClient } from "./src/device.ts";
 export { DenoKVOAuthStorage } from "./src/storage/deno-kv.ts";
 export { SQLiteOAuthStorage } from "./src/storage/sqlite.ts";
+export { ValTownSQLiteOAuthStorage } from "./src/storage/valtown-sqlite.ts";
 export type {
   OAuthConfig,
   OAuthTokens,
+149 packages/oauth/src/storage/valtown-sqlite.ts
···
+import type { OAuthStorage, OAuthTokens } from "../types.ts";
+import type { InStatement, TransactionMode } from "@libsql/client";
+
+// Val Town's SQLite ResultSet (doesn't have a toJSON method)
+interface ValTownResultSet {
+  columns: string[];
+  columnTypes: string[];
+  rows: unknown[][];
+  rowsAffected: number;
+  lastInsertRowid?: bigint;
+}
+
+interface SQLiteInstance {
+  execute(statement: InStatement): Promise<ValTownResultSet>;
+  batch(
+    statements: InStatement[],
+    mode?: TransactionMode,
+  ): Promise<ValTownResultSet[]>;
+}
+
+export class ValTownSQLiteOAuthStorage implements OAuthStorage {
+  private sqlite: SQLiteInstance;
+
+  constructor(sqlite: SQLiteInstance) {
+    this.sqlite = sqlite;
+    this.initTables();
+  }
+
+  private async initTables(): Promise<void> {
+    // Create tokens table
+    await this.sqlite.execute(`
+      CREATE TABLE IF NOT EXISTS oauth_tokens (
+        id INTEGER PRIMARY KEY,
+        session_id TEXT,
+        access_token TEXT NOT NULL,
+        token_type TEXT NOT NULL,
+        expires_at INTEGER,
+        refresh_token TEXT,
+        scope TEXT,
+        created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
+        UNIQUE(session_id)
+      )
+    `);
+
+    // Create states table with automatic cleanup
+    await this.sqlite.execute(`
+      CREATE TABLE IF NOT EXISTS oauth_states (
+        state TEXT PRIMARY KEY,
+        code_verifier TEXT NOT NULL,
+        timestamp INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)
+      )
+    `);
+
+    // Create index for cleanup efficiency
+    await this.sqlite.execute(`
+      CREATE INDEX IF NOT EXISTS idx_oauth_states_timestamp ON oauth_states(timestamp)
+    `);
+  }
+
+  async getTokens(sessionId: string): Promise<OAuthTokens | null> {
+    const result = await this.sqlite.execute({
+      sql: `SELECT access_token, token_type, expires_at, refresh_token, scope
+            FROM oauth_tokens
+            WHERE session_id = ?
+            LIMIT 1`,
+      args: [sessionId],
+    });
+
+    if (result.rows.length === 0) return null;
+
+    const row = result.rows[0];
+
+    return {
+      accessToken: row[0] as string,
+      tokenType: row[1] as string,
+      expiresAt: (row[2] as number | null) ?? undefined,
+      refreshToken: (row[3] as string | null) ?? undefined,
+      scope: (row[4] as string | null) ?? undefined,
+    };
+  }
+
+  async setTokens(tokens: OAuthTokens, sessionId: string): Promise<void> {
+    await this.clearTokens(sessionId);
+
+    await this.sqlite.execute({
+      sql:
+        `INSERT INTO oauth_tokens (session_id, access_token, token_type, expires_at, refresh_token, scope)
+         VALUES (?, ?, ?, ?, ?, ?)`,
+      args: [
+        sessionId,
+        tokens.accessToken,
+        tokens.tokenType,
+        tokens.expiresAt ?? null,
+        tokens.refreshToken ?? null,
+        tokens.scope ?? null,
+      ],
+    });
+  }
+
+  async clearTokens(sessionId: string): Promise<void> {
+    await this.sqlite.execute({
+      sql: "DELETE FROM oauth_tokens WHERE session_id = ?",
+      args: [sessionId],
+    });
+  }
+
+  async getState(state: string): Promise<string | null> {
+    const result = await this.sqlite.execute({
+      sql: "SELECT code_verifier FROM oauth_states WHERE state = ?",
+      args: [state],
+    });
+
+    if (result.rows.length === 0) return null;
+
+    const codeVerifier = result.rows[0][0] as string;
+
+    // Delete after use (one-time use)
+    await this.clearState(state);
+
+    return codeVerifier;
+  }
+
+  async setState(state: string, codeVerifier: string): Promise<void> {
+    await this.sqlite.execute({
+      sql:
+        `INSERT OR REPLACE INTO oauth_states (state, code_verifier, timestamp)
+         VALUES (?, ?, ?)`,
+      args: [state, codeVerifier, Date.now()],
+    });
+
+    // Auto-cleanup expired states
+    await this.cleanup();
+  }
+
+  async clearState(state: string): Promise<void> {
+    await this.sqlite.execute({
+      sql: "DELETE FROM oauth_states WHERE state = ?",
+      args: [state],
+    });
+  }
+
+  private async cleanup(): Promise<void> {
+    const cutoff = Date.now() - (10 * 60 * 1000); // 10 minutes ago
+    await this.sqlite.execute({
+      sql: "DELETE FROM oauth_states WHERE timestamp < ?",
+      args: [cutoff],
+    });
+  }
+}
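A minimal wiring sketch for the new storage class, assuming Val Town's standard `sqlite` export satisfies the `SQLiteInstance` interface above; the import URL, session id, and token values are illustrative. Note that `initTables()` runs unawaited in the constructor, so on a cold database the very first query can in principle race table creation:

```typescript
import { sqlite } from "https://esm.town/v/std/sqlite"; // assumed Val Town std export
import { ValTownSQLiteOAuthStorage } from "@slices/oauth";

const storage = new ValTownSQLiteOAuthStorage(sqlite);

// Store and retrieve tokens keyed by an app-level session id.
await storage.setTokens(
  {
    accessToken: "access-token-123",
    tokenType: "DPoP",
    expiresAt: Date.now() + 3600_000, // one hour, epoch ms
  },
  "session-abc",
);
const tokens = await storage.getTokens("session-abc"); // null if absent

// OAuth state is one-time use: getState deletes the row it returns.
await storage.setState("state-xyz", "pkce-code-verifier");
const verifier = await storage.getState("state-xyz"); // "pkce-code-verifier"
const again = await storage.getState("state-xyz"); // null
```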
+6 -2 packages/session/deno.json
···
 {
   "name": "@slices/session",
+  "version": "0.4.0-alpha.4",
   "exports": "./mod.ts",
+  "compilerOptions": {
+    "lib": ["deno.ns", "deno.unstable", "esnext", "dom"]
+  },
   "tasks": {
     "test": "deno test",
     "check": "deno check mod.ts"
   },
   "imports": {
+    "pg": "npm:pg@^8.16.3",
+    "@libsql/client": "npm:@libsql/client@0.6.0"
   }
 }
+1 packages/session/mod.ts
···
 export { SessionStore } from "./src/store.ts";
 export { MemoryAdapter } from "./src/adapters/memory.ts";
 export { SQLiteAdapter } from "./src/adapters/sqlite.ts";
+export { ValTownSQLiteAdapter } from "./src/adapters/valtown-sqlite.ts";
 export { PostgresAdapter } from "./src/adapters/postgres.ts";
 export { DenoKVAdapter } from "./src/adapters/deno-kv.ts";
 export { withOAuthSession } from "./src/oauth-integration.ts";
+11 -11 packages/session/src/adapters/postgres.ts
···
   user_id: string;
   handle: string | null;
   is_authenticated: boolean;
-  data: string | null;
-  created_at: Date;
-  expires_at: Date;
-  last_accessed_at: Date;
+  data: Record<string, unknown> | null;
+  created_at: number;
+  expires_at: number;
+  last_accessed_at: number;
 }

 export class PostgresAdapter implements SessionAdapter {
···
       data.userId,
       data.handle || null,
       data.isAuthenticated,
-      data.data ? JSON.stringify(data.data) : null,
+      data.data || null,
       data.createdAt,
       data.expiresAt,
       data.lastAccessedAt,
···
     updates: Partial<SessionData>
   ): Promise<boolean> {
     const setParts: string[] = [];
-    const values: (string | number | boolean | null)[] = [];
+    const values: (string | number | boolean | null | Record<string, unknown>)[] = [];
     let paramIndex = 1;

     if (updates.userId !== undefined) {
···
     if (updates.data !== undefined) {
       setParts.push(`data = $${paramIndex++}`);
-      values.push(updates.data ? JSON.stringify(updates.data) : null);
+      values.push(updates.data || null);
     }

     if (updates.expiresAt !== undefined) {
···
       userId: row.user_id,
       handle: row.handle || undefined,
       isAuthenticated: row.is_authenticated,
-      data: row.data ? JSON.parse(row.data) : undefined,
-      createdAt: row.created_at.getTime(),
-      expiresAt: row.expires_at.getTime(),
-      lastAccessedAt: row.last_accessed_at.getTime(),
+      data: row.data || undefined,
+      createdAt: row.created_at,
+      expiresAt: row.expires_at,
+      lastAccessedAt: row.last_accessed_at,
     };
   }
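The `SessionRow` change implies the backing table now stores epoch-millisecond integers and native JSON rather than `TIMESTAMP` columns and stringified-JSON `TEXT`. A hypothetical migration for that shape, using the `pg` dependency this changeset adds to `packages/session/deno.json`; the table and column names come from the adapter, everything else is an assumption:

```typescript
import pg from "pg";

// node-postgres returns BIGINT (int8, OID 20) as a string by default;
// parse it so created_at / expires_at / last_accessed_at come back as
// the numbers SessionRow now declares.
pg.types.setTypeParser(20, (value: string) => Number(value));

const pool = new pg.Pool({ connectionString: Deno.env.get("DATABASE_URL") });

// Hypothetical one-off migration matching the new row shape.
await pool.query(`
  ALTER TABLE sessions
    ALTER COLUMN data TYPE JSONB USING data::jsonb,
    ALTER COLUMN created_at TYPE BIGINT
      USING (EXTRACT(EPOCH FROM created_at) * 1000)::bigint,
    ALTER COLUMN expires_at TYPE BIGINT
      USING (EXTRACT(EPOCH FROM expires_at) * 1000)::bigint,
    ALTER COLUMN last_accessed_at TYPE BIGINT
      USING (EXTRACT(EPOCH FROM last_accessed_at) * 1000)::bigint
`);

await pool.end();
```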
+180 packages/session/src/adapters/valtown-sqlite.ts
···
+import type { SessionAdapter, SessionData } from "../types.ts";
+import type { InStatement, InValue, TransactionMode } from "@libsql/client";
+
+// Val Town's SQLite ResultSet (doesn't have a toJSON method)
+interface ValTownResultSet {
+  columns: string[];
+  columnTypes: string[];
+  rows: unknown[][];
+  rowsAffected: number;
+  lastInsertRowid?: bigint;
+}
+
+interface SQLiteInstance {
+  execute(statement: InStatement): Promise<ValTownResultSet>;
+  batch(statements: InStatement[], mode?: TransactionMode): Promise<ValTownResultSet[]>;
+}
+
+export class ValTownSQLiteAdapter implements SessionAdapter {
+  private sqlite: SQLiteInstance;
+
+  constructor(sqlite: SQLiteInstance) {
+    this.sqlite = sqlite;
+    this.initializeDatabase();
+  }
+
+  private async initializeDatabase(): Promise<void> {
+    await this.sqlite.execute(`
+      CREATE TABLE IF NOT EXISTS sessions (
+        session_id TEXT PRIMARY KEY,
+        user_id TEXT NOT NULL,
+        handle TEXT,
+        is_authenticated INTEGER NOT NULL DEFAULT 1,
+        data TEXT, -- JSON string
+        created_at INTEGER NOT NULL,
+        expires_at INTEGER NOT NULL,
+        last_accessed_at INTEGER NOT NULL
+      )
+    `);
+
+    // Index for cleanup operations
+    await this.sqlite.execute(`
+      CREATE INDEX IF NOT EXISTS idx_sessions_expires_at
+      ON sessions(expires_at)
+    `);
+
+    // Index for user lookups
+    await this.sqlite.execute(`
+      CREATE INDEX IF NOT EXISTS idx_sessions_user_id
+      ON sessions(user_id)
+    `);
+  }
+
+  async get(sessionId: string): Promise<SessionData | null> {
+    const result = await this.sqlite.execute({
+      sql: `SELECT session_id, user_id, handle, is_authenticated, data, created_at, expires_at, last_accessed_at
+            FROM sessions
+            WHERE session_id = ?`,
+      args: [sessionId]
+    });
+
+    if (result.rows.length === 0) return null;
+
+    return this.rowToSessionData(result.rows[0]);
+  }
+
+  async set(sessionId: string, data: SessionData): Promise<void> {
+    await this.sqlite.execute({
+      sql: `INSERT OR REPLACE INTO sessions
+            (session_id, user_id, handle, is_authenticated, data, created_at, expires_at, last_accessed_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
+      args: [
+        sessionId,
+        data.userId,
+        data.handle ?? null,
+        data.isAuthenticated ? 1 : 0,
+        data.data ? JSON.stringify(data.data) : null,
+        data.createdAt,
+        data.expiresAt,
+        data.lastAccessedAt,
+      ]
+    });
+  }
+
+  async update(sessionId: string, updates: Partial<SessionData>): Promise<boolean> {
+    const setParts: string[] = [];
+    const values: InValue[] = [];
+
+    if (updates.userId !== undefined) {
+      setParts.push("user_id = ?");
+      values.push(updates.userId);
+    }
+
+    if (updates.handle !== undefined) {
+      setParts.push("handle = ?");
+      values.push(updates.handle ?? null);
+    }
+
+    if (updates.isAuthenticated !== undefined) {
+      setParts.push("is_authenticated = ?");
+      values.push(updates.isAuthenticated ? 1 : 0);
+    }
+
+    if (updates.data !== undefined) {
+      setParts.push("data = ?");
+      values.push(updates.data ? JSON.stringify(updates.data) : null);
+    }
+
+    if (updates.expiresAt !== undefined) {
+      setParts.push("expires_at = ?");
+      values.push(updates.expiresAt);
+    }
+
+    if (updates.lastAccessedAt !== undefined) {
+      setParts.push("last_accessed_at = ?");
+      values.push(updates.lastAccessedAt);
+    }
+
+    if (setParts.length === 0) return false;
+
+    // Add sessionId as the last parameter for the WHERE clause
+    values.push(sessionId);
+
+    const result = await this.sqlite.execute({
+      sql: `UPDATE sessions
+            SET ${setParts.join(", ")}
+            WHERE session_id = ?`,
+      args: values
+    });
+
+    return result.rowsAffected > 0;
+  }
+
+  async delete(sessionId: string): Promise<void> {
+    await this.sqlite.execute({
+      sql: "DELETE FROM sessions WHERE session_id = ?",
+      args: [sessionId]
+    });
+  }
+
+  async cleanup(expiresBeforeMs: number): Promise<number> {
+    const result = await this.sqlite.execute({
+      sql: "DELETE FROM sessions WHERE expires_at < ?",
+      args: [expiresBeforeMs]
+    });
+    return result.rowsAffected;
+  }
+
+  async exists(sessionId: string): Promise<boolean> {
+    const result = await this.sqlite.execute({
+      sql: "SELECT 1 FROM sessions WHERE session_id = ? LIMIT 1",
+      args: [sessionId]
+    });
+    return result.rows.length > 0;
+  }
+
+  private rowToSessionData(row: unknown[]): SessionData {
+    return {
+      sessionId: row[0] as string,
+      userId: row[1] as string,
+      handle: (row[2] as string | null) ?? undefined,
+      isAuthenticated: Boolean(row[3] as number),
+      data: row[4] ? JSON.parse(row[4] as string) : undefined,
+      createdAt: row[5] as number,
+      expiresAt: row[6] as number,
+      lastAccessedAt: row[7] as number,
+    };
+  }
+
+  // Val Town SQLite-specific methods
+  async getSessionsByUser(userId: string): Promise<SessionData[]> {
+    const result = await this.sqlite.execute({
+      sql: `SELECT session_id, user_id, handle, is_authenticated, data, created_at, expires_at, last_accessed_at
+            FROM sessions
+            WHERE user_id = ?`,
+      args: [userId]
+    });
+
+    return result.rows.map(row => this.rowToSessionData(row));
+  }
+}
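And a usage sketch for the session adapter, mirroring the OAuth storage above; the Val Town import URL and the session values are illustrative assumptions, and the `SessionData` fields match what `rowToSessionData` produces:

```typescript
import { sqlite } from "https://esm.town/v/std/sqlite"; // assumed Val Town std export
import { ValTownSQLiteAdapter } from "@slices/session";

const adapter = new ValTownSQLiteAdapter(sqlite);

const now = Date.now();
await adapter.set("session-abc", {
  sessionId: "session-abc",
  userId: "did:plc:example",
  handle: "alice.example.com",
  isAuthenticated: true,
  data: { theme: "dark" },
  createdAt: now,
  expiresAt: now + 7 * 24 * 60 * 60 * 1000, // one week, epoch ms
  lastAccessedAt: now,
});

// Sliding expiration: touch the row without rewriting it wholesale.
await adapter.update("session-abc", { lastAccessedAt: Date.now() });

// Periodic cleanup returns how many expired rows were deleted.
const removed = await adapter.cleanup(Date.now());

// The Val Town-specific extra: list every session for a user.
const sessions = await adapter.getSessionsByUser("did:plc:example");
```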