Our Personal Data Server from scratch! tranquil.farm
atproto pds rust postgresql fun oauth

fix(backups): remove useless backups concept #67

merged opened by oyster.cafe targeting main from fix/remove-backups-concept
Labels

None yet.

Assignees

None yet.

Participants 1
AT URI
at://did:plc:3fwecdnvtcscjnrx2p4n7alz/sh.tangled.repo.pull/3mhdhpni6a622
+959 -5803
Diff #0
-28
.sqlx/query-017b04caf42b30f2c8f9468acf61a83244b7c2fa5cacfaee41a946a6af5ef68e.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT id, backup_enabled FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "backup_enabled", 14 - "type_info": "Bool" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "017b04caf42b30f2c8f9468acf61a83244b7c2fa5cacfaee41a946a6af5ef68e" 28 - }
-22
.sqlx/query-05fd99170e31e68fa5028c862417cdf535cd70e09fde0a8a28249df0070eb2fc.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT t.token FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "05fd99170e31e68fa5028c862417cdf535cd70e09fde0a8a28249df0070eb2fc" 22 - }
-15
.sqlx/query-0710b57fb9aa933525f617b15e6e2e5feaa9c59c38ec9175568abdacda167107.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "UPDATE users SET deactivated_at = $1 WHERE did = $2", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Timestamptz", 9 - "Text" 10 - ] 11 - }, 12 - "nullable": [] 13 - }, 14 - "hash": "0710b57fb9aa933525f617b15e6e2e5feaa9c59c38ec9175568abdacda167107" 15 - }
-53
.sqlx/query-07c54d6bcfdc08d821e788c31dc46cc240dc8507c7dd5bdd2ceeb36d2084e8ce.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT u.id, u.did, u.backup_enabled, u.deactivated_at, r.repo_root_cid, r.repo_rev\n FROM users u\n JOIN repos r ON r.user_id = u.id\n WHERE u.backup_enabled = true\n AND u.deactivated_at IS NULL\n AND (\n NOT EXISTS (\n SELECT 1 FROM account_backups ab WHERE ab.user_id = u.id\n )\n OR (\n SELECT MAX(ab.created_at) FROM account_backups ab WHERE ab.user_id = u.id\n ) < NOW() - make_interval(secs => $1)\n )\n LIMIT $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "did", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "backup_enabled", 19 - "type_info": "Bool" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "deactivated_at", 24 - "type_info": "Timestamptz" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "repo_root_cid", 29 - "type_info": "Text" 30 - }, 31 - { 32 - "ordinal": 5, 33 - "name": "repo_rev", 34 - "type_info": "Text" 35 - } 36 - ], 37 - "parameters": { 38 - "Left": [ 39 - "Float8", 40 - "Int8" 41 - ] 42 - }, 43 - "nullable": [ 44 - false, 45 - false, 46 - false, 47 - true, 48 - false, 49 - true 50 - ] 51 - }, 52 - "hash": "07c54d6bcfdc08d821e788c31dc46cc240dc8507c7dd5bdd2ceeb36d2084e8ce" 53 - }
-87
.sqlx/query-0ce61cd393867522de318c1f86c787851ec7bbc31b147b9c2fb2bac9b2676ba6.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT\n id,\n delegated_did,\n actor_did,\n controller_did,\n action_type as \"action_type: PgDelegationActionType\",\n action_details,\n ip_address,\n user_agent,\n created_at\n FROM delegation_audit_log\n WHERE controller_did = $1\n ORDER BY created_at DESC\n LIMIT $2 OFFSET $3\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "delegated_did", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "actor_did", 19 - "type_info": "Text" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "controller_did", 24 - "type_info": "Text" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "action_type: PgDelegationActionType", 29 - "type_info": { 30 - "Custom": { 31 - "name": "delegation_action_type", 32 - "kind": { 33 - "Enum": [ 34 - "grant_created", 35 - "grant_revoked", 36 - "scopes_modified", 37 - "token_issued", 38 - "repo_write", 39 - "blob_upload", 40 - "account_action" 41 - ] 42 - } 43 - } 44 - } 45 - }, 46 - { 47 - "ordinal": 5, 48 - "name": "action_details", 49 - "type_info": "Jsonb" 50 - }, 51 - { 52 - "ordinal": 6, 53 - "name": "ip_address", 54 - "type_info": "Text" 55 - }, 56 - { 57 - "ordinal": 7, 58 - "name": "user_agent", 59 - "type_info": "Text" 60 - }, 61 - { 62 - "ordinal": 8, 63 - "name": "created_at", 64 - "type_info": "Timestamptz" 65 - } 66 - ], 67 - "parameters": { 68 - "Left": [ 69 - "Text", 70 - "Int8", 71 - "Int8" 72 - ] 73 - }, 74 - "nullable": [ 75 - false, 76 - false, 77 - false, 78 - true, 79 - false, 80 - true, 81 - true, 82 - true, 83 - false 84 - ] 85 - }, 86 - "hash": "0ce61cd393867522de318c1f86c787851ec7bbc31b147b9c2fb2bac9b2676ba6" 87 - }
-29
.sqlx/query-0d70da6b077d6fbab5056c38490acbe772626a3447182b76b0cbcee15c5a46dc.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT ab.storage_key, ab.repo_rev\n FROM account_backups ab\n JOIN users u ON u.id = ab.user_id\n WHERE ab.id = $1 AND u.did = $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "storage_key", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "repo_rev", 14 - "type_info": "Text" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Uuid", 20 - "Text" 21 - ] 22 - }, 23 - "nullable": [ 24 - false, 25 - false 26 - ] 27 - }, 28 - "hash": "0d70da6b077d6fbab5056c38490acbe772626a3447182b76b0cbcee15c5a46dc" 29 - }
-22
.sqlx/query-0ec60bb854a4991d0d7249a68f7445b65c8cc8c723baca221d85f5e4f2478b99.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_update' ORDER BY created_at DESC LIMIT 1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "body", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "0ec60bb854a4991d0d7249a68f7445b65c8cc8c723baca221d85f5e4f2478b99" 22 - }
-22
.sqlx/query-0fae1be7a75bdc58c69a9af97cad4aec23c32a9378764b8d6d7eb2cc89c562b1.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT token FROM sso_pending_registration WHERE token = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "0fae1be7a75bdc58c69a9af97cad4aec23c32a9378764b8d6d7eb2cc89c562b1" 22 - }
-29
.sqlx/query-120c30d563b3cc40a2bb670f0c38c4ad63360b4a4f8e20c21afbe60c6fb7087a.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT id, storage_key\n FROM account_backups\n WHERE user_id = $1\n ORDER BY created_at DESC\n OFFSET $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "storage_key", 14 - "type_info": "Text" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Uuid", 20 - "Int8" 21 - ] 22 - }, 23 - "nullable": [ 24 - false, 25 - false 26 - ] 27 - }, 28 - "hash": "120c30d563b3cc40a2bb670f0c38c4ad63360b4a4f8e20c21afbe60c6fb7087a" 29 - }
-22
.sqlx/query-1c84643fd6bc57c76517849a64d2d877df337e823d4c2c2b077f695bbfc9e9ac.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n DELETE FROM sso_pending_registration\n WHERE token = $1 AND expires_at > NOW()\n RETURNING token\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "1c84643fd6bc57c76517849a64d2d877df337e823d4c2c2b077f695bbfc9e9ac" 22 - }
-28
.sqlx/query-24b823043ab60f36c29029137fef30dfe33922bb06067f2fdbfc1fbb4b0a2a81.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n DELETE FROM sso_pending_registration\n WHERE token = $1 AND expires_at > NOW()\n RETURNING token, request_uri\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "request_uri", 14 - "type_info": "Text" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "24b823043ab60f36c29029137fef30dfe33922bb06067f2fdbfc1fbb4b0a2a81" 28 - }
-38
.sqlx/query-2841093a67480e75e1e9e4046bf3eb74afae2d04f5ea0ec17a4d433983e6d71c.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO external_identities (did, provider, provider_user_id)\n VALUES ($1, $2, $3)\n RETURNING id\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text", 15 - { 16 - "Custom": { 17 - "name": "sso_provider_type", 18 - "kind": { 19 - "Enum": [ 20 - "github", 21 - "discord", 22 - "google", 23 - "gitlab", 24 - "oidc", 25 - "apple" 26 - ] 27 - } 28 - } 29 - }, 30 - "Text" 31 - ] 32 - }, 33 - "nullable": [ 34 - false 35 - ] 36 - }, 37 - "hash": "2841093a67480e75e1e9e4046bf3eb74afae2d04f5ea0ec17a4d433983e6d71c" 38 - }
-14
.sqlx/query-29ef76852bb89af1ab9e679ceaa4abcf8bc8268a348d3be0da9840d1708d20b5.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "UPDATE users SET password_reset_code_expires_at = NOW() - INTERVAL '1 hour' WHERE email = $1", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text" 9 - ] 10 - }, 11 - "nullable": [] 12 - }, 13 - "hash": "29ef76852bb89af1ab9e679ceaa4abcf8bc8268a348d3be0da9840d1708d20b5" 14 - }
-52
.sqlx/query-3043db19db2d0f51d1db6965a3ba0af4a1a4a76cc4ab1cad8395ba6babecc37d.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT u.id, u.did, u.backup_enabled, u.deactivated_at, r.repo_root_cid, r.repo_rev\n FROM users u\n JOIN repos r ON r.user_id = u.id\n WHERE u.did = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "did", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "backup_enabled", 19 - "type_info": "Bool" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "deactivated_at", 24 - "type_info": "Timestamptz" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "repo_root_cid", 29 - "type_info": "Text" 30 - }, 31 - { 32 - "ordinal": 5, 33 - "name": "repo_rev", 34 - "type_info": "Text" 35 - } 36 - ], 37 - "parameters": { 38 - "Left": [ 39 - "Text" 40 - ] 41 - }, 42 - "nullable": [ 43 - false, 44 - false, 45 - false, 46 - true, 47 - false, 48 - true 49 - ] 50 - }, 51 - "hash": "3043db19db2d0f51d1db6965a3ba0af4a1a4a76cc4ab1cad8395ba6babecc37d" 52 - }
-32
.sqlx/query-376b72306b50f747bc9161985ff4f50c35c53025a55ccf5e9933dc3795d29313.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_pending_registration (token, request_uri, provider, provider_user_id, provider_email_verified)\n VALUES ($1, $2, $3, $4, $5)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text", 26 - "Bool" 27 - ] 28 - }, 29 - "nullable": [] 30 - }, 31 - "hash": "376b72306b50f747bc9161985ff4f50c35c53025a55ccf5e9933dc3795d29313" 32 - }
-22
.sqlx/query-3933ea5b147ab6294936de147b98e116cfae848ecd76ea5d367585eb5117f2ad.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT id FROM external_identities WHERE id = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Uuid" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "3933ea5b147ab6294936de147b98e116cfae848ecd76ea5d367585eb5117f2ad" 22 - }
-16
.sqlx/query-3bed8d4843545f4a9676207513806603c50eb2af92957994abaf1c89c0294c12.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "INSERT INTO users (did, handle, email, password_hash) VALUES ($1, $2, $3, 'hash')", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - "Text" 11 - ] 12 - }, 13 - "nullable": [] 14 - }, 15 - "hash": "3bed8d4843545f4a9676207513806603c50eb2af92957994abaf1c89c0294c12" 16 - }
-55
.sqlx/query-4445cc86cdf04894b340e67661b79a3c411917144a011f50849b737130b24dbe.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT subject, body, comms_type as \"comms_type: String\" FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' ORDER BY created_at DESC LIMIT 1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "subject", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "body", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "comms_type: String", 19 - "type_info": { 20 - "Custom": { 21 - "name": "comms_type", 22 - "kind": { 23 - "Enum": [ 24 - "welcome", 25 - "email_verification", 26 - "password_reset", 27 - "email_update", 28 - "account_deletion", 29 - "admin_email", 30 - "plc_operation", 31 - "two_factor_code", 32 - "channel_verification", 33 - "passkey_recovery", 34 - "legacy_login_alert", 35 - "migration_verification", 36 - "channel_verified" 37 - ] 38 - } 39 - } 40 - } 41 - } 42 - ], 43 - "parameters": { 44 - "Left": [ 45 - "Uuid" 46 - ] 47 - }, 48 - "nullable": [ 49 - true, 50 - false, 51 - false 52 - ] 53 - }, 54 - "hash": "4445cc86cdf04894b340e67661b79a3c411917144a011f50849b737130b24dbe" 55 - }
-22
.sqlx/query-4560c237741ce9d4166aecd669770b3360a3ac71e649b293efb88d92c3254068.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT id FROM users WHERE email = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "4560c237741ce9d4166aecd669770b3360a3ac71e649b293efb88d92c3254068" 22 - }
-28
.sqlx/query-47fe4a54857344d8f789f37092a294cd58f64b4fb431b54b5deda13d64525e88.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT token, expires_at FROM account_deletion_requests WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "expires_at", 14 - "type_info": "Timestamptz" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "47fe4a54857344d8f789f37092a294cd58f64b4fb431b54b5deda13d64525e88" 28 - }
-22
.sqlx/query-49cbc923cc4a0dcf7dea4ead5ab9580ff03b717586c4ca2d5343709e2dac86b6.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT email_verified FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "email_verified", 9 - "type_info": "Bool" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "49cbc923cc4a0dcf7dea4ead5ab9580ff03b717586c4ca2d5343709e2dac86b6" 22 - }
-22
.sqlx/query-4fef326fa2d03d04869af3fec702c901d1ecf392545a3a032438b2c1859d46cc.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT token FROM sso_pending_registration\n WHERE token = $1 AND expires_at > NOW()\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "4fef326fa2d03d04869af3fec702c901d1ecf392545a3a032438b2c1859d46cc" 22 - }
-15
.sqlx/query-575c1e5529874f8f523e6fe22ccf4ee3296806581b1765dfb91a84ffab347f15.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO oauth_authorization_request (id, client_id, parameters, expires_at)\n VALUES ($1, 'https://test.example.com', $2, NOW() + INTERVAL '1 hour')\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Jsonb" 10 - ] 11 - }, 12 - "nullable": [] 13 - }, 14 - "hash": "575c1e5529874f8f523e6fe22ccf4ee3296806581b1765dfb91a84ffab347f15" 15 - }
-33
.sqlx/query-596c3400a60c77c7645fd46fcea61fa7898b6832e58c0f647f382b23b81d350e.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_pending_registration (token, request_uri, provider, provider_user_id, provider_username, provider_email)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text", 26 - "Text", 27 - "Text" 28 - ] 29 - }, 30 - "nullable": [] 31 - }, 32 - "hash": "596c3400a60c77c7645fd46fcea61fa7898b6832e58c0f647f382b23b81d350e" 33 - }
-81
.sqlx/query-59e63c5cf92985714e9586d1ce012efef733d4afaa4ea09974daf8303805e5d2.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT id, did, provider as \"provider: SsoProviderType\", provider_user_id, provider_username, provider_email\n FROM external_identities\n WHERE provider = $1 AND provider_user_id = $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "did", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "provider: SsoProviderType", 19 - "type_info": { 20 - "Custom": { 21 - "name": "sso_provider_type", 22 - "kind": { 23 - "Enum": [ 24 - "github", 25 - "discord", 26 - "google", 27 - "gitlab", 28 - "oidc", 29 - "apple" 30 - ] 31 - } 32 - } 33 - } 34 - }, 35 - { 36 - "ordinal": 3, 37 - "name": "provider_user_id", 38 - "type_info": "Text" 39 - }, 40 - { 41 - "ordinal": 4, 42 - "name": "provider_username", 43 - "type_info": "Text" 44 - }, 45 - { 46 - "ordinal": 5, 47 - "name": "provider_email", 48 - "type_info": "Text" 49 - } 50 - ], 51 - "parameters": { 52 - "Left": [ 53 - { 54 - "Custom": { 55 - "name": "sso_provider_type", 56 - "kind": { 57 - "Enum": [ 58 - "github", 59 - "discord", 60 - "google", 61 - "gitlab", 62 - "oidc", 63 - "apple" 64 - ] 65 - } 66 - } 67 - }, 68 - "Text" 69 - ] 70 - }, 71 - "nullable": [ 72 - false, 73 - false, 74 - false, 75 - false, 76 - true, 77 - true 78 - ] 79 - }, 80 - "hash": "59e63c5cf92985714e9586d1ce012efef733d4afaa4ea09974daf8303805e5d2" 81 - }
-28
.sqlx/query-5a016f289caf75177731711e56e92881ba343c73a9a6e513e205c801c5943ec0.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT k.key_bytes, k.encryption_version\n FROM user_keys k\n JOIN users u ON k.user_id = u.id\n WHERE u.did = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "key_bytes", 9 - "type_info": "Bytea" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "encryption_version", 14 - "type_info": "Int4" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - true 25 - ] 26 - }, 27 - "hash": "5a016f289caf75177731711e56e92881ba343c73a9a6e513e205c801c5943ec0" 28 - }
-22
.sqlx/query-5af4a386c1632903ad7102551a5bd148bcf541baab6a84c8649666a695f9c4d1.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n DELETE FROM sso_auth_state\n WHERE state = $1 AND expires_at > NOW()\n RETURNING state\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "state", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "5af4a386c1632903ad7102551a5bd148bcf541baab6a84c8649666a695f9c4d1" 22 - }
-43
.sqlx/query-5e4c0dd92ac3c4b5e2eae5d129f2649cf3a8f068105f44a8dca9625427affc06.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT provider_user_id, provider_email_verified\n FROM external_identities\n WHERE did = $1 AND provider = $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "provider_user_id", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "provider_email_verified", 14 - "type_info": "Bool" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text", 20 - { 21 - "Custom": { 22 - "name": "sso_provider_type", 23 - "kind": { 24 - "Enum": [ 25 - "github", 26 - "discord", 27 - "google", 28 - "gitlab", 29 - "oidc", 30 - "apple" 31 - ] 32 - } 33 - } 34 - } 35 - ] 36 - }, 37 - "nullable": [ 38 - false, 39 - false 40 - ] 41 - }, 42 - "hash": "5e4c0dd92ac3c4b5e2eae5d129f2649cf3a8f068105f44a8dca9625427affc06" 43 - }
-33
.sqlx/query-5e9c6ec72c2c0ea1c8dff551d01baddd1dd953c828a5656db2ee39dea996f890.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_auth_state (state, request_uri, provider, action, nonce, code_verifier)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text", 26 - "Text", 27 - "Text" 28 - ] 29 - }, 30 - "nullable": [] 31 - }, 32 - "hash": "5e9c6ec72c2c0ea1c8dff551d01baddd1dd953c828a5656db2ee39dea996f890" 33 - }
-28
.sqlx/query-63f6f2a89650794fe90e10ce7fc785a6b9f7d37c12b31a6ff13f7c5214eef19e.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT did, email_verified FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "did", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "email_verified", 14 - "type_info": "Bool" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "63f6f2a89650794fe90e10ce7fc785a6b9f7d37c12b31a6ff13f7c5214eef19e" 28 - }
-66
.sqlx/query-6c7ace2a64848adc757af6c93b9162e1d95788b372370a7ad0d7540338bb73ee.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT state, request_uri, provider as \"provider: SsoProviderType\", action, nonce, code_verifier\n FROM sso_auth_state\n WHERE state = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "state", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "request_uri", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "provider: SsoProviderType", 19 - "type_info": { 20 - "Custom": { 21 - "name": "sso_provider_type", 22 - "kind": { 23 - "Enum": [ 24 - "github", 25 - "discord", 26 - "google", 27 - "gitlab", 28 - "oidc", 29 - "apple" 30 - ] 31 - } 32 - } 33 - } 34 - }, 35 - { 36 - "ordinal": 3, 37 - "name": "action", 38 - "type_info": "Text" 39 - }, 40 - { 41 - "ordinal": 4, 42 - "name": "nonce", 43 - "type_info": "Text" 44 - }, 45 - { 46 - "ordinal": 5, 47 - "name": "code_verifier", 48 - "type_info": "Text" 49 - } 50 - ], 51 - "parameters": { 52 - "Left": [ 53 - "Text" 54 - ] 55 - }, 56 - "nullable": [ 57 - false, 58 - false, 59 - false, 60 - false, 61 - true, 62 - true 63 - ] 64 - }, 65 - "hash": "6c7ace2a64848adc757af6c93b9162e1d95788b372370a7ad0d7540338bb73ee" 66 - }
-27
.sqlx/query-6c7d5b62546ed7581edbfdb6946492a6cc41b94b04009b5602f1ef72e0138edc.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO account_backups (user_id, storage_key, repo_root_cid, repo_rev, block_count, size_bytes)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (storage_key) DO UPDATE SET created_at = NOW()\n RETURNING id\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Uuid", 15 - "Text", 16 - "Text", 17 - "Text", 18 - "Int4", 19 - "Int8" 20 - ] 21 - }, 22 - "nullable": [ 23 - false 24 - ] 25 - }, 26 - "hash": "6c7d5b62546ed7581edbfdb6946492a6cc41b94b04009b5602f1ef72e0138edc" 27 - }
-22
.sqlx/query-6fbcff0206599484bfb6cef165b6f729d27e7a342f7718ee4ac07f0ca94412ba.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT state FROM sso_auth_state WHERE state = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "state", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "6fbcff0206599484bfb6cef165b6f729d27e7a342f7718ee4ac07f0ca94412ba" 22 - }
-33
.sqlx/query-712459c27fc037f45389e2766cf1057e86e93ef756a784ed12beb453b03c5da1.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_pending_registration (token, request_uri, provider, provider_user_id, provider_username, provider_email_verified)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text", 26 - "Text", 27 - "Bool" 28 - ] 29 - }, 30 - "nullable": [] 31 - }, 32 - "hash": "712459c27fc037f45389e2766cf1057e86e93ef756a784ed12beb453b03c5da1" 33 - }
-22
.sqlx/query-785a864944c5939331704c71b0cd3ed26ffdd64f3fd0f26ecc28b6a0557bbe8f.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT subject FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' AND body = 'Email without subject' LIMIT 1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "subject", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Uuid" 15 - ] 16 - }, 17 - "nullable": [ 18 - true 19 - ] 20 - }, 21 - "hash": "785a864944c5939331704c71b0cd3ed26ffdd64f3fd0f26ecc28b6a0557bbe8f" 22 - }
-35
.sqlx/query-78be3d837cfb54d5e915ff0c5b1aaedc104776a22ad28b637de574e58d052036.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT ab.id, ab.storage_key, u.deactivated_at\n FROM account_backups ab\n JOIN users u ON u.id = ab.user_id\n WHERE ab.id = $1 AND u.did = $2\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "storage_key", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "deactivated_at", 19 - "type_info": "Timestamptz" 20 - } 21 - ], 22 - "parameters": { 23 - "Left": [ 24 - "Uuid", 25 - "Text" 26 - ] 27 - }, 28 - "nullable": [ 29 - false, 30 - false, 31 - true 32 - ] 33 - }, 34 - "hash": "78be3d837cfb54d5e915ff0c5b1aaedc104776a22ad28b637de574e58d052036" 35 - }
-22
.sqlx/query-7caa8f9083b15ec1209dda35c4c6f6fba9fe338e4a6a10636b5389d426df1631.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT t.token\n FROM plc_operation_tokens t\n JOIN users u ON t.user_id = u.id\n WHERE u.did = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "7caa8f9083b15ec1209dda35c4c6f6fba9fe338e4a6a10636b5389d426df1631" 22 - }
-28
.sqlx/query-7d24e744a4e63570b1410e50b45b745ce8915ab3715b3eff7efc2d84f27735d0.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT provider_username, last_login_at FROM external_identities WHERE id = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "provider_username", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "last_login_at", 14 - "type_info": "Timestamptz" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Uuid" 20 - ] 21 - }, 22 - "nullable": [ 23 - true, 24 - true 25 - ] 26 - }, 27 - "hash": "7d24e744a4e63570b1410e50b45b745ce8915ab3715b3eff7efc2d84f27735d0" 28 - }
-46
.sqlx/query-7d43e7e917104d7bce29459149a345145361c6940e9b8e2ccc561a0a032f63c0.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT\n u.did,\n u.handle,\n d.granted_scopes,\n d.granted_at,\n (u.deactivated_at IS NULL AND u.takedown_ref IS NULL) as \"is_active!\"\n FROM account_delegations d\n JOIN users u ON u.did = d.controller_did\n WHERE d.delegated_did = $1 AND d.revoked_at IS NULL\n ORDER BY d.granted_at DESC\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "did", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "handle", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "granted_scopes", 19 - "type_info": "Text" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "granted_at", 24 - "type_info": "Timestamptz" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "is_active!", 29 - "type_info": "Bool" 30 - } 31 - ], 32 - "parameters": { 33 - "Left": [ 34 - "Text" 35 - ] 36 - }, 37 - "nullable": [ 38 - false, 39 - false, 40 - false, 41 - false, 42 - null 43 - ] 44 - }, 45 - "hash": "7d43e7e917104d7bce29459149a345145361c6940e9b8e2ccc561a0a032f63c0" 46 - }
-52
.sqlx/query-7ec9500e28b450ab569ec50f968f5f66b7e8e80b160db9734cb78f91fc9f2cc6.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT id, repo_rev, repo_root_cid, block_count, size_bytes, created_at\n FROM account_backups\n WHERE user_id = $1\n ORDER BY created_at DESC\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "repo_rev", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "repo_root_cid", 19 - "type_info": "Text" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "block_count", 24 - "type_info": "Int4" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "size_bytes", 29 - "type_info": "Int8" 30 - }, 31 - { 32 - "ordinal": 5, 33 - "name": "created_at", 34 - "type_info": "Timestamptz" 35 - } 36 - ], 37 - "parameters": { 38 - "Left": [ 39 - "Uuid" 40 - ] 41 - }, 42 - "nullable": [ 43 - false, 44 - false, 45 - false, 46 - false, 47 - false, 48 - false 49 - ] 50 - }, 51 - "hash": "7ec9500e28b450ab569ec50f968f5f66b7e8e80b160db9734cb78f91fc9f2cc6" 52 - }
-28
.sqlx/query-82717b6f61cd79347e1ca7e92c4413743ba168d1e0d8b85566711e54d4048f81.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT t.token, t.expires_at FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "expires_at", 14 - "type_info": "Timestamptz" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "82717b6f61cd79347e1ca7e92c4413743ba168d1e0d8b85566711e54d4048f81" 28 - }
-34
.sqlx/query-85ffc37a77af832d7795f5f37efe304fced4bf56b4f2287fe9aeb3fc97e1b191.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_pending_registration (token, request_uri, provider, provider_user_id, provider_username, provider_email, provider_email_verified)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text", 26 - "Text", 27 - "Text", 28 - "Bool" 29 - ] 30 - }, 31 - "nullable": [] 32 - }, 33 - "hash": "85ffc37a77af832d7795f5f37efe304fced4bf56b4f2287fe9aeb3fc97e1b191" 34 - }
-22
.sqlx/query-9ad422bf3c43e3cfd86fc88c73594246ead214ca794760d3fe77bb5cf4f27be5.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_verification' ORDER BY created_at DESC LIMIT 1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "body", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "9ad422bf3c43e3cfd86fc88c73594246ead214ca794760d3fe77bb5cf4f27be5" 22 - }
-28
.sqlx/query-9b035b051769e6b9d45910a8bb42ac0f84c73de8c244ba4560f004ee3f4b7002.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT did, public_key_did_key FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "did", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "public_key_did_key", 14 - "type_info": "Text" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - true, 24 - false 25 - ] 26 - }, 27 - "hash": "9b035b051769e6b9d45910a8bb42ac0f84c73de8c244ba4560f004ee3f4b7002" 28 - }
-22
.sqlx/query-9dba64081d4f95b5490c9a9bf30a7175db3429f39df4f25e212f38f33882fc65.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT id FROM external_identities WHERE did = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "9dba64081d4f95b5490c9a9bf30a7175db3429f39df4f25e212f38f33882fc65" 22 - }
-66
.sqlx/query-9fd56986c1c843d386d1e5884acef8573eb55a3e9f5cb0122fcf8b93d6d667a5.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT token, request_uri, provider as \"provider: SsoProviderType\", provider_user_id,\n provider_username, provider_email\n FROM sso_pending_registration\n WHERE token = $1 AND expires_at > NOW()\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "request_uri", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "provider: SsoProviderType", 19 - "type_info": { 20 - "Custom": { 21 - "name": "sso_provider_type", 22 - "kind": { 23 - "Enum": [ 24 - "github", 25 - "discord", 26 - "google", 27 - "gitlab", 28 - "oidc", 29 - "apple" 30 - ] 31 - } 32 - } 33 - } 34 - }, 35 - { 36 - "ordinal": 3, 37 - "name": "provider_user_id", 38 - "type_info": "Text" 39 - }, 40 - { 41 - "ordinal": 4, 42 - "name": "provider_username", 43 - "type_info": "Text" 44 - }, 45 - { 46 - "ordinal": 5, 47 - "name": "provider_email", 48 - "type_info": "Text" 49 - } 50 - ], 51 - "parameters": { 52 - "Left": [ 53 - "Text" 54 - ] 55 - }, 56 - "nullable": [ 57 - false, 58 - false, 59 - false, 60 - false, 61 - true, 62 - true 63 - ] 64 - }, 65 - "hash": "9fd56986c1c843d386d1e5884acef8573eb55a3e9f5cb0122fcf8b93d6d667a5" 66 - }
-34
.sqlx/query-a23a390659616779d7dbceaa3b5d5171e70fa25e3b8393e142cebcbff752f0f5.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT private_key_bytes, expires_at, used_at FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "private_key_bytes", 9 - "type_info": "Bytea" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "expires_at", 14 - "type_info": "Timestamptz" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "used_at", 19 - "type_info": "Timestamptz" 20 - } 21 - ], 22 - "parameters": { 23 - "Left": [ 24 - "Text" 25 - ] 26 - }, 27 - "nullable": [ 28 - false, 29 - false, 30 - true 31 - ] 32 - }, 33 - "hash": "a23a390659616779d7dbceaa3b5d5171e70fa25e3b8393e142cebcbff752f0f5" 34 - }
-15
.sqlx/query-a3d549a32e76c24e265c73a98dd739067623f275de0740bd576ee288f4444496.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n UPDATE external_identities\n SET provider_username = $2, last_login_at = NOW()\n WHERE id = $1\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Uuid", 9 - "Text" 10 - ] 11 - }, 12 - "nullable": [] 13 - }, 14 - "hash": "a3d549a32e76c24e265c73a98dd739067623f275de0740bd576ee288f4444496" 15 - }
-22
.sqlx/query-a802d7d860f263eace39ce82bb27b633cec7287c1cc177f0e1d47ec6571564d5.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT token FROM account_deletion_requests WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "token", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - false 19 - ] 20 - }, 21 - "hash": "a802d7d860f263eace39ce82bb27b633cec7287c1cc177f0e1d47ec6571564d5" 22 - }
-40
.sqlx/query-a844774d8dd3c50c5faf3de5d43f534b80234759c8437434e467ca33ea10fd1f.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT preferred_comms_channel as \"preferred_comms_channel: String\", discord_username FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "preferred_comms_channel: String", 9 - "type_info": { 10 - "Custom": { 11 - "name": "comms_channel", 12 - "kind": { 13 - "Enum": [ 14 - "email", 15 - "discord", 16 - "telegram", 17 - "signal" 18 - ] 19 - } 20 - } 21 - } 22 - }, 23 - { 24 - "ordinal": 1, 25 - "name": "discord_username", 26 - "type_info": "Text" 27 - } 28 - ], 29 - "parameters": { 30 - "Left": [ 31 - "Text" 32 - ] 33 - }, 34 - "nullable": [ 35 - false, 36 - true 37 - ] 38 - }, 39 - "hash": "a844774d8dd3c50c5faf3de5d43f534b80234759c8437434e467ca33ea10fd1f" 40 - }
-28
.sqlx/query-aee3e8e1d8924d41bec7d866e274f8bb2ddef833eb03326103c2d0a17ee56154.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n DELETE FROM sso_auth_state\n WHERE state = $1 AND expires_at > NOW()\n RETURNING state, request_uri\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "state", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "request_uri", 14 - "type_info": "Text" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - false, 24 - false 25 - ] 26 - }, 27 - "hash": "aee3e8e1d8924d41bec7d866e274f8bb2ddef833eb03326103c2d0a17ee56154" 28 - }
-34
.sqlx/query-b22003e74a58303beb993547a8e2bf2415152a5546d6a10c9aebc223420b0088.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT DISTINCT b.cid, b.storage_key, b.mime_type\n FROM blobs b\n JOIN record_blobs rb ON rb.blob_cid = b.cid\n WHERE rb.repo_id = $1\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "cid", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "storage_key", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "mime_type", 19 - "type_info": "Text" 20 - } 21 - ], 22 - "parameters": { 23 - "Left": [ 24 - "Uuid" 25 - ] 26 - }, 27 - "nullable": [ 28 - false, 29 - false, 30 - false 31 - ] 32 - }, 33 - "hash": "b22003e74a58303beb993547a8e2bf2415152a5546d6a10c9aebc223420b0088" 34 - }
-31
.sqlx/query-ba9684872fad5201b8504c2606c29364a2df9631fe98817e7bfacd3f3f51f6cb.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_pending_registration (token, request_uri, provider, provider_user_id, expires_at)\n VALUES ($1, $2, $3, $4, NOW() - INTERVAL '1 hour')\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text" 26 - ] 27 - }, 28 - "nullable": [] 29 - }, 30 - "hash": "ba9684872fad5201b8504c2606c29364a2df9631fe98817e7bfacd3f3f51f6cb" 31 - }
-12
.sqlx/query-bb4460f75d30f48b79d71b97f2c7d54190260deba2d2ade177dbdaa507ab275b.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "DELETE FROM sso_auth_state WHERE expires_at < NOW()", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [] 8 - }, 9 - "nullable": [] 10 - }, 11 - "hash": "bb4460f75d30f48b79d71b97f2c7d54190260deba2d2ade177dbdaa507ab275b" 12 - }
-14
.sqlx/query-bb83d46014c20edc99ef7e81808087097e719771c6c8163e4e8e11a98bb97dde.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "DELETE FROM account_backups WHERE user_id = $1", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Uuid" 9 - ] 10 - }, 11 - "nullable": [] 12 - }, 13 - "hash": "bb83d46014c20edc99ef7e81808087097e719771c6c8163e4e8e11a98bb97dde" 14 - }
-22
.sqlx/query-bf2b237ee5cfe66d038dce3f564c05a683d391aebda7871d65d302e04b5d733f.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT deactivated_at FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "deactivated_at", 9 - "type_info": "Timestamptz" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - true 19 - ] 20 - }, 21 - "hash": "bf2b237ee5cfe66d038dce3f564c05a683d391aebda7871d65d302e04b5d733f" 22 - }
-46
.sqlx/query-c69577bc8e88e2f2cc516d6bec3d201419b589b6088ceb933cb182265b14e363.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT\n u.did,\n u.handle,\n d.granted_scopes,\n d.granted_at,\n true as \"is_active!\"\n FROM account_delegations d\n JOIN users u ON u.did = d.controller_did\n WHERE d.delegated_did = $1\n AND d.revoked_at IS NULL\n AND u.deactivated_at IS NULL\n AND u.takedown_ref IS NULL\n ORDER BY d.granted_at DESC\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "did", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "handle", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "granted_scopes", 19 - "type_info": "Text" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "granted_at", 24 - "type_info": "Timestamptz" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "is_active!", 29 - "type_info": "Bool" 30 - } 31 - ], 32 - "parameters": { 33 - "Left": [ 34 - "Text" 35 - ] 36 - }, 37 - "nullable": [ 38 - false, 39 - false, 40 - false, 41 - false, 42 - null 43 - ] 44 - }, 45 - "hash": "c69577bc8e88e2f2cc516d6bec3d201419b589b6088ceb933cb182265b14e363" 46 - }
-22
.sqlx/query-cd3b8098ad4c1056c1d23acd8a6b29f7abfe18ee6f559bd94ab16274b1cfdfee.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT password_reset_code FROM users WHERE email = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "password_reset_code", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - true 19 - ] 20 - }, 21 - "hash": "cd3b8098ad4c1056c1d23acd8a6b29f7abfe18ee6f559bd94ab16274b1cfdfee" 22 - }
-22
.sqlx/query-cda68f9b6c60295a196fc853b70ec5fd51a8ffaa2bac5942c115c99d1cbcafa3.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT COUNT(*) as \"count!\" FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "count!", 9 - "type_info": "Int8" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - null 19 - ] 20 - }, 21 - "hash": "cda68f9b6c60295a196fc853b70ec5fd51a8ffaa2bac5942c115c99d1cbcafa3" 22 - }
-31
.sqlx/query-d0d4fb4b44cda3442b20037b4d5efaa032e1d004c775e2b6077c5050d7d62041.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO sso_auth_state (state, request_uri, provider, action, expires_at)\n VALUES ($1, $2, $3, $4, NOW() - INTERVAL '1 hour')\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - "Text", 10 - { 11 - "Custom": { 12 - "name": "sso_provider_type", 13 - "kind": { 14 - "Enum": [ 15 - "github", 16 - "discord", 17 - "google", 18 - "gitlab", 19 - "oidc", 20 - "apple" 21 - ] 22 - } 23 - } 24 - }, 25 - "Text" 26 - ] 27 - }, 28 - "nullable": [] 29 - }, 30 - "hash": "d0d4fb4b44cda3442b20037b4d5efaa032e1d004c775e2b6077c5050d7d62041" 31 - }
-14
.sqlx/query-d529d6dc9858c1da360f0417e94a3b40041b043bae57e95002d4bf5df46a4ab4.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "UPDATE account_deletion_requests SET expires_at = NOW() - INTERVAL '1 hour' WHERE token = $1", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text" 9 - ] 10 - }, 11 - "nullable": [] 12 - }, 13 - "hash": "d529d6dc9858c1da360f0417e94a3b40041b043bae57e95002d4bf5df46a4ab4" 14 - }
-52
.sqlx/query-d8e646324c93b375cceccea533ddd880225931f29ce4d8c5184197fecce25fa7.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT\n d.controller_did,\n u.handle as \"handle?\",\n d.granted_scopes,\n d.granted_at,\n true as \"is_active!\",\n u.did IS NOT NULL as \"is_local!\"\n FROM account_delegations d\n LEFT JOIN users u ON u.did = d.controller_did\n WHERE d.delegated_did = $1\n AND d.revoked_at IS NULL\n AND (u.did IS NULL OR (u.deactivated_at IS NULL AND u.takedown_ref IS NULL))\n ORDER BY d.granted_at DESC\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "controller_did", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "handle?", 14 - "type_info": "Text" 15 - }, 16 - { 17 - "ordinal": 2, 18 - "name": "granted_scopes", 19 - "type_info": "Text" 20 - }, 21 - { 22 - "ordinal": 3, 23 - "name": "granted_at", 24 - "type_info": "Timestamptz" 25 - }, 26 - { 27 - "ordinal": 4, 28 - "name": "is_active!", 29 - "type_info": "Bool" 30 - }, 31 - { 32 - "ordinal": 5, 33 - "name": "is_local!", 34 - "type_info": "Bool" 35 - } 36 - ], 37 - "parameters": { 38 - "Left": [ 39 - "Text" 40 - ] 41 - }, 42 - "nullable": [ 43 - false, 44 - false, 45 - false, 46 - false, 47 - null, 48 - null 49 - ] 50 - }, 51 - "hash": "d8e646324c93b375cceccea533ddd880225931f29ce4d8c5184197fecce25fa7" 52 - }
-22
.sqlx/query-d92bfa1e67c60c4fb333b6390e49224381c1dd23c3ef09aff3a6d59eadd13c64.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n SELECT COUNT(*) as \"count!\"\n FROM account_delegations d\n JOIN users u ON u.did = d.controller_did\n WHERE d.delegated_did = $1\n AND d.revoked_at IS NULL\n AND u.deactivated_at IS NULL\n AND u.takedown_ref IS NULL\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "count!", 9 - "type_info": "Int8" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - null 19 - ] 20 - }, 21 - "hash": "d92bfa1e67c60c4fb333b6390e49224381c1dd23c3ef09aff3a6d59eadd13c64" 22 - }
-40
.sqlx/query-dd7d80d4d118a5fc95b574e2ca9ffaccf974e52fb6ac368f716409c55f9d3ab0.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO external_identities (did, provider, provider_user_id, provider_username, provider_email)\n VALUES ($1, $2, $3, $4, $5)\n RETURNING id\n ", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "id", 9 - "type_info": "Uuid" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text", 15 - { 16 - "Custom": { 17 - "name": "sso_provider_type", 18 - "kind": { 19 - "Enum": [ 20 - "github", 21 - "discord", 22 - "google", 23 - "gitlab", 24 - "oidc", 25 - "apple" 26 - ] 27 - } 28 - } 29 - }, 30 - "Text", 31 - "Text", 32 - "Text" 33 - ] 34 - }, 35 - "nullable": [ 36 - false 37 - ] 38 - }, 39 - "hash": "dd7d80d4d118a5fc95b574e2ca9ffaccf974e52fb6ac368f716409c55f9d3ab0" 40 - }
-22
.sqlx/query-e20cbe2a939d790aaea718b084a80d8ede655ba1cc0fd4346d7e91d6de7d6cf3.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT COUNT(*) FROM comms_queue WHERE user_id = $1 AND comms_type = 'password_reset'", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "count", 9 - "type_info": "Int8" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Uuid" 15 - ] 16 - }, 17 - "nullable": [ 18 - null 19 - ] 20 - }, 21 - "hash": "e20cbe2a939d790aaea718b084a80d8ede655ba1cc0fd4346d7e91d6de7d6cf3" 22 - }
-22
.sqlx/query-e64cd36284d10ab7f3d9f6959975a1a627809f444b0faff7e611d985f31b90e9.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT used_at FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "used_at", 9 - "type_info": "Timestamptz" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - true 19 - ] 20 - }, 21 - "hash": "e64cd36284d10ab7f3d9f6959975a1a627809f444b0faff7e611d985f31b90e9" 22 - }
-30
.sqlx/query-eb54d2ce02cab7c2e7f9926bd469b19e5f0513f47173b2738fc01a57082d7abb.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "\n INSERT INTO external_identities (did, provider, provider_user_id)\n VALUES ($1, $2, $3)\n ", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text", 9 - { 10 - "Custom": { 11 - "name": "sso_provider_type", 12 - "kind": { 13 - "Enum": [ 14 - "github", 15 - "discord", 16 - "google", 17 - "gitlab", 18 - "oidc", 19 - "apple" 20 - ] 21 - } 22 - } 23 - }, 24 - "Text" 25 - ] 26 - }, 27 - "nullable": [] 28 - }, 29 - "hash": "eb54d2ce02cab7c2e7f9926bd469b19e5f0513f47173b2738fc01a57082d7abb" 30 - }
-15
.sqlx/query-ec22a8cc89e480c403a239eac44288e144d83364129491de6156760616666d3d.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "DELETE FROM external_identities WHERE id = $1 AND did = $2", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Uuid", 9 - "Text" 10 - ] 11 - }, 12 - "nullable": [] 13 - }, 14 - "hash": "ec22a8cc89e480c403a239eac44288e144d83364129491de6156760616666d3d" 15 - }
-22
.sqlx/query-f26c13023b47b908ec96da2e6b8bf8b34ca6a2246c20fc96f76f0e95530762a7.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT email FROM users WHERE did = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "email", 9 - "type_info": "Text" 10 - } 11 - ], 12 - "parameters": { 13 - "Left": [ 14 - "Text" 15 - ] 16 - }, 17 - "nullable": [ 18 - true 19 - ] 20 - }, 21 - "hash": "f26c13023b47b908ec96da2e6b8bf8b34ca6a2246c20fc96f76f0e95530762a7" 22 - }
-14
.sqlx/query-f29da3bdfbbc547b339b4cdb059fac26435b0feec65cf1c56f851d1c4d6b1814.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "UPDATE users SET is_admin = TRUE WHERE did = $1", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Text" 9 - ] 10 - }, 11 - "nullable": [] 12 - }, 13 - "hash": "f29da3bdfbbc547b339b4cdb059fac26435b0feec65cf1c56f851d1c4d6b1814" 14 - }
-15
.sqlx/query-f71428b1ce982504cd531937131d49196ec092b4d13e9ae7dcdaedfe98de5a70.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "UPDATE users SET backup_enabled = $1 WHERE did = $2", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Bool", 9 - "Text" 10 - ] 11 - }, 12 - "nullable": [] 13 - }, 14 - "hash": "f71428b1ce982504cd531937131d49196ec092b4d13e9ae7dcdaedfe98de5a70" 15 - }
-28
.sqlx/query-f7af28963099aec12cf1d4f8a9a03699bb3a90f39bc9c4c0f738a37827e8f382.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "SELECT password_reset_code, password_reset_code_expires_at FROM users WHERE email = $1", 4 - "describe": { 5 - "columns": [ 6 - { 7 - "ordinal": 0, 8 - "name": "password_reset_code", 9 - "type_info": "Text" 10 - }, 11 - { 12 - "ordinal": 1, 13 - "name": "password_reset_code_expires_at", 14 - "type_info": "Timestamptz" 15 - } 16 - ], 17 - "parameters": { 18 - "Left": [ 19 - "Text" 20 - ] 21 - }, 22 - "nullable": [ 23 - true, 24 - true 25 - ] 26 - }, 27 - "hash": "f7af28963099aec12cf1d4f8a9a03699bb3a90f39bc9c4c0f738a37827e8f382" 28 - }
-14
.sqlx/query-f85f8d49bbd2d5e048bd8c29081aef5b8097e2384793e85df72eeeb858b7c532.json
··· 1 - { 2 - "db_name": "PostgreSQL", 3 - "query": "DELETE FROM account_backups WHERE id = $1", 4 - "describe": { 5 - "columns": [], 6 - "parameters": { 7 - "Left": [ 8 - "Uuid" 9 - ] 10 - }, 11 - "nullable": [] 12 - }, 13 - "hash": "f85f8d49bbd2d5e048bd8c29081aef5b8097e2384793e85df72eeeb858b7c532" 14 - }
+1 -1
Cargo.toml
··· 24 24 ] 25 25 26 26 [workspace.package] 27 - version = "0.4.3" 27 + version = "0.4.4" 28 28 edition = "2024" 29 29 license = "AGPL-3.0-or-later" 30 30
+3 -3
crates/tranquil-api/src/actor/preferences.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::{Auth, NotTakendown, Permissive}; 3 - use tranquil_pds::state::AppState; 4 1 use axum::{ 5 2 Json, 6 3 extract::State, ··· 10 7 use chrono::{Datelike, NaiveDate, Utc}; 11 8 use serde::{Deserialize, Serialize}; 12 9 use serde_json::Value; 10 + use tranquil_pds::api::error::ApiError; 11 + use tranquil_pds::auth::{Auth, NotTakendown, Permissive}; 12 + use tranquil_pds::state::AppState; 13 13 14 14 const APP_BSKY_NAMESPACE: &str = "app.bsky"; 15 15 const MAX_PREFERENCES_COUNT: usize = 100;
+5 -5
crates/tranquil-api/src/admin/account/delete.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Admin, Auth}; 4 - use tranquil_pds::state::AppState; 5 - use tranquil_pds::types::Did; 6 1 use axum::{ 7 2 Json, 8 3 extract::State, ··· 10 5 }; 11 6 use serde::Deserialize; 12 7 use tracing::warn; 8 + use tranquil_pds::api::EmptyResponse; 9 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 10 + use tranquil_pds::auth::{Admin, Auth}; 11 + use tranquil_pds::state::AppState; 12 + use tranquil_pds::types::Did; 13 13 14 14 #[derive(Deserialize)] 15 15 pub struct DeleteAccountInput {
+4 -4
crates/tranquil-api/src/admin/account/email.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::Did; 5 1 use axum::{ 6 2 Json, 7 3 extract::State, ··· 10 6 }; 11 7 use serde::{Deserialize, Serialize}; 12 8 use tracing::warn; 9 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 10 + use tranquil_pds::auth::{Admin, Auth}; 11 + use tranquil_pds::state::AppState; 12 + use tranquil_pds::types::Did; 13 13 14 14 #[derive(Deserialize)] 15 15 #[serde(rename_all = "camelCase")]
+9 -8
crates/tranquil-api/src/admin/account/info.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::{Did, Handle}; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, RawQuery, State}, ··· 10 6 }; 11 7 use serde::{Deserialize, Serialize}; 12 8 use std::collections::HashMap; 9 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 10 + use tranquil_pds::auth::{Admin, Auth}; 11 + use tranquil_pds::state::AppState; 12 + use tranquil_pds::types::{Did, Handle}; 13 13 14 14 #[derive(Deserialize)] 15 15 pub struct GetAccountInfoParams { ··· 196 196 _auth: Auth<Admin>, 197 197 RawQuery(raw_query): RawQuery, 198 198 ) -> Result<Response, ApiError> { 199 - let dids: Vec<String> = tranquil_pds::util::parse_repeated_query_param(raw_query.as_deref(), "dids") 200 - .into_iter() 201 - .filter(|d| !d.is_empty()) 202 - .collect(); 199 + let dids: Vec<String> = 200 + tranquil_pds::util::parse_repeated_query_param(raw_query.as_deref(), "dids") 201 + .into_iter() 202 + .filter(|d| !d.is_empty()) 203 + .collect(); 203 204 204 205 if dids.is_empty() { 205 206 return Err(ApiError::InvalidRequest("dids is required".into()));
+4 -4
crates/tranquil-api/src/admin/account/search.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::{Did, Handle}; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, State}, ··· 9 5 response::{IntoResponse, Response}, 10 6 }; 11 7 use serde::{Deserialize, Serialize}; 8 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 9 + use tranquil_pds::auth::{Admin, Auth}; 10 + use tranquil_pds::state::AppState; 11 + use tranquil_pds::types::{Did, Handle}; 12 12 13 13 #[derive(Deserialize)] 14 14 pub struct SearchAccountsParams {
+5 -5
crates/tranquil-api/src/admin/account/update.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::ApiError; 3 - use tranquil_pds::auth::{Admin, Auth}; 4 - use tranquil_pds::state::AppState; 5 - use tranquil_pds::types::{Did, Handle, PlainPassword}; 6 1 use axum::{ 7 2 Json, 8 3 extract::State, ··· 10 5 }; 11 6 use serde::Deserialize; 12 7 use tracing::{error, warn}; 8 + use tranquil_pds::api::EmptyResponse; 9 + use tranquil_pds::api::error::ApiError; 10 + use tranquil_pds::auth::{Admin, Auth}; 11 + use tranquil_pds::state::AppState; 12 + use tranquil_pds::types::{Did, Handle, PlainPassword}; 13 13 14 14 #[derive(Deserialize)] 15 15 pub struct UpdateAccountEmailInput {
+3 -3
crates/tranquil-api/src/admin/config.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 1 use axum::{Json, extract::State}; 5 2 use serde::{Deserialize, Serialize}; 6 3 use tracing::{error, warn}; 4 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 5 + use tranquil_pds::auth::{Admin, Auth}; 6 + use tranquil_pds::state::AppState; 7 7 use tranquil_types::CidLink; 8 8 9 9 #[derive(Serialize)]
+4 -4
crates/tranquil-api/src/admin/invite.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Admin, Auth}; 4 - use tranquil_pds::state::AppState; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, State}, ··· 11 7 use serde::{Deserialize, Serialize}; 12 8 use tracing::error; 13 9 use tranquil_db_traits::InviteCodeSortOrder; 10 + use tranquil_pds::api::EmptyResponse; 11 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 12 + use tranquil_pds::auth::{Admin, Auth}; 13 + use tranquil_pds::state::AppState; 14 14 15 15 #[derive(Deserialize)] 16 16 #[serde(rename_all = "camelCase")]
+3 -3
crates/tranquil-api/src/admin/server_stats.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 1 use axum::{ 5 2 Json, 6 3 extract::State, 7 4 response::{IntoResponse, Response}, 8 5 }; 9 6 use serde::Serialize; 7 + use tranquil_pds::api::error::ApiError; 8 + use tranquil_pds::auth::{Admin, Auth}; 9 + use tranquil_pds::state::AppState; 10 10 11 11 #[derive(Serialize)] 12 12 #[serde(rename_all = "camelCase")]
+4 -4
crates/tranquil-api/src/admin/status.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::{Admin, Auth}; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::{CidLink, Did}; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, State}, ··· 11 7 use serde::{Deserialize, Serialize}; 12 8 use serde_json::json; 13 9 use tracing::{error, warn}; 10 + use tranquil_pds::api::error::ApiError; 11 + use tranquil_pds::auth::{Admin, Auth}; 12 + use tranquil_pds::state::AppState; 13 + use tranquil_pds::types::{CidLink, Did}; 14 14 15 15 #[derive(Deserialize)] 16 16 pub struct GetSubjectStatusParams {
+4 -2
crates/tranquil-api/src/age_assurance.rs
··· 1 - use tranquil_pds::auth::{AccountRequirement, extract_auth_token_from_header, validate_token_with_dpop}; 2 - use tranquil_pds::state::AppState; 3 1 use axum::{ 4 2 Json, 5 3 extract::State, ··· 7 5 response::{IntoResponse, Response}, 8 6 }; 9 7 use serde_json::json; 8 + use tranquil_pds::auth::{ 9 + AccountRequirement, extract_auth_token_from_header, validate_token_with_dpop, 10 + }; 11 + use tranquil_pds::state::AppState; 10 12 11 13 pub async fn get_state(State(state): State<AppState>, headers: HeaderMap) -> Response { 12 14 let created_at = get_account_created_at(&state, &headers).await;
-766
crates/tranquil-api/src/backup.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::api::{EmptyResponse, EnabledResponse}; 3 - use tranquil_pds::auth::{Active, Auth}; 4 - use tranquil_pds::scheduled::generate_full_backup; 5 - use tranquil_pds::state::AppState; 6 - use tranquil_pds::storage::{BackupStorage, backup_retention_count}; 7 - use anyhow::Context; 8 - use axum::{ 9 - Json, 10 - extract::{Query, State}, 11 - http::StatusCode, 12 - response::{IntoResponse, Response}, 13 - }; 14 - use cid::Cid; 15 - use serde::{Deserialize, Serialize}; 16 - use serde_json::json; 17 - use std::str::FromStr; 18 - use tracing::{error, info, warn}; 19 - use tranquil_db::{BackupRepository, OldBackupInfo}; 20 - 21 - #[derive(Serialize)] 22 - #[serde(rename_all = "camelCase")] 23 - pub struct BackupInfo { 24 - pub id: String, 25 - pub repo_rev: String, 26 - pub repo_root_cid: String, 27 - pub block_count: i32, 28 - pub size_bytes: i64, 29 - pub created_at: String, 30 - } 31 - 32 - #[derive(Serialize)] 33 - #[serde(rename_all = "camelCase")] 34 - pub struct ListBackupsOutput { 35 - pub backups: Vec<BackupInfo>, 36 - pub backup_enabled: bool, 37 - } 38 - 39 - pub async fn list_backups( 40 - State(state): State<AppState>, 41 - auth: Auth<Active>, 42 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 43 - let (user_id, backup_enabled) = match state.backup_repo.get_user_backup_status(&auth.did).await 44 - { 45 - Ok(Some(status)) => status, 46 - Ok(None) => { 47 - return Ok(ApiError::AccountNotFound.into_response()); 48 - } 49 - Err(e) => { 50 - error!("DB error fetching user: {:?}", e); 51 - return Ok(ApiError::InternalError(None).into_response()); 52 - } 53 - }; 54 - 55 - let backups = match state.backup_repo.list_backups_for_user(user_id).await { 56 - Ok(rows) => rows, 57 - Err(e) => { 58 - error!("DB error fetching backups: {:?}", e); 59 - return Ok(ApiError::InternalError(None).into_response()); 60 - } 61 - }; 62 - 63 - let backup_list: Vec<BackupInfo> = backups 64 - .into_iter() 65 - 
.map(|b| BackupInfo { 66 - id: b.id.to_string(), 67 - repo_rev: b.repo_rev, 68 - repo_root_cid: b.repo_root_cid, 69 - block_count: b.block_count, 70 - size_bytes: b.size_bytes, 71 - created_at: b.created_at.to_rfc3339(), 72 - }) 73 - .collect(); 74 - 75 - Ok(( 76 - StatusCode::OK, 77 - Json(ListBackupsOutput { 78 - backups: backup_list, 79 - backup_enabled, 80 - }), 81 - ) 82 - .into_response()) 83 - } 84 - 85 - #[derive(Deserialize)] 86 - pub struct GetBackupQuery { 87 - pub id: String, 88 - } 89 - 90 - pub async fn get_backup( 91 - State(state): State<AppState>, 92 - auth: Auth<Active>, 93 - Query(query): Query<GetBackupQuery>, 94 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 95 - let backup_id = match uuid::Uuid::parse_str(&query.id) { 96 - Ok(id) => id, 97 - Err(_) => { 98 - return Ok(ApiError::InvalidRequest("Invalid backup ID".into()).into_response()); 99 - } 100 - }; 101 - 102 - let backup_info = match state 103 - .backup_repo 104 - .get_backup_storage_info(backup_id, &auth.did) 105 - .await 106 - { 107 - Ok(Some(b)) => b, 108 - Ok(None) => { 109 - return Ok(ApiError::BackupNotFound.into_response()); 110 - } 111 - Err(e) => { 112 - error!("DB error fetching backup: {:?}", e); 113 - return Ok(ApiError::InternalError(None).into_response()); 114 - } 115 - }; 116 - 117 - let backup_storage = match state.backup_storage.as_ref() { 118 - Some(storage) => storage, 119 - None => { 120 - return Ok(ApiError::BackupsDisabled.into_response()); 121 - } 122 - }; 123 - 124 - let car_bytes = match backup_storage.get_backup(&backup_info.storage_key).await { 125 - Ok(bytes) => bytes, 126 - Err(e) => { 127 - error!("Failed to fetch backup from storage: {:?}", e); 128 - return Ok( 129 - ApiError::InternalError(Some("Failed to retrieve backup".into())).into_response(), 130 - ); 131 - } 132 - }; 133 - 134 - Ok(( 135 - StatusCode::OK, 136 - [ 137 - (axum::http::header::CONTENT_TYPE, "application/vnd.ipld.car"), 138 - ( 139 - axum::http::header::CONTENT_DISPOSITION, 
140 - &format!("attachment; filename=\"{}.car\"", backup_info.repo_rev), 141 - ), 142 - ], 143 - car_bytes, 144 - ) 145 - .into_response()) 146 - } 147 - 148 - #[derive(Serialize)] 149 - #[serde(rename_all = "camelCase")] 150 - pub struct CreateBackupOutput { 151 - pub id: String, 152 - pub repo_rev: String, 153 - pub size_bytes: i64, 154 - pub block_count: i32, 155 - } 156 - 157 - pub async fn create_backup( 158 - State(state): State<AppState>, 159 - auth: Auth<Active>, 160 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 161 - let backup_storage = match state.backup_storage.as_ref() { 162 - Some(storage) => storage, 163 - None => { 164 - return Ok(ApiError::BackupsDisabled.into_response()); 165 - } 166 - }; 167 - 168 - let user = match state.backup_repo.get_user_for_backup(&auth.did).await { 169 - Ok(Some(u)) => u, 170 - Ok(None) => { 171 - return Ok(ApiError::AccountNotFound.into_response()); 172 - } 173 - Err(e) => { 174 - error!("DB error fetching user: {:?}", e); 175 - return Ok(ApiError::InternalError(None).into_response()); 176 - } 177 - }; 178 - 179 - if user.deactivated_at.is_some() { 180 - return Ok(ApiError::AccountDeactivated.into_response()); 181 - } 182 - 183 - let repo_rev = match &user.repo_rev { 184 - Some(rev) => rev.clone(), 185 - None => { 186 - return Ok(ApiError::RepoNotReady.into_response()); 187 - } 188 - }; 189 - 190 - let head_cid = match Cid::from_str(&user.repo_root_cid) { 191 - Ok(c) => c, 192 - Err(_) => { 193 - return Ok( 194 - ApiError::InternalError(Some("Invalid repo root CID".into())).into_response(), 195 - ); 196 - } 197 - }; 198 - 199 - let car_bytes = match generate_full_backup( 200 - state.repo_repo.as_ref(), 201 - &state.block_store, 202 - user.id, 203 - &head_cid, 204 - ) 205 - .await 206 - { 207 - Ok(bytes) => bytes, 208 - Err(e) => { 209 - error!("Failed to generate CAR: {:?}", e); 210 - return Ok( 211 - ApiError::InternalError(Some("Failed to generate backup".into())).into_response(), 212 - ); 213 - } 214 - 
}; 215 - 216 - let block_count = tranquil_pds::scheduled::count_car_blocks(&car_bytes); 217 - let size_bytes = i64::try_from(car_bytes.len()).unwrap_or(i64::MAX); 218 - 219 - let storage_key = match backup_storage 220 - .put_backup(&user.did, &repo_rev, &car_bytes) 221 - .await 222 - { 223 - Ok(key) => key, 224 - Err(e) => { 225 - error!("Failed to upload backup: {:?}", e); 226 - return Ok( 227 - ApiError::InternalError(Some("Failed to store backup".into())).into_response(), 228 - ); 229 - } 230 - }; 231 - 232 - let backup_id = match state 233 - .backup_repo 234 - .insert_backup( 235 - user.id, 236 - &storage_key, 237 - &user.repo_root_cid, 238 - &repo_rev, 239 - block_count, 240 - size_bytes, 241 - ) 242 - .await 243 - { 244 - Ok(id) => id, 245 - Err(e) => { 246 - error!("DB error inserting backup: {:?}", e); 247 - if let Err(rollback_err) = backup_storage.delete_backup(&storage_key).await { 248 - error!( 249 - storage_key = %storage_key, 250 - error = %rollback_err, 251 - "Failed to rollback orphaned backup from S3" 252 - ); 253 - } 254 - return Ok( 255 - ApiError::InternalError(Some("Failed to record backup".into())).into_response(), 256 - ); 257 - } 258 - }; 259 - 260 - info!( 261 - did = %user.did, 262 - rev = %repo_rev, 263 - size_bytes, 264 - "Created manual backup" 265 - ); 266 - 267 - let retention = backup_retention_count(); 268 - if let Err(e) = cleanup_old_backups( 269 - state.backup_repo.as_ref(), 270 - backup_storage.as_ref(), 271 - user.id, 272 - retention, 273 - ) 274 - .await 275 - { 276 - warn!(did = %user.did, error = %e, "Failed to cleanup old backups after manual backup"); 277 - } 278 - 279 - Ok(( 280 - StatusCode::OK, 281 - Json(CreateBackupOutput { 282 - id: backup_id.to_string(), 283 - repo_rev, 284 - size_bytes, 285 - block_count, 286 - }), 287 - ) 288 - .into_response()) 289 - } 290 - 291 - async fn cleanup_old_backups( 292 - backup_repo: &dyn BackupRepository, 293 - backup_storage: &dyn BackupStorage, 294 - user_id: uuid::Uuid, 295 - 
retention_count: u32, 296 - ) -> anyhow::Result<()> { 297 - let old_backups: Vec<OldBackupInfo> = backup_repo 298 - .get_old_backups(user_id, i64::from(retention_count)) 299 - .await 300 - .context("DB error fetching old backups")?; 301 - 302 - for backup in old_backups { 303 - if let Err(e) = backup_storage.delete_backup(&backup.storage_key).await { 304 - warn!( 305 - storage_key = %backup.storage_key, 306 - error = %e, 307 - "Failed to delete old backup from storage, skipping DB cleanup to avoid orphan" 308 - ); 309 - continue; 310 - } 311 - 312 - backup_repo 313 - .delete_backup(backup.id) 314 - .await 315 - .context("Failed to delete old backup record")?; 316 - } 317 - 318 - Ok(()) 319 - } 320 - 321 - #[derive(Deserialize)] 322 - pub struct DeleteBackupQuery { 323 - pub id: String, 324 - } 325 - 326 - pub async fn delete_backup( 327 - State(state): State<AppState>, 328 - auth: Auth<Active>, 329 - Query(query): Query<DeleteBackupQuery>, 330 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 331 - let backup_id = match uuid::Uuid::parse_str(&query.id) { 332 - Ok(id) => id, 333 - Err(_) => { 334 - return Ok(ApiError::InvalidRequest("Invalid backup ID".into()).into_response()); 335 - } 336 - }; 337 - 338 - let backup = match state 339 - .backup_repo 340 - .get_backup_for_deletion(backup_id, &auth.did) 341 - .await 342 - { 343 - Ok(Some(b)) => b, 344 - Ok(None) => { 345 - return Ok(ApiError::BackupNotFound.into_response()); 346 - } 347 - Err(e) => { 348 - error!("DB error fetching backup: {:?}", e); 349 - return Ok(ApiError::InternalError(None).into_response()); 350 - } 351 - }; 352 - 353 - if backup.deactivated_at.is_some() { 354 - return Ok(ApiError::AccountDeactivated.into_response()); 355 - } 356 - 357 - if let Some(backup_storage) = state.backup_storage.as_ref() 358 - && let Err(e) = backup_storage.delete_backup(&backup.storage_key).await 359 - { 360 - warn!( 361 - storage_key = %backup.storage_key, 362 - error = %e, 363 - "Failed to delete backup 
from storage (continuing anyway)" 364 - ); 365 - } 366 - 367 - if let Err(e) = state.backup_repo.delete_backup(backup.id).await { 368 - error!("DB error deleting backup: {:?}", e); 369 - return Ok(ApiError::InternalError(Some("Failed to delete backup".into())).into_response()); 370 - } 371 - 372 - info!(did = %auth.did, backup_id = %backup_id, "Deleted backup"); 373 - 374 - Ok(EmptyResponse::ok().into_response()) 375 - } 376 - 377 - #[derive(Deserialize)] 378 - #[serde(rename_all = "camelCase")] 379 - pub struct SetBackupEnabledInput { 380 - pub enabled: bool, 381 - } 382 - 383 - pub async fn set_backup_enabled( 384 - State(state): State<AppState>, 385 - auth: Auth<Active>, 386 - Json(input): Json<SetBackupEnabledInput>, 387 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 388 - let deactivated_at = match state 389 - .backup_repo 390 - .get_user_deactivated_status(&auth.did) 391 - .await 392 - { 393 - Ok(Some(status)) => status, 394 - Ok(None) => { 395 - return Ok(ApiError::AccountNotFound.into_response()); 396 - } 397 - Err(e) => { 398 - error!("DB error fetching user: {:?}", e); 399 - return Ok(ApiError::InternalError(None).into_response()); 400 - } 401 - }; 402 - 403 - if deactivated_at.is_some() { 404 - return Ok(ApiError::AccountDeactivated.into_response()); 405 - } 406 - 407 - if let Err(e) = state 408 - .backup_repo 409 - .update_backup_enabled(&auth.did, input.enabled) 410 - .await 411 - { 412 - error!("DB error updating backup_enabled: {:?}", e); 413 - return Ok( 414 - ApiError::InternalError(Some("Failed to update setting".into())).into_response(), 415 - ); 416 - } 417 - 418 - info!(did = %auth.did, enabled = input.enabled, "Updated backup_enabled setting"); 419 - 420 - Ok(EnabledResponse::response(input.enabled).into_response()) 421 - } 422 - 423 - pub async fn export_blobs( 424 - State(state): State<AppState>, 425 - auth: Auth<Active>, 426 - ) -> Result<Response, tranquil_pds::api::error::ApiError> { 427 - let user_id = match 
state.backup_repo.get_user_id_by_did(&auth.did).await { 428 - Ok(Some(id)) => id, 429 - Ok(None) => { 430 - return Ok(ApiError::AccountNotFound.into_response()); 431 - } 432 - Err(e) => { 433 - error!("DB error fetching user: {:?}", e); 434 - return Ok(ApiError::InternalError(None).into_response()); 435 - } 436 - }; 437 - 438 - let blobs = match state.backup_repo.get_blobs_for_export(user_id).await { 439 - Ok(rows) => rows, 440 - Err(e) => { 441 - error!("DB error fetching blobs: {:?}", e); 442 - return Ok(ApiError::InternalError(None).into_response()); 443 - } 444 - }; 445 - 446 - if blobs.is_empty() { 447 - return Ok(( 448 - StatusCode::OK, 449 - [ 450 - (axum::http::header::CONTENT_TYPE, "application/zip"), 451 - ( 452 - axum::http::header::CONTENT_DISPOSITION, 453 - "attachment; filename=\"blobs.zip\"", 454 - ), 455 - ], 456 - Vec::<u8>::new(), 457 - ) 458 - .into_response()); 459 - } 460 - 461 - let mut zip_buffer = std::io::Cursor::new(Vec::new()); 462 - { 463 - let mut zip = zip::ZipWriter::new(&mut zip_buffer); 464 - 465 - let options = zip::write::SimpleFileOptions::default() 466 - .compression_method(zip::CompressionMethod::Deflated); 467 - 468 - let mut exported: Vec<serde_json::Value> = Vec::new(); 469 - let mut skipped: Vec<serde_json::Value> = Vec::new(); 470 - 471 - for blob in &blobs { 472 - let blob_data = match state.blob_store.get(&blob.storage_key).await { 473 - Ok(data) => data, 474 - Err(e) => { 475 - warn!(cid = %blob.cid, error = %e, "Failed to fetch blob, skipping"); 476 - skipped.push(json!({ 477 - "cid": blob.cid, 478 - "mimeType": blob.mime_type, 479 - "reason": "fetch_failed" 480 - })); 481 - continue; 482 - } 483 - }; 484 - 485 - let extension = mime_to_extension(&blob.mime_type); 486 - let filename = format!("{}{}", blob.cid, extension); 487 - 488 - if let Err(e) = zip.start_file(&filename, options) { 489 - warn!(filename = %filename, error = %e, "Failed to start zip file entry"); 490 - skipped.push(json!({ 491 - "cid": blob.cid, 492 
- "mimeType": blob.mime_type, 493 - "reason": "zip_entry_failed" 494 - })); 495 - continue; 496 - } 497 - 498 - if let Err(e) = std::io::Write::write_all(&mut zip, &blob_data) { 499 - warn!(filename = %filename, error = %e, "Failed to write blob to zip"); 500 - skipped.push(json!({ 501 - "cid": blob.cid, 502 - "mimeType": blob.mime_type, 503 - "reason": "write_failed" 504 - })); 505 - continue; 506 - } 507 - 508 - exported.push(json!({ 509 - "cid": blob.cid, 510 - "filename": filename, 511 - "mimeType": blob.mime_type, 512 - "sizeBytes": blob_data.len() 513 - })); 514 - } 515 - 516 - let manifest = json!({ 517 - "exportedAt": chrono::Utc::now().to_rfc3339(), 518 - "totalBlobs": blobs.len(), 519 - "exportedCount": exported.len(), 520 - "skippedCount": skipped.len(), 521 - "exported": exported, 522 - "skipped": skipped 523 - }); 524 - 525 - if zip.start_file("manifest.json", options).is_ok() { 526 - let _ = std::io::Write::write_all( 527 - &mut zip, 528 - serde_json::to_string_pretty(&manifest) 529 - .unwrap_or_else(|_| "{}".to_string()) 530 - .as_bytes(), 531 - ); 532 - } 533 - 534 - if let Err(e) = zip.finish() { 535 - error!("Failed to finish zip: {:?}", e); 536 - return Ok( 537 - ApiError::InternalError(Some("Failed to create zip file".into())).into_response(), 538 - ); 539 - } 540 - } 541 - 542 - let zip_bytes = zip_buffer.into_inner(); 543 - 544 - info!(did = %auth.did, blob_count = blobs.len(), size_bytes = zip_bytes.len(), "Exported blobs"); 545 - 546 - Ok(( 547 - StatusCode::OK, 548 - [ 549 - (axum::http::header::CONTENT_TYPE, "application/zip"), 550 - ( 551 - axum::http::header::CONTENT_DISPOSITION, 552 - "attachment; filename=\"blobs.zip\"", 553 - ), 554 - ], 555 - zip_bytes, 556 - ) 557 - .into_response()) 558 - } 559 - 560 - fn mime_to_extension(mime_type: &str) -> &'static str { 561 - match mime_type { 562 - "application/font-sfnt" => ".otf", 563 - "application/font-tdpfr" => ".pfr", 564 - "application/font-woff" => ".woff", 565 - "application/gzip" => 
".gz", 566 - "application/json" => ".json", 567 - "application/json5" => ".json5", 568 - "application/jsonml+json" => ".jsonml", 569 - "application/octet-stream" => ".bin", 570 - "application/pdf" => ".pdf", 571 - "application/zip" => ".zip", 572 - "audio/aac" => ".aac", 573 - "audio/ac3" => ".ac3", 574 - "audio/aiff" => ".aiff", 575 - "audio/annodex" => ".axa", 576 - "audio/audible" => ".aa", 577 - "audio/basic" => ".au", 578 - "audio/flac" => ".flac", 579 - "audio/m4a" => ".m4a", 580 - "audio/m4b" => ".m4b", 581 - "audio/m4p" => ".m4p", 582 - "audio/mid" => ".mid", 583 - "audio/midi" => ".midi", 584 - "audio/mp4" => ".mp4a", 585 - "audio/mpeg" => ".mp3", 586 - "audio/ogg" => ".ogg", 587 - "audio/s3m" => ".s3m", 588 - "audio/scpls" => ".pls", 589 - "audio/silk" => ".sil", 590 - "audio/vnd.audible.aax" => ".aax", 591 - "audio/vnd.dece.audio" => ".uva", 592 - "audio/vnd.digital-winds" => ".eol", 593 - "audio/vnd.dlna.adts" => ".adt", 594 - "audio/vnd.dra" => ".dra", 595 - "audio/vnd.dts" => ".dts", 596 - "audio/vnd.dts.hd" => ".dtshd", 597 - "audio/vnd.lucent.voice" => ".lvp", 598 - "audio/vnd.ms-playready.media.pya" => ".pya", 599 - "audio/vnd.nuera.ecelp4800" => ".ecelp4800", 600 - "audio/vnd.nuera.ecelp7470" => ".ecelp7470", 601 - "audio/vnd.nuera.ecelp9600" => ".ecelp9600", 602 - "audio/vnd.rip" => ".rip", 603 - "audio/wav" => ".wav", 604 - "audio/webm" => ".weba", 605 - "audio/x-caf" => ".caf", 606 - "audio/x-gsm" => ".gsm", 607 - "audio/x-m4r" => ".m4r", 608 - "audio/x-matroska" => ".mka", 609 - "audio/x-mpegurl" => ".m3u", 610 - "audio/x-ms-wax" => ".wax", 611 - "audio/x-ms-wma" => ".wma", 612 - "audio/x-pn-realaudio" => ".ra", 613 - "audio/x-pn-realaudio-plugin" => ".rpm", 614 - "audio/x-sd2" => ".sd2", 615 - "audio/x-smd" => ".smd", 616 - "audio/xm" => ".xm", 617 - "font/collection" => ".ttc", 618 - "font/ttf" => ".ttf", 619 - "font/woff" => ".woff", 620 - "font/woff2" => ".woff2", 621 - "image/apng" => ".apng", 622 - "image/avif" => ".avif", 623 - 
"image/avif-sequence" => ".avifs", 624 - "image/bmp" => ".bmp", 625 - "image/cgm" => ".cgm", 626 - "image/cis-cod" => ".cod", 627 - "image/g3fax" => ".g3", 628 - "image/gif" => ".gif", 629 - "image/heic" => ".heic", 630 - "image/heic-sequence" => ".heics", 631 - "image/heif" => ".heif", 632 - "image/heif-sequence" => ".heifs", 633 - "image/ief" => ".ief", 634 - "image/jp2" => ".jp2", 635 - "image/jpeg" => ".jpg", 636 - "image/jpm" => ".jpm", 637 - "image/jpx" => ".jpf", 638 - "image/jxl" => ".jxl", 639 - "image/ktx" => ".ktx", 640 - "image/pict" => ".pct", 641 - "image/png" => ".png", 642 - "image/prs.btif" => ".btif", 643 - "image/qoi" => ".qoi", 644 - "image/sgi" => ".sgi", 645 - "image/svg+xml" => ".svg", 646 - "image/tiff" => ".tiff", 647 - "image/vnd.dece.graphic" => ".uvg", 648 - "image/vnd.djvu" => ".djv", 649 - "image/vnd.fastbidsheet" => ".fbs", 650 - "image/vnd.fpx" => ".fpx", 651 - "image/vnd.fst" => ".fst", 652 - "image/vnd.fujixerox.edmics-mmr" => ".mmr", 653 - "image/vnd.fujixerox.edmics-rlc" => ".rlc", 654 - "image/vnd.ms-modi" => ".mdi", 655 - "image/vnd.ms-photo" => ".wdp", 656 - "image/vnd.net-fpx" => ".npx", 657 - "image/vnd.radiance" => ".hdr", 658 - "image/vnd.rn-realflash" => ".rf", 659 - "image/vnd.wap.wbmp" => ".wbmp", 660 - "image/vnd.xiff" => ".xif", 661 - "image/webp" => ".webp", 662 - "image/x-3ds" => ".3ds", 663 - "image/x-adobe-dng" => ".dng", 664 - "image/x-canon-cr2" => ".cr2", 665 - "image/x-canon-cr3" => ".cr3", 666 - "image/x-canon-crw" => ".crw", 667 - "image/x-cmu-raster" => ".ras", 668 - "image/x-cmx" => ".cmx", 669 - "image/x-epson-erf" => ".erf", 670 - "image/x-freehand" => ".fh", 671 - "image/x-fuji-raf" => ".raf", 672 - "image/x-icon" => ".ico", 673 - "image/x-jg" => ".art", 674 - "image/x-jng" => ".jng", 675 - "image/x-kodak-dcr" => ".dcr", 676 - "image/x-kodak-k25" => ".k25", 677 - "image/x-kodak-kdc" => ".kdc", 678 - "image/x-macpaint" => ".mac", 679 - "image/x-minolta-mrw" => ".mrw", 680 - "image/x-mrsid-image" => 
".sid", 681 - "image/x-nikon-nef" => ".nef", 682 - "image/x-nikon-nrw" => ".nrw", 683 - "image/x-olympus-orf" => ".orf", 684 - "image/x-panasonic-rw" => ".raw", 685 - "image/x-panasonic-rw2" => ".rw2", 686 - "image/x-pentax-pef" => ".pef", 687 - "image/x-portable-anymap" => ".pnm", 688 - "image/x-portable-bitmap" => ".pbm", 689 - "image/x-portable-graymap" => ".pgm", 690 - "image/x-portable-pixmap" => ".ppm", 691 - "image/x-qoi" => ".qoi", 692 - "image/x-quicktime" => ".qti", 693 - "image/x-rgb" => ".rgb", 694 - "image/x-sigma-x3f" => ".x3f", 695 - "image/x-sony-arw" => ".arw", 696 - "image/x-sony-sr2" => ".sr2", 697 - "image/x-sony-srf" => ".srf", 698 - "image/x-tga" => ".tga", 699 - "image/x-xbitmap" => ".xbm", 700 - "image/x-xcf" => ".xcf", 701 - "image/x-xpixmap" => ".xpm", 702 - "image/x-xwindowdump" => ".xwd", 703 - "model/gltf+json" => ".gltf", 704 - "model/gltf-binary" => ".glb", 705 - "model/iges" => ".igs", 706 - "model/mesh" => ".msh", 707 - "model/vnd.collada+xml" => ".dae", 708 - "model/vnd.gdl" => ".gdl", 709 - "model/vnd.gtw" => ".gtw", 710 - "model/vnd.vtu" => ".vtu", 711 - "model/vrml" => ".vrml", 712 - "model/x3d+binary" => ".x3db", 713 - "model/x3d+vrml" => ".x3dv", 714 - "model/x3d+xml" => ".x3d", 715 - "text/css" => ".css", 716 - "text/html" => ".html", 717 - "text/plain" => ".txt", 718 - "video/3gpp" => ".3gp", 719 - "video/3gpp2" => ".3g2", 720 - "video/annodex" => ".axv", 721 - "video/divx" => ".divx", 722 - "video/h261" => ".h261", 723 - "video/h263" => ".h263", 724 - "video/h264" => ".h264", 725 - "video/jpeg" => ".jpgv", 726 - "video/jpm" => ".jpgm", 727 - "video/mj2" => ".mj2", 728 - "video/mp4" => ".mp4", 729 - "video/mpeg" => ".mpg", 730 - "video/ogg" => ".ogv", 731 - "video/quicktime" => ".mov", 732 - "video/vnd.dece.hd" => ".uvh", 733 - "video/vnd.dece.mobile" => ".uvm", 734 - "video/vnd.dece.pd" => ".uvp", 735 - "video/vnd.dece.sd" => ".uvs", 736 - "video/vnd.dece.video" => ".uvv", 737 - "video/vnd.dlna.mpeg-tts" => ".ts", 738 - 
"video/vnd.dvb.file" => ".dvb", 739 - "video/vnd.fvt" => ".fvt", 740 - "video/vnd.mpegurl" => ".m4u", 741 - "video/vnd.ms-playready.media.pyv" => ".pyv", 742 - "video/vnd.uvvu.mp4" => ".uvu", 743 - "video/vnd.vivo" => ".viv", 744 - "video/webm" => ".webm", 745 - "video/x-dv" => ".dv", 746 - "video/x-f4v" => ".f4v", 747 - "video/x-fli" => ".fli", 748 - "video/x-flv" => ".flv", 749 - "video/x-ivf" => ".ivf", 750 - "video/x-la-asf" => ".lsf", 751 - "video/x-m4v" => ".m4v", 752 - "video/x-matroska" => ".mkv", 753 - "video/x-mng" => ".mng", 754 - "video/x-ms-asf" => ".asf", 755 - "video/x-ms-vob" => ".vob", 756 - "video/x-ms-wm" => ".wm", 757 - "video/x-ms-wmp" => ".wmp", 758 - "video/x-ms-wmv" => ".wmv", 759 - "video/x-ms-wmx" => ".wmx", 760 - "video/x-ms-wvx" => ".wvx", 761 - "video/x-msvideo" => ".avi", 762 - "video/x-sgi-movie" => ".movie", 763 - "video/x-smv" => ".smv", 764 - _ => ".bin", 765 - } 766 - }
+48 -43
crates/tranquil-api/src/delegation.rs
··· 1 1 use crate::identity::provision::{create_plc_did, init_genesis_repo}; 2 - use tranquil_pds::api::error::ApiError; 3 - use tranquil_pds::auth::{Active, Auth}; 4 - use tranquil_pds::delegation::{ 5 - DelegationActionType, SCOPE_PRESETS, ValidatedDelegationScope, verify_can_add_controllers, 6 - verify_can_control_accounts, 7 - }; 8 - use tranquil_pds::rate_limit::{AccountCreationLimit, RateLimited}; 9 - use tranquil_pds::state::AppState; 10 - use tranquil_pds::types::{Did, Handle}; 11 2 use axum::{ 12 3 Json, 13 4 extract::{Query, State}, ··· 17 8 use serde::{Deserialize, Serialize}; 18 9 use serde_json::json; 19 10 use tracing::{error, info, warn}; 11 + use tranquil_pds::api::error::ApiError; 12 + use tranquil_pds::auth::{Active, Auth}; 13 + use tranquil_pds::delegation::{ 14 + DelegationActionType, SCOPE_PRESETS, ValidatedDelegationScope, verify_can_add_controllers, 15 + verify_can_control_accounts, 16 + }; 17 + use tranquil_pds::rate_limit::{AccountCreationLimit, RateLimited}; 18 + use tranquil_pds::state::AppState; 19 + use tranquil_pds::types::{Did, Handle}; 20 20 21 21 pub async fn list_controllers( 22 22 State(state): State<AppState>, ··· 70 70 .await 71 71 .ok_or(ApiError::ControllerNotFound)?; 72 72 73 - if !resolved.is_local { 74 - if let Some(ref pds_url) = resolved.pds_url { 75 - if !pds_url.starts_with("https://") { 76 - return Ok( 77 - ApiError::InvalidDelegation("Controller PDS must use HTTPS".into()) 78 - .into_response(), 79 - ); 73 + if !resolved.is_local 74 + && let Some(ref pds_url) = resolved.pds_url 75 + { 76 + if !pds_url.starts_with("https://") { 77 + return Ok( 78 + ApiError::InvalidDelegation("Controller PDS must use HTTPS".into()).into_response(), 79 + ); 80 + } 81 + match state 82 + .cross_pds_oauth 83 + .check_remote_is_delegated(pds_url, input.controller_did.as_str()) 84 + .await 85 + { 86 + Some(true) => { 87 + return Ok(ApiError::InvalidDelegation( 88 + "Cannot add a delegated account from another PDS as a controller".into(), 89 
+ ) 90 + .into_response()); 80 91 } 81 - match state 82 - .cross_pds_oauth 83 - .check_remote_is_delegated(pds_url, input.controller_did.as_str()) 84 - .await 85 - { 86 - Some(true) => { 87 - return Ok(ApiError::InvalidDelegation( 88 - "Cannot add a delegated account from another PDS as a controller".into(), 89 - ) 90 - .into_response()); 91 - } 92 - Some(false) => {} 93 - None => { 94 - warn!( 95 - controller = %input.controller_did, 96 - pds = %pds_url, 97 - "Could not verify remote controller delegation status" 98 - ); 99 - } 92 + Some(false) => {} 93 + None => { 94 + warn!( 95 + controller = %input.controller_did, 96 + pds = %pds_url, 97 + "Could not verify remote controller delegation status" 98 + ); 100 99 } 101 100 } 102 101 } ··· 106 105 Err(response) => return Ok(response), 107 106 }; 108 107 109 - if resolved.is_local { 110 - if state.delegation_repo.is_delegated_account(&input.controller_did).await.unwrap_or(false) { 111 - return Ok(ApiError::InvalidDelegation( 112 - "Cannot add a controlled account as a controller".into(), 113 - ).into_response()); 114 - } 108 + if resolved.is_local 109 + && state 110 + .delegation_repo 111 + .is_delegated_account(&input.controller_did) 112 + .await 113 + .unwrap_or(false) 114 + { 115 + return Ok(ApiError::InvalidDelegation( 116 + "Cannot add a controlled account as a controller".into(), 117 + ) 118 + .into_response()); 115 119 } 116 120 117 121 match state ··· 511 515 let identifier = params.identifier.trim().trim_start_matches('@'); 512 516 513 517 let did: Did = if identifier.starts_with("did:") { 514 - identifier.parse().map_err(|_| ApiError::ControllerNotFound)? 518 + identifier 519 + .parse() 520 + .map_err(|_| ApiError::ControllerNotFound)? 515 521 } else { 516 522 let local_handle: Option<Handle> = identifier.parse().ok(); 517 523 let local_user = match local_handle { ··· 534 540 535 541 Ok(Json(resolved).into_response()) 536 542 } 537 -
+33 -27
crates/tranquil-api/src/identity/account.rs
··· 1 1 use super::did::verify_did_web; 2 - use tranquil_pds::api::error::ApiError; 3 - use tranquil_pds::auth::{ServiceTokenVerifier, extract_auth_token_from_header, is_service_token}; 4 - use tranquil_pds::rate_limit::{AccountCreationLimit, RateLimited}; 5 - use tranquil_pds::state::AppState; 6 - use tranquil_pds::types::{Did, Handle, PlainPassword}; 7 - use tranquil_pds::validation::validate_password; 8 2 use axum::{ 9 3 Json, 10 4 extract::State, ··· 17 11 use serde::{Deserialize, Serialize}; 18 12 use serde_json::json; 19 13 use tracing::{debug, error, info, warn}; 14 + use tranquil_pds::api::error::ApiError; 15 + use tranquil_pds::auth::{ServiceTokenVerifier, extract_auth_token_from_header, is_service_token}; 16 + use tranquil_pds::rate_limit::{AccountCreationLimit, RateLimited}; 17 + use tranquil_pds::state::AppState; 18 + use tranquil_pds::types::{Did, Handle, PlainPassword}; 19 + use tranquil_pds::validation::validate_password; 20 20 21 21 #[derive(Deserialize)] 22 22 #[serde(rename_all = "camelCase")] ··· 335 335 .into_response(); 336 336 } 337 337 }; 338 - let access_meta = 339 - match tranquil_pds::auth::create_access_token_with_metadata(&did, &secret_key_bytes) { 340 - Ok(m) => m, 341 - Err(e) => { 342 - error!("Error creating access token: {:?}", e); 343 - return ApiError::InternalError(None).into_response(); 344 - } 345 - }; 338 + let access_meta = match tranquil_pds::auth::create_access_token_with_metadata( 339 + &did, 340 + &secret_key_bytes, 341 + ) { 342 + Ok(m) => m, 343 + Err(e) => { 344 + error!("Error creating access token: {:?}", e); 345 + return ApiError::InternalError(None).into_response(); 346 + } 347 + }; 346 348 let refresh_meta = match tranquil_pds::auth::create_refresh_token_with_metadata( 347 349 &did, 348 350 &secret_key_bytes, ··· 646 648 verification_channel, 647 649 recipient, 648 650 ); 649 - let formatted_token = 650 - tranquil_pds::auth::verification_token::format_token_for_display(&verification_token); 651 + let 
formatted_token = tranquil_pds::auth::verification_token::format_token_for_display( 652 + &verification_token, 653 + ); 651 654 if let Err(e) = tranquil_pds::comms::comms_repo::enqueue_signup_verification( 652 655 state.user_repo.as_ref(), 653 656 state.infra_repo.as_ref(), ··· 666 669 } 667 670 } 668 671 } else if let Some(ref user_email) = email { 669 - let token = 670 - tranquil_pds::auth::verification_token::generate_migration_token(&did_for_commit, user_email); 671 - let formatted_token = tranquil_pds::auth::verification_token::format_token_for_display(&token); 672 + let token = tranquil_pds::auth::verification_token::generate_migration_token( 673 + &did_for_commit, 674 + user_email, 675 + ); 676 + let formatted_token = 677 + tranquil_pds::auth::verification_token::format_token_for_display(&token); 672 678 if let Err(e) = tranquil_pds::comms::comms_repo::enqueue_migration_verification( 673 679 state.user_repo.as_ref(), 674 680 state.infra_repo.as_ref(), ··· 683 689 } 684 690 } 685 691 686 - let access_meta = match tranquil_pds::auth::create_access_token_with_metadata(&did, &secret_key_bytes) 687 - { 688 - Ok(m) => m, 689 - Err(e) => { 690 - error!("createAccount: Error creating access token: {:?}", e); 691 - return ApiError::InternalError(None).into_response(); 692 - } 693 - }; 692 + let access_meta = 693 + match tranquil_pds::auth::create_access_token_with_metadata(&did, &secret_key_bytes) { 694 + Ok(m) => m, 695 + Err(e) => { 696 + error!("createAccount: Error creating access token: {:?}", e); 697 + return ApiError::InternalError(None).into_response(); 698 + } 699 + }; 694 700 let refresh_meta = 695 701 match tranquil_pds::auth::create_refresh_token_with_metadata(&did, &secret_key_bytes) { 696 702 Ok(m) => m,
+13 -11
crates/tranquil-api/src/identity/did.rs
··· 1 - use tranquil_pds::api::{ApiError, DidResponse, EmptyResponse}; 2 - use tranquil_pds::auth::{Auth, NotTakendown}; 3 - use tranquil_pds::plc::signing_key_to_did_key; 4 - use tranquil_pds::rate_limit::{ 5 - HandleUpdateDailyLimit, HandleUpdateLimit, check_user_rate_limit_with_message, 6 - }; 7 - use tranquil_pds::state::AppState; 8 - use tranquil_pds::types::Handle; 9 - use tranquil_pds::util::get_header_str; 10 1 use axum::{ 11 2 Json, 12 3 extract::{Path, Query, State}, ··· 19 10 use serde::{Deserialize, Serialize}; 20 11 use serde_json::json; 21 12 use tracing::{error, warn}; 13 + use tranquil_pds::api::{ApiError, DidResponse, EmptyResponse}; 14 + use tranquil_pds::auth::{Auth, NotTakendown}; 15 + use tranquil_pds::plc::signing_key_to_did_key; 16 + use tranquil_pds::rate_limit::{ 17 + HandleUpdateDailyLimit, HandleUpdateLimit, check_user_rate_limit_with_message, 18 + }; 19 + use tranquil_pds::state::AppState; 20 + use tranquil_pds::types::Handle; 21 + use tranquil_pds::util::get_header_str; 22 22 23 23 #[derive(Debug, Clone, Serialize, Deserialize)] 24 24 #[serde(rename_all = "camelCase")] ··· 503 503 ))?; 504 504 let pds_endpoint = format!("https://{}", hostname); 505 505 let has_valid_service = services.iter().any(|s| { 506 - s["type"] == tranquil_pds::plc::ServiceType::Pds.as_str() && s["serviceEndpoint"] == pds_endpoint 506 + s["type"] == tranquil_pds::plc::ServiceType::Pds.as_str() 507 + && s["serviceEndpoint"] == pds_endpoint 507 508 }); 508 509 if !has_valid_service { 509 510 return Err(DidWebVerifyError::PdsNotListed(pds_endpoint)); ··· 811 812 Some(r) => r, 812 813 None => return Ok(()), 813 814 }; 814 - let key_bytes = tranquil_pds::config::decrypt_key(&user_row.key_bytes, user_row.encryption_version)?; 815 + let key_bytes = 816 + tranquil_pds::config::decrypt_key(&user_row.key_bytes, user_row.encryption_version)?; 815 817 let signing_key = k256::ecdsa::SigningKey::from_slice(&key_bytes)?; 816 818 let plc_client = state.plc_client(); 817 819 let 
last_op = plc_client.get_last_op(did).await?;
+2 -2
crates/tranquil-api/src/identity/handle.rs
··· 1 - use tranquil_pds::rate_limit::{HandleVerificationLimit, RateLimited}; 2 - use tranquil_pds::types::{Did, Handle}; 3 1 use axum::{ 4 2 Json, 5 3 response::{IntoResponse, Response}, 6 4 }; 7 5 use serde::{Deserialize, Serialize}; 6 + use tranquil_pds::rate_limit::{HandleVerificationLimit, RateLimited}; 7 + use tranquil_pds::types::{Did, Handle}; 8 8 9 9 #[derive(Deserialize)] 10 10 pub struct VerifyHandleOwnershipInput {
+4 -4
crates/tranquil-api/src/identity/plc/request.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Auth, Permissive}; 4 - use tranquil_pds::state::AppState; 5 1 use axum::{ 6 2 extract::State, 7 3 response::{IntoResponse, Response}, 8 4 }; 9 5 use chrono::{Duration, Utc}; 10 6 use tracing::{info, warn}; 7 + use tranquil_pds::api::EmptyResponse; 8 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 9 + use tranquil_pds::auth::{Auth, Permissive}; 10 + use tranquil_pds::state::AppState; 11 11 12 12 fn generate_plc_token() -> String { 13 13 tranquil_pds::util::generate_token_code()
+13 -11
crates/tranquil-api/src/identity/plc/sign.rs
··· 1 - use tranquil_pds::api::ApiError; 2 - use tranquil_pds::api::error::DbResultExt; 3 - use tranquil_pds::auth::{Auth, Permissive}; 4 - use tranquil_pds::circuit_breaker::with_circuit_breaker; 5 - use tranquil_pds::plc::{PlcError, PlcService, ServiceType, create_update_op, sign_operation}; 6 - use tranquil_pds::state::AppState; 7 1 use axum::{ 8 2 Json, 9 3 extract::State, ··· 16 10 use serde_json::Value; 17 11 use std::collections::HashMap; 18 12 use tracing::{error, info}; 13 + use tranquil_pds::api::ApiError; 14 + use tranquil_pds::api::error::DbResultExt; 15 + use tranquil_pds::auth::{Auth, Permissive}; 16 + use tranquil_pds::circuit_breaker::with_circuit_breaker; 17 + use tranquil_pds::plc::{PlcError, PlcService, ServiceType, create_update_op, sign_operation}; 18 + use tranquil_pds::state::AppState; 19 19 20 20 #[derive(Debug, Deserialize)] 21 21 #[serde(rename_all = "camelCase")] ··· 86 86 .log_db_err("fetching user key")? 87 87 .ok_or_else(|| ApiError::InternalError(Some("User signing key not found".into())))?; 88 88 89 - let key_bytes = tranquil_pds::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) 90 - .map_err(|e| { 91 - error!("Failed to decrypt user key: {}", e); 92 - ApiError::InternalError(None) 93 - })?; 89 + let key_bytes = 90 + tranquil_pds::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version).map_err( 91 + |e| { 92 + error!("Failed to decrypt user key: {}", e); 93 + ApiError::InternalError(None) 94 + }, 95 + )?; 94 96 95 97 let signing_key = SigningKey::from_slice(&key_bytes).map_err(|e| { 96 98 error!("Failed to create signing key: {:?}", e);
+13 -11
crates/tranquil-api/src/identity/plc/submit.rs
··· 1 - use tranquil_pds::api::error::DbResultExt; 2 - use tranquil_pds::api::{ApiError, EmptyResponse}; 3 - use tranquil_pds::auth::{Auth, Permissive}; 4 - use tranquil_pds::circuit_breaker::with_circuit_breaker; 5 - use tranquil_pds::plc::{signing_key_to_did_key, validate_plc_operation}; 6 - use tranquil_pds::state::AppState; 7 1 use axum::{ 8 2 Json, 9 3 extract::State, ··· 13 7 use serde::Deserialize; 14 8 use serde_json::Value; 15 9 use tracing::{error, info, warn}; 10 + use tranquil_pds::api::error::DbResultExt; 11 + use tranquil_pds::api::{ApiError, EmptyResponse}; 12 + use tranquil_pds::auth::{Auth, Permissive}; 13 + use tranquil_pds::circuit_breaker::with_circuit_breaker; 14 + use tranquil_pds::plc::{signing_key_to_did_key, validate_plc_operation}; 15 + use tranquil_pds::state::AppState; 16 16 17 17 #[derive(Debug, Deserialize)] 18 18 pub struct SubmitPlcOperationInput { ··· 57 57 .log_db_err("fetching user key")? 58 58 .ok_or_else(|| ApiError::InternalError(Some("User signing key not found".into())))?; 59 59 60 - let key_bytes = tranquil_pds::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) 61 - .map_err(|e| { 62 - error!("Failed to decrypt user key: {}", e); 63 - ApiError::InternalError(None) 64 - })?; 60 + let key_bytes = 61 + tranquil_pds::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version).map_err( 62 + |e| { 63 + error!("Failed to decrypt user key: {}", e); 64 + ApiError::InternalError(None) 65 + }, 66 + )?; 65 67 66 68 let signing_key = SigningKey::from_slice(&key_bytes).map_err(|e| { 67 69 error!("Failed to create signing key: {:?}", e);
+19 -14
crates/tranquil-api/src/identity/provision.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::repo_ops::create_signed_commit; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::Did; 5 1 use jacquard_common::types::{integer::LimitedU32, string::Tid}; 6 2 use jacquard_repo::{mst::Mst, storage::BlockStore}; 7 3 use k256::ecdsa::SigningKey; 8 4 use std::sync::Arc; 5 + use tranquil_pds::api::error::ApiError; 6 + use tranquil_pds::repo_ops::create_signed_commit; 7 + use tranquil_pds::state::AppState; 8 + use tranquil_pds::types::Did; 9 9 10 10 pub struct PlcDidResult { 11 11 pub did: Did, ··· 50 50 .clone() 51 51 .unwrap_or_else(|| tranquil_pds::plc::signing_key_to_did_key(signing_key)); 52 52 53 - let genesis_result = 54 - tranquil_pds::plc::create_genesis_operation(signing_key, &rotation_key, handle, &pds_endpoint) 55 - .map_err(|e| { 56 - tracing::error!("Error creating PLC genesis operation: {:?}", e); 57 - ApiError::InternalError(Some("Failed to create PLC operation".into())) 58 - })?; 53 + let genesis_result = tranquil_pds::plc::create_genesis_operation( 54 + signing_key, 55 + &rotation_key, 56 + handle, 57 + &pds_endpoint, 58 + ) 59 + .map_err(|e| { 60 + tracing::error!("Error creating PLC genesis operation: {:?}", e); 61 + ApiError::InternalError(Some("Failed to create PLC operation".into())) 62 + })?; 59 63 60 64 state 61 65 .plc_client() ··· 84 88 signing_key: &SigningKey, 85 89 signing_key_bytes: &[u8], 86 90 ) -> Result<GenesisRepo, ApiError> { 87 - let encrypted_key_bytes = tranquil_pds::config::encrypt_key(signing_key_bytes).map_err(|e| { 88 - tracing::error!("Error encrypting signing key: {:?}", e); 89 - ApiError::InternalError(None) 90 - })?; 91 + let encrypted_key_bytes = 92 + tranquil_pds::config::encrypt_key(signing_key_bytes).map_err(|e| { 93 + tracing::error!("Error encrypting signing key: {:?}", e); 94 + ApiError::InternalError(None) 95 + })?; 91 96 92 97 let mst = Mst::new(Arc::new(state.block_store.clone())); 93 98 let mst_root = 
mst.persist().await.map_err(|e| {
+304 -109
crates/tranquil-api/src/lib.rs
··· 1 1 pub mod actor; 2 2 pub mod admin; 3 3 pub mod age_assurance; 4 - pub mod backup; 5 4 pub mod delegation; 6 5 pub mod discord_webhook; 7 6 pub mod identity; ··· 32 31 "/com.atproto.server.createSession", 33 32 post(server::create_session), 34 33 ) 35 - .route( 36 - "/com.atproto.server.getSession", 37 - get(server::get_session), 38 - ) 34 + .route("/com.atproto.server.getSession", get(server::get_session)) 39 35 .route("/_account.listSessions", get(server::list_sessions)) 40 36 .route("/_account.revokeSession", post(server::revoke_session)) 41 37 .route( ··· 66 62 "/com.atproto.identity.resolveHandle", 67 63 get(identity::resolve_handle), 68 64 ) 69 - .route( 70 - "/com.atproto.repo.createRecord", 71 - post(repo::create_record), 72 - ) 65 + .route("/com.atproto.repo.createRecord", post(repo::create_record)) 73 66 .route("/com.atproto.repo.putRecord", post(repo::put_record)) 74 67 .route("/com.atproto.repo.getRecord", get(repo::get_record)) 75 - .route( 76 - "/com.atproto.repo.deleteRecord", 77 - post(repo::delete_record), 78 - ) 79 - .route( 80 - "/com.atproto.repo.listRecords", 81 - get(repo::list_records), 82 - ) 83 - .route( 84 - "/com.atproto.repo.describeRepo", 85 - get(repo::describe_repo), 86 - ) 68 + .route("/com.atproto.repo.deleteRecord", post(repo::delete_record)) 69 + .route("/com.atproto.repo.listRecords", get(repo::list_records)) 70 + .route("/com.atproto.repo.describeRepo", get(repo::describe_repo)) 87 71 .route("/com.atproto.repo.uploadBlob", post(repo::upload_blob)) 88 - .route( 89 - "/com.atproto.repo.applyWrites", 90 - post(repo::apply_writes), 91 - ) 72 + .route("/com.atproto.repo.applyWrites", post(repo::apply_writes)) 92 73 .route( 93 74 "/com.atproto.server.checkAccountStatus", 94 75 get(server::check_account_status), ··· 144 125 .route("/_account.changePassword", post(server::change_password)) 145 126 .route("/_account.removePassword", post(server::remove_password)) 146 127 .route("/_account.setPassword", post(server::set_password)) 
147 - .route("/_account.getPasswordStatus", get(server::get_password_status)) 128 + .route( 129 + "/_account.getPasswordStatus", 130 + get(server::get_password_status), 131 + ) 148 132 .route("/_account.getReauthStatus", get(server::get_reauth_status)) 149 133 .route("/_account.reauthPassword", post(server::reauth_password)) 150 134 .route("/_account.reauthTotp", post(server::reauth_totp)) 151 - .route("/_account.reauthPasskeyStart", post(server::reauth_passkey_start)) 152 - .route("/_account.reauthPasskeyFinish", post(server::reauth_passkey_finish)) 153 - .route("/_account.getLegacyLoginPreference", get(server::get_legacy_login_preference)) 154 - .route("/_account.updateLegacyLoginPreference", post(server::update_legacy_login_preference)) 135 + .route( 136 + "/_account.reauthPasskeyStart", 137 + post(server::reauth_passkey_start), 138 + ) 139 + .route( 140 + "/_account.reauthPasskeyFinish", 141 + post(server::reauth_passkey_finish), 142 + ) 143 + .route( 144 + "/_account.getLegacyLoginPreference", 145 + get(server::get_legacy_login_preference), 146 + ) 147 + .route( 148 + "/_account.updateLegacyLoginPreference", 149 + post(server::update_legacy_login_preference), 150 + ) 155 151 .route("/_account.updateLocale", post(server::update_locale)) 156 - .route("/_account.listTrustedDevices", get(server::list_trusted_devices)) 157 - .route("/_account.revokeTrustedDevice", post(server::revoke_trusted_device)) 158 - .route("/_account.updateTrustedDevice", post(server::update_trusted_device)) 159 - .route("/_account.createPasskeyAccount", post(server::create_passkey_account)) 160 - .route("/_account.startPasskeyRegistrationForSetup", post(server::start_passkey_registration_for_setup)) 161 - .route("/_account.completePasskeySetup", post(server::complete_passkey_setup)) 162 - .route("/_account.requestPasskeyRecovery", post(server::request_passkey_recovery)) 163 - .route("/_account.recoverPasskeyAccount", post(server::recover_passkey_account)) 164 - 
.route("/_account.updateDidDocument", post(server::update_did_document)) 152 + .route( 153 + "/_account.listTrustedDevices", 154 + get(server::list_trusted_devices), 155 + ) 156 + .route( 157 + "/_account.revokeTrustedDevice", 158 + post(server::revoke_trusted_device), 159 + ) 160 + .route( 161 + "/_account.updateTrustedDevice", 162 + post(server::update_trusted_device), 163 + ) 164 + .route( 165 + "/_account.createPasskeyAccount", 166 + post(server::create_passkey_account), 167 + ) 168 + .route( 169 + "/_account.startPasskeyRegistrationForSetup", 170 + post(server::start_passkey_registration_for_setup), 171 + ) 172 + .route( 173 + "/_account.completePasskeySetup", 174 + post(server::complete_passkey_setup), 175 + ) 176 + .route( 177 + "/_account.requestPasskeyRecovery", 178 + post(server::request_passkey_recovery), 179 + ) 180 + .route( 181 + "/_account.recoverPasskeyAccount", 182 + post(server::recover_passkey_account), 183 + ) 184 + .route( 185 + "/_account.updateDidDocument", 186 + post(server::update_did_document), 187 + ) 165 188 .route("/_account.getDidDocument", get(server::get_did_document)) 166 - .route("/com.atproto.server.requestEmailUpdate", post(server::request_email_update)) 189 + .route( 190 + "/com.atproto.server.requestEmailUpdate", 191 + post(server::request_email_update), 192 + ) 167 193 .route("/_checkEmailVerified", post(server::check_email_verified)) 168 - .route("/_checkChannelVerified", post(server::check_channel_verified)) 169 - .route("/com.atproto.server.confirmEmail", post(server::confirm_email)) 170 - .route("/com.atproto.server.updateEmail", post(server::update_email)) 171 - .route("/_account.authorizeEmailUpdate", get(server::authorize_email_update)) 172 - .route("/_account.checkEmailUpdateStatus", get(server::check_email_update_status)) 173 - .route("/_account.checkEmailInUse", post(server::check_email_in_use)) 174 - .route("/_account.checkCommsChannelInUse", post(server::check_comms_channel_in_use)) 175 - 
.route("/com.atproto.server.reserveSigningKey", post(server::reserve_signing_key)) 176 - .route("/com.atproto.server.verifyMigrationEmail", post(server::verify_migration_email)) 177 - .route("/com.atproto.server.resendMigrationVerification", post(server::resend_migration_verification)) 178 - .route("/com.atproto.identity.updateHandle", post(identity::update_handle)) 179 - .route("/com.atproto.identity.requestPlcOperationSignature", post(identity::request_plc_operation_signature)) 180 - .route("/com.atproto.identity.signPlcOperation", post(identity::sign_plc_operation)) 181 - .route("/com.atproto.identity.submitPlcOperation", post(identity::submit_plc_operation)) 182 - .route("/_identity.verifyHandleOwnership", post(identity::verify_handle_ownership)) 194 + .route( 195 + "/_checkChannelVerified", 196 + post(server::check_channel_verified), 197 + ) 198 + .route( 199 + "/com.atproto.server.confirmEmail", 200 + post(server::confirm_email), 201 + ) 202 + .route( 203 + "/com.atproto.server.updateEmail", 204 + post(server::update_email), 205 + ) 206 + .route( 207 + "/_account.authorizeEmailUpdate", 208 + get(server::authorize_email_update), 209 + ) 210 + .route( 211 + "/_account.checkEmailUpdateStatus", 212 + get(server::check_email_update_status), 213 + ) 214 + .route( 215 + "/_account.checkEmailInUse", 216 + post(server::check_email_in_use), 217 + ) 218 + .route( 219 + "/_account.checkCommsChannelInUse", 220 + post(server::check_comms_channel_in_use), 221 + ) 222 + .route( 223 + "/com.atproto.server.reserveSigningKey", 224 + post(server::reserve_signing_key), 225 + ) 226 + .route( 227 + "/com.atproto.server.verifyMigrationEmail", 228 + post(server::verify_migration_email), 229 + ) 230 + .route( 231 + "/com.atproto.server.resendMigrationVerification", 232 + post(server::resend_migration_verification), 233 + ) 234 + .route( 235 + "/com.atproto.identity.updateHandle", 236 + post(identity::update_handle), 237 + ) 238 + .route( 239 + 
"/com.atproto.identity.requestPlcOperationSignature", 240 + post(identity::request_plc_operation_signature), 241 + ) 242 + .route( 243 + "/com.atproto.identity.signPlcOperation", 244 + post(identity::sign_plc_operation), 245 + ) 246 + .route( 247 + "/com.atproto.identity.submitPlcOperation", 248 + post(identity::submit_plc_operation), 249 + ) 250 + .route( 251 + "/_identity.verifyHandleOwnership", 252 + post(identity::verify_handle_ownership), 253 + ) 183 254 .route("/com.atproto.repo.importRepo", post(repo::import_repo)) 184 - .route("/com.atproto.admin.deleteAccount", post(admin::delete_account)) 185 - .route("/com.atproto.admin.updateAccountEmail", post(admin::update_account_email)) 186 - .route("/com.atproto.admin.updateAccountHandle", post(admin::update_account_handle)) 187 - .route("/com.atproto.admin.updateAccountPassword", post(admin::update_account_password)) 188 - .route("/com.atproto.server.listAppPasswords", get(server::list_app_passwords)) 189 - .route("/com.atproto.server.createAppPassword", post(server::create_app_password)) 190 - .route("/com.atproto.server.revokeAppPassword", post(server::revoke_app_password)) 191 - .route("/com.atproto.server.createInviteCode", post(server::create_invite_code)) 192 - .route("/com.atproto.server.createInviteCodes", post(server::create_invite_codes)) 193 - .route("/com.atproto.server.getAccountInviteCodes", get(server::get_account_invite_codes)) 194 - .route("/com.atproto.server.createTotpSecret", post(server::create_totp_secret)) 255 + .route( 256 + "/com.atproto.admin.deleteAccount", 257 + post(admin::delete_account), 258 + ) 259 + .route( 260 + "/com.atproto.admin.updateAccountEmail", 261 + post(admin::update_account_email), 262 + ) 263 + .route( 264 + "/com.atproto.admin.updateAccountHandle", 265 + post(admin::update_account_handle), 266 + ) 267 + .route( 268 + "/com.atproto.admin.updateAccountPassword", 269 + post(admin::update_account_password), 270 + ) 271 + .route( 272 + 
"/com.atproto.server.listAppPasswords", 273 + get(server::list_app_passwords), 274 + ) 275 + .route( 276 + "/com.atproto.server.createAppPassword", 277 + post(server::create_app_password), 278 + ) 279 + .route( 280 + "/com.atproto.server.revokeAppPassword", 281 + post(server::revoke_app_password), 282 + ) 283 + .route( 284 + "/com.atproto.server.createInviteCode", 285 + post(server::create_invite_code), 286 + ) 287 + .route( 288 + "/com.atproto.server.createInviteCodes", 289 + post(server::create_invite_codes), 290 + ) 291 + .route( 292 + "/com.atproto.server.getAccountInviteCodes", 293 + get(server::get_account_invite_codes), 294 + ) 295 + .route( 296 + "/com.atproto.server.createTotpSecret", 297 + post(server::create_totp_secret), 298 + ) 195 299 .route("/com.atproto.server.enableTotp", post(server::enable_totp)) 196 - .route("/com.atproto.server.disableTotp", post(server::disable_totp)) 197 - .route("/com.atproto.server.getTotpStatus", get(server::get_totp_status)) 198 - .route("/com.atproto.server.regenerateBackupCodes", post(server::regenerate_backup_codes)) 199 - .route("/com.atproto.server.startPasskeyRegistration", post(server::start_passkey_registration)) 200 - .route("/com.atproto.server.finishPasskeyRegistration", post(server::finish_passkey_registration)) 201 - .route("/com.atproto.server.listPasskeys", get(server::list_passkeys)) 202 - .route("/com.atproto.server.deletePasskey", post(server::delete_passkey)) 203 - .route("/com.atproto.server.updatePasskey", post(server::update_passkey)) 204 - .route("/com.atproto.admin.getInviteCodes", get(admin::get_invite_codes)) 300 + .route( 301 + "/com.atproto.server.disableTotp", 302 + post(server::disable_totp), 303 + ) 304 + .route( 305 + "/com.atproto.server.getTotpStatus", 306 + get(server::get_totp_status), 307 + ) 308 + .route( 309 + "/com.atproto.server.regenerateBackupCodes", 310 + post(server::regenerate_backup_codes), 311 + ) 312 + .route( 313 + "/com.atproto.server.startPasskeyRegistration", 314 + 
post(server::start_passkey_registration), 315 + ) 316 + .route( 317 + "/com.atproto.server.finishPasskeyRegistration", 318 + post(server::finish_passkey_registration), 319 + ) 320 + .route( 321 + "/com.atproto.server.listPasskeys", 322 + get(server::list_passkeys), 323 + ) 324 + .route( 325 + "/com.atproto.server.deletePasskey", 326 + post(server::delete_passkey), 327 + ) 328 + .route( 329 + "/com.atproto.server.updatePasskey", 330 + post(server::update_passkey), 331 + ) 332 + .route( 333 + "/com.atproto.admin.getInviteCodes", 334 + get(admin::get_invite_codes), 335 + ) 205 336 .route("/_admin.getServerStats", get(admin::get_server_stats)) 206 337 .route("/_server.getConfig", get(admin::get_server_config)) 207 - .route("/_admin.updateServerConfig", post(admin::update_server_config)) 208 - .route("/com.atproto.admin.disableAccountInvites", post(admin::disable_account_invites)) 209 - .route("/com.atproto.admin.enableAccountInvites", post(admin::enable_account_invites)) 210 - .route("/com.atproto.admin.disableInviteCodes", post(admin::disable_invite_codes)) 211 - .route("/com.atproto.admin.getSubjectStatus", get(admin::get_subject_status)) 212 - .route("/com.atproto.admin.updateSubjectStatus", post(admin::update_subject_status)) 338 + .route( 339 + "/_admin.updateServerConfig", 340 + post(admin::update_server_config), 341 + ) 342 + .route( 343 + "/com.atproto.admin.disableAccountInvites", 344 + post(admin::disable_account_invites), 345 + ) 346 + .route( 347 + "/com.atproto.admin.enableAccountInvites", 348 + post(admin::enable_account_invites), 349 + ) 350 + .route( 351 + "/com.atproto.admin.disableInviteCodes", 352 + post(admin::disable_invite_codes), 353 + ) 354 + .route( 355 + "/com.atproto.admin.getSubjectStatus", 356 + get(admin::get_subject_status), 357 + ) 358 + .route( 359 + "/com.atproto.admin.updateSubjectStatus", 360 + post(admin::update_subject_status), 361 + ) 213 362 .route("/com.atproto.admin.sendEmail", post(admin::send_email)) 214 - 
.route("/app.bsky.actor.getPreferences", get(actor::get_preferences)) 215 - .route("/app.bsky.actor.putPreferences", post(actor::put_preferences)) 216 - .route("/com.atproto.temp.checkSignupQueue", get(temp::check_signup_queue)) 217 - .route("/com.atproto.temp.dereferenceScope", post(temp::dereference_scope)) 218 - .route("/_account.getNotificationPrefs", get(notification_prefs::get_notification_prefs)) 219 - .route("/_account.updateNotificationPrefs", post(notification_prefs::update_notification_prefs)) 220 - .route("/_account.getNotificationHistory", get(notification_prefs::get_notification_history)) 221 - .route("/_account.confirmChannelVerification", post(verification::confirm_channel_verification)) 363 + .route( 364 + "/app.bsky.actor.getPreferences", 365 + get(actor::get_preferences), 366 + ) 367 + .route( 368 + "/app.bsky.actor.putPreferences", 369 + post(actor::put_preferences), 370 + ) 371 + .route( 372 + "/com.atproto.temp.checkSignupQueue", 373 + get(temp::check_signup_queue), 374 + ) 375 + .route( 376 + "/com.atproto.temp.dereferenceScope", 377 + post(temp::dereference_scope), 378 + ) 379 + .route( 380 + "/_account.getNotificationPrefs", 381 + get(notification_prefs::get_notification_prefs), 382 + ) 383 + .route( 384 + "/_account.updateNotificationPrefs", 385 + post(notification_prefs::update_notification_prefs), 386 + ) 387 + .route( 388 + "/_account.getNotificationHistory", 389 + get(notification_prefs::get_notification_history), 390 + ) 391 + .route( 392 + "/_account.confirmChannelVerification", 393 + post(verification::confirm_channel_verification), 394 + ) 222 395 .route("/_account.verifyToken", post(server::verify_token)) 223 - .route("/_delegation.listControllers", get(delegation::list_controllers)) 224 - .route("/_delegation.addController", post(delegation::add_controller)) 225 - .route("/_delegation.removeController", post(delegation::remove_controller)) 226 - .route("/_delegation.updateControllerScopes", 
post(delegation::update_controller_scopes)) 227 - .route("/_delegation.listControlledAccounts", get(delegation::list_controlled_accounts)) 396 + .route( 397 + "/_delegation.listControllers", 398 + get(delegation::list_controllers), 399 + ) 400 + .route( 401 + "/_delegation.addController", 402 + post(delegation::add_controller), 403 + ) 404 + .route( 405 + "/_delegation.removeController", 406 + post(delegation::remove_controller), 407 + ) 408 + .route( 409 + "/_delegation.updateControllerScopes", 410 + post(delegation::update_controller_scopes), 411 + ) 412 + .route( 413 + "/_delegation.listControlledAccounts", 414 + get(delegation::list_controlled_accounts), 415 + ) 228 416 .route("/_delegation.getAuditLog", get(delegation::get_audit_log)) 229 - .route("/_delegation.getScopePresets", get(delegation::get_scope_presets)) 230 - .route("/_delegation.createDelegatedAccount", post(delegation::create_delegated_account)) 231 - .route("/_delegation.resolveController", get(delegation::resolve_controller)) 232 - .route("/_backup.listBackups", get(backup::list_backups)) 233 - .route("/_backup.getBackup", get(backup::get_backup)) 234 - .route("/_backup.createBackup", post(backup::create_backup)) 235 - .route("/_backup.deleteBackup", post(backup::delete_backup)) 236 - .route("/_backup.setEnabled", post(backup::set_backup_enabled)) 237 - .route("/_backup.exportBlobs", get(backup::export_blobs)) 238 - .route("/app.bsky.ageassurance.getState", get(age_assurance::get_state)) 239 - .route("/app.bsky.unspecced.getAgeAssuranceState", get(age_assurance::get_age_assurance_state)) 417 + .route( 418 + "/_delegation.getScopePresets", 419 + get(delegation::get_scope_presets), 420 + ) 421 + .route( 422 + "/_delegation.createDelegatedAccount", 423 + post(delegation::create_delegated_account), 424 + ) 425 + .route( 426 + "/_delegation.resolveController", 427 + get(delegation::resolve_controller), 428 + ) 429 + .route( 430 + "/app.bsky.ageassurance.getState", 431 + get(age_assurance::get_state), 
432 + ) 433 + .route( 434 + "/app.bsky.unspecced.getAgeAssuranceState", 435 + get(age_assurance::get_age_assurance_state), 436 + ) 240 437 } 241 438 242 439 pub fn well_known_api_routes() -> axum::Router<AppState> { ··· 253 450 axum::Router::new() 254 451 .route( 255 452 "/webhook/telegram", 256 - post(telegram_webhook::handle_telegram_webhook) 257 - .layer(DefaultBodyLimit::max(64 * 1024)), 453 + post(telegram_webhook::handle_telegram_webhook).layer(DefaultBodyLimit::max(64 * 1024)), 258 454 ) 259 455 .route( 260 456 "/webhook/discord", 261 - post(discord_webhook::handle_discord_webhook) 262 - .layer(DefaultBodyLimit::max(64 * 1024)), 457 + post(discord_webhook::handle_discord_webhook).layer(DefaultBodyLimit::max(64 * 1024)), 263 458 ) 264 459 } 265 460
+4 -4
crates/tranquil-api/src/moderation/mod.rs
··· 1 - use tranquil_pds::api::ApiError; 2 - use tranquil_pds::api::proxy_client::{is_ssrf_safe, proxy_client}; 3 - use tranquil_pds::auth::{AnyUser, Auth}; 4 - use tranquil_pds::state::AppState; 5 1 use axum::{ 6 2 Json, 7 3 extract::State, ··· 11 7 use serde::{Deserialize, Serialize}; 12 8 use serde_json::{Value, json}; 13 9 use tracing::{error, info}; 10 + use tranquil_pds::api::ApiError; 11 + use tranquil_pds::api::proxy_client::{is_ssrf_safe, proxy_client}; 12 + use tranquil_pds::auth::{AnyUser, Auth}; 13 + use tranquil_pds::state::AppState; 14 14 15 15 #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 16 16 pub enum ReportReasonType {
+6 -5
crates/tranquil-api/src/notification_prefs.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::{Active, Auth}; 3 - use tranquil_pds::state::AppState; 4 1 use axum::{ 5 2 Json, 6 3 extract::State, ··· 10 7 use serde_json::json; 11 8 use tracing::info; 12 9 use tranquil_db_traits::{CommsChannel, CommsStatus, CommsType}; 10 + use tranquil_pds::api::error::ApiError; 11 + use tranquil_pds::auth::{Active, Auth}; 12 + use tranquil_pds::state::AppState; 13 13 use tranquil_types::Did; 14 14 15 15 #[derive(Serialize)] ··· 141 141 identifier: &str, 142 142 handle: Option<&str>, 143 143 ) -> Result<String, ApiError> { 144 - let token = 145 - tranquil_pds::auth::verification_token::generate_channel_update_token(did, channel, identifier); 144 + let token = tranquil_pds::auth::verification_token::generate_channel_update_token( 145 + did, channel, identifier, 146 + ); 146 147 let formatted_token = tranquil_pds::auth::verification_token::format_token_for_display(&token); 147 148 148 149 match channel {
+6 -6
crates/tranquil-api/src/repo/blob.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::auth::{Auth, AuthAny, NotTakendown, Permissive, VerifyScope}; 3 - use tranquil_pds::delegation::DelegationActionType; 4 - use tranquil_pds::state::AppState; 5 - use tranquil_pds::types::{CidLink, Did}; 6 - use tranquil_pds::util::get_header_str; 7 1 use axum::body::Body; 8 2 use axum::{ 9 3 Json, ··· 19 13 use serde_json::json; 20 14 use std::pin::Pin; 21 15 use tracing::{debug, error, info, warn}; 16 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 17 + use tranquil_pds::auth::{Auth, AuthAny, NotTakendown, Permissive, VerifyScope}; 18 + use tranquil_pds::delegation::DelegationActionType; 19 + use tranquil_pds::state::AppState; 20 + use tranquil_pds::types::{CidLink, Did}; 21 + use tranquil_pds::util::get_header_str; 22 22 23 23 fn detect_mime_type(data: &[u8], client_hint: &str) -> String { 24 24 if let Some(kind) = infer::get(data) {
+11 -11
crates/tranquil-api/src/repo/import.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::repo_ops::create_signed_commit; 4 - use tranquil_pds::auth::{Auth, NotTakendown}; 5 - use tranquil_pds::state::AppState; 6 - use tranquil_pds::sync::import::{ImportError, apply_import, parse_car}; 7 - use tranquil_pds::sync::verify::CarVerifier; 8 - use tranquil_pds::types::Did; 9 1 use axum::{ 10 2 body::Bytes, 11 3 extract::State, ··· 16 8 use k256::ecdsa::SigningKey; 17 9 use serde_json::json; 18 10 use tracing::{debug, error, info, warn}; 11 + use tranquil_pds::api::EmptyResponse; 12 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 13 + use tranquil_pds::auth::{Auth, NotTakendown}; 14 + use tranquil_pds::repo_ops::create_signed_commit; 15 + use tranquil_pds::state::AppState; 16 + use tranquil_pds::sync::import::{ImportError, apply_import, parse_car}; 17 + use tranquil_pds::sync::verify::CarVerifier; 18 + use tranquil_pds::types::Did; 19 19 use tranquil_types::{AtUri, CidLink}; 20 20 21 21 pub async fn import_repo( ··· 266 266 let key_bytes = 267 267 tranquil_pds::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version) 268 268 .map_err(|e| { 269 - error!("Failed to decrypt signing key: {}", e); 270 - ApiError::InternalError(None) 271 - })?; 269 + error!("Failed to decrypt signing key: {}", e); 270 + ApiError::InternalError(None) 271 + })?; 272 272 let signing_key = SigningKey::from_slice(&key_bytes).map_err(|e| { 273 273 error!("Invalid signing key: {:?}", e); 274 274 ApiError::InternalError(None)
+3 -3
crates/tranquil-api/src/repo/meta.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::state::AppState; 3 - use tranquil_pds::types::AtIdentifier; 4 1 use axum::{ 5 2 Json, 6 3 extract::{Query, State}, ··· 8 5 }; 9 6 use serde::Deserialize; 10 7 use serde_json::json; 8 + use tranquil_pds::api::error::ApiError; 9 + use tranquil_pds::state::AppState; 10 + use tranquil_pds::types::AtIdentifier; 11 11 12 12 #[derive(Deserialize)] 13 13 pub struct DescribeRepoInput {
+11 -11
crates/tranquil-api/src/repo/record/batch.rs
··· 1 1 use super::validation::validate_record_with_status; 2 2 use super::validation_mode::{ValidationMode, deserialize_validation_mode}; 3 - use tranquil_pds::api::error::ApiError; 4 3 use crate::repo::record::utils::{CommitParams, RecordOp, commit_and_log, extract_blob_cids}; 5 - use tranquil_pds::auth::{ 6 - Active, Auth, WriteOpKind, require_not_migrated, require_verified_or_delegated, 7 - verify_batch_write_scopes, 8 - }; 9 - use tranquil_pds::cid_types::CommitCid; 10 - use tranquil_pds::delegation::DelegationActionType; 11 - use tranquil_pds::repo::tracking::TrackingBlockStore; 12 - use tranquil_pds::state::AppState; 13 - use tranquil_pds::types::{AtIdentifier, AtUri, Did, Nsid, Rkey}; 14 - use tranquil_pds::validation::ValidationStatus; 15 4 use axum::{ 16 5 Json, 17 6 extract::State, ··· 25 14 use std::str::FromStr; 26 15 use std::sync::Arc; 27 16 use tracing::info; 17 + use tranquil_pds::api::error::ApiError; 18 + use tranquil_pds::auth::{ 19 + Active, Auth, WriteOpKind, require_not_migrated, require_verified_or_delegated, 20 + verify_batch_write_scopes, 21 + }; 22 + use tranquil_pds::cid_types::CommitCid; 23 + use tranquil_pds::delegation::DelegationActionType; 24 + use tranquil_pds::repo::tracking::TrackingBlockStore; 25 + use tranquil_pds::state::AppState; 26 + use tranquil_pds::types::{AtIdentifier, AtUri, Did, Nsid, Rkey}; 27 + use tranquil_pds::validation::ValidationStatus; 28 28 29 29 const MAX_BATCH_WRITES: usize = 200; 30 30
+7 -7
crates/tranquil-api/src/repo/record/delete.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 1 use crate::repo::record::utils::{ 3 2 CommitError, CommitParams, RecordOp, commit_and_log, get_current_root_cid, 4 3 }; 5 4 use crate::repo::record::write::{CommitInfo, prepare_repo_write}; 6 - use tranquil_pds::auth::{Active, Auth, VerifyScope}; 7 - use tranquil_pds::cid_types::CommitCid; 8 - use tranquil_pds::delegation::DelegationActionType; 9 - use tranquil_pds::repo::tracking::TrackingBlockStore; 10 - use tranquil_pds::state::AppState; 11 - use tranquil_pds::types::{AtIdentifier, AtUri, Nsid, Rkey}; 12 5 use axum::{ 13 6 Json, 14 7 extract::State, ··· 22 15 use std::str::FromStr; 23 16 use std::sync::Arc; 24 17 use tracing::error; 18 + use tranquil_pds::api::error::ApiError; 19 + use tranquil_pds::auth::{Active, Auth, VerifyScope}; 20 + use tranquil_pds::cid_types::CommitCid; 21 + use tranquil_pds::delegation::DelegationActionType; 22 + use tranquil_pds::repo::tracking::TrackingBlockStore; 23 + use tranquil_pds::state::AppState; 24 + use tranquil_pds::types::{AtIdentifier, AtUri, Nsid, Rkey}; 25 25 26 26 #[derive(Deserialize)] 27 27 pub struct DeleteRecordInput {
+3 -3
crates/tranquil-api/src/repo/record/read.rs
··· 1 1 use super::pagination::{PaginationDirection, deserialize_pagination_direction}; 2 - use tranquil_pds::api::error::ApiError; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::{AtIdentifier, Nsid, Rkey}; 5 2 use axum::{ 6 3 Json, 7 4 extract::{Query, State}, ··· 16 13 use serde_json::{Map, Value, json}; 17 14 use std::str::FromStr; 18 15 use tracing::error; 16 + use tranquil_pds::api::error::ApiError; 17 + use tranquil_pds::state::AppState; 18 + use tranquil_pds::types::{AtIdentifier, Nsid, Rkey}; 19 19 20 20 fn ipld_to_json(ipld: Ipld) -> Value { 21 21 match ipld {
+1 -1
crates/tranquil-api/src/repo/record/validation.rs
··· 1 + use axum::response::Response; 1 2 use tranquil_pds::api::error::ApiError; 2 3 use tranquil_pds::types::{Nsid, Rkey}; 3 4 use tranquil_pds::validation::{RecordValidator, ValidationError, ValidationStatus}; 4 - use axum::response::Response; 5 5 6 6 pub async fn validate_record_with_status( 7 7 record: &serde_json::Value,
+11 -11
crates/tranquil-api/src/repo/record/write.rs
··· 1 1 use super::validation::validate_record_with_status; 2 2 use super::validation_mode::{ValidationMode, deserialize_validation_mode}; 3 - use tranquil_pds::api::error::ApiError; 4 3 use crate::repo::record::utils::{ 5 4 CommitParams, RecordOp, commit_and_log, extract_backlinks, extract_blob_cids, 6 5 get_current_root_cid, 7 6 }; 8 - use tranquil_pds::auth::{ 9 - Active, Auth, AuthSource, RepoScopeAction, ScopeVerified, VerifyScope, require_not_migrated, 10 - require_verified_or_delegated, 11 - }; 12 - use tranquil_pds::cid_types::CommitCid; 13 - use tranquil_pds::delegation::DelegationActionType; 14 - use tranquil_pds::repo::tracking::TrackingBlockStore; 15 - use tranquil_pds::state::AppState; 16 - use tranquil_pds::types::{AtIdentifier, AtUri, Did, Nsid, Rkey}; 17 - use tranquil_pds::validation::ValidationStatus; 18 7 use axum::{ 19 8 Json, 20 9 extract::State, ··· 28 17 use std::str::FromStr; 29 18 use std::sync::Arc; 30 19 use tracing::error; 20 + use tranquil_pds::api::error::ApiError; 21 + use tranquil_pds::auth::{ 22 + Active, Auth, AuthSource, RepoScopeAction, ScopeVerified, VerifyScope, require_not_migrated, 23 + require_verified_or_delegated, 24 + }; 25 + use tranquil_pds::cid_types::CommitCid; 26 + use tranquil_pds::delegation::DelegationActionType; 27 + use tranquil_pds::repo::tracking::TrackingBlockStore; 28 + use tranquil_pds::state::AppState; 29 + use tranquil_pds::types::{AtIdentifier, AtUri, Did, Nsid, Rkey}; 30 + use tranquil_pds::validation::ValidationStatus; 31 31 use uuid::Uuid; 32 32 33 33 pub struct RepoWriteAuth {
+18 -15
crates/tranquil-api/src/server/account_status.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Auth, NotTakendown, Permissive, require_legacy_session_mfa}; 4 - use tranquil_pds::cache::Cache; 5 - use tranquil_pds::plc::PlcClient; 6 - use tranquil_pds::state::AppState; 7 - use tranquil_pds::types::PlainPassword; 8 1 use axum::{ 9 2 Json, 10 3 extract::State, ··· 23 16 use std::sync::Arc; 24 17 use std::sync::atomic::{AtomicUsize, Ordering}; 25 18 use tracing::{error, info, warn}; 19 + use tranquil_pds::api::EmptyResponse; 20 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 21 + use tranquil_pds::auth::{Auth, NotTakendown, Permissive, require_legacy_session_mfa}; 22 + use tranquil_pds::cache::Cache; 23 + use tranquil_pds::plc::PlcClient; 24 + use tranquil_pds::state::AppState; 25 + use tranquil_pds::types::PlainPassword; 26 26 use uuid::Uuid; 27 27 28 28 #[derive(Serialize)] ··· 365 365 did 366 366 ); 367 367 if let Some(ref h) = handle { 368 - let _ = state.cache.delete(&tranquil_pds::cache_keys::handle_key(h)).await; 368 + let _ = state 369 + .cache 370 + .delete(&tranquil_pds::cache_keys::handle_key(h)) 371 + .await; 369 372 } 370 373 let _ = state 371 374 .cache ··· 404 407 did, handle 405 408 ); 406 409 let handle_typed = handle.clone(); 407 - if let Err(e) = tranquil_pds::repo_ops::sequence_identity_event( 408 - &state, 409 - &did, 410 - handle_typed.as_ref(), 411 - ) 412 - .await 410 + if let Err(e) = 411 + tranquil_pds::repo_ops::sequence_identity_event(&state, &did, handle_typed.as_ref()) 412 + .await 413 413 { 414 414 warn!( 415 415 "[MIGRATION] activateAccount: Failed to sequence identity event for activation: {}", ··· 507 507 match result { 508 508 Ok(true) => { 509 509 if let Some(ref h) = handle { 510 - let _ = state.cache.delete(&tranquil_pds::cache_keys::handle_key(h)).await; 510 + let _ = state 511 + .cache 512 + .delete(&tranquil_pds::cache_keys::handle_key(h)) 513 + .await; 511 514 } 512 515 if let 
Err(e) = tranquil_pds::repo_ops::sequence_account_event( 513 516 &state,
+6 -6
crates/tranquil-api/src/server/app_password.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Auth, NotTakendown, Permissive, generate_app_password}; 4 - use tranquil_pds::delegation::{DelegationActionType, intersect_scopes}; 5 - use tranquil_pds::rate_limit::{AppPasswordLimit, RateLimited}; 6 - use tranquil_pds::state::AppState; 7 1 use axum::{ 8 2 Json, 9 3 extract::State, ··· 13 7 use serde_json::json; 14 8 use tracing::error; 15 9 use tranquil_db_traits::AppPasswordCreate; 10 + use tranquil_pds::api::EmptyResponse; 11 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 12 + use tranquil_pds::auth::{Auth, NotTakendown, Permissive, generate_app_password}; 13 + use tranquil_pds::delegation::{DelegationActionType, intersect_scopes}; 14 + use tranquil_pds::rate_limit::{AppPasswordLimit, RateLimited}; 15 + use tranquil_pds::state::AppState; 16 16 17 17 #[derive(Serialize)] 18 18 #[serde(rename_all = "camelCase")]
+8 -6
crates/tranquil-api/src/server/email.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::api::{EmptyResponse, TokenRequiredResponse, VerifiedResponse}; 3 - use tranquil_pds::auth::{Auth, NotTakendown}; 4 - use tranquil_pds::rate_limit::{EmailUpdateLimit, RateLimited, VerificationCheckLimit}; 5 - use tranquil_pds::state::AppState; 6 1 use axum::{ 7 2 Json, 8 3 extract::State, ··· 16 11 use subtle::ConstantTimeEq; 17 12 use tracing::{error, info, warn}; 18 13 use tranquil_db_traits::CommsChannel; 14 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 15 + use tranquil_pds::api::{EmptyResponse, TokenRequiredResponse, VerifiedResponse}; 16 + use tranquil_pds::auth::{Auth, NotTakendown}; 17 + use tranquil_pds::rate_limit::{EmailUpdateLimit, RateLimited, VerificationCheckLimit}; 18 + use tranquil_pds::state::AppState; 19 19 20 20 const EMAIL_UPDATE_TTL: Duration = Duration::from_secs(30 * 60); 21 21 ··· 470 470 } 471 471 }; 472 472 473 - if token_data.purpose != tranquil_pds::auth::verification_token::VerificationPurpose::ChannelUpdate { 473 + if token_data.purpose 474 + != tranquil_pds::auth::verification_token::VerificationPurpose::ChannelUpdate 475 + { 474 476 warn!( 475 477 "authorize_email_update: wrong purpose: {:?}", 476 478 token_data.purpose
+5 -5
crates/tranquil-api/src/server/invite.rs
··· 1 - use tranquil_pds::api::ApiError; 2 - use tranquil_pds::api::error::DbResultExt; 3 - use tranquil_pds::auth::{Admin, Auth, NotTakendown}; 4 - use tranquil_pds::state::AppState; 5 - use tranquil_pds::types::Did; 6 1 use axum::{ 7 2 Json, 8 3 extract::State, ··· 11 6 use rand::Rng; 12 7 use serde::{Deserialize, Serialize}; 13 8 use tracing::error; 9 + use tranquil_pds::api::ApiError; 10 + use tranquil_pds::api::error::DbResultExt; 11 + use tranquil_pds::auth::{Admin, Auth, NotTakendown}; 12 + use tranquil_pds::state::AppState; 13 + use tranquil_pds::types::Did; 14 14 15 15 const BASE32_ALPHABET: &[u8] = b"abcdefghijklmnopqrstuvwxyz234567"; 16 16
+1 -1
crates/tranquil-api/src/server/logo.rs
··· 1 - use tranquil_pds::state::AppState; 2 1 use axum::{ 3 2 body::Body, 4 3 extract::State, ··· 7 6 response::{IntoResponse, Response}, 8 7 }; 9 8 use tracing::error; 9 + use tranquil_pds::state::AppState; 10 10 11 11 pub async fn get_logo(State(state): State<AppState>) -> Response { 12 12 let logo_cid = match state.infra_repo.get_server_config("logo_cid").await {
+2 -2
crates/tranquil-api/src/server/meta.rs
··· 1 + use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; 2 + use serde_json::json; 1 3 use tranquil_pds::BUILD_VERSION; 2 4 use tranquil_pds::state::AppState; 3 5 use tranquil_pds::util::{discord_app_id, discord_bot_username, telegram_bot_username}; 4 - use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; 5 - use serde_json::json; 6 6 7 7 fn get_available_comms_channels() -> Vec<tranquil_db_traits::CommsChannel> { 8 8 use tranquil_db_traits::CommsChannel;
+11 -9
crates/tranquil-api/src/server/migration.rs
··· 1 - use tranquil_pds::api::ApiError; 2 - use tranquil_pds::api::error::DbResultExt; 3 - use tranquil_pds::auth::{Active, Auth}; 4 - use tranquil_pds::state::AppState; 5 1 use axum::{ 6 2 Json, 7 3 extract::State, ··· 10 6 }; 11 7 use serde::{Deserialize, Serialize}; 12 8 use serde_json::json; 9 + use tranquil_pds::api::ApiError; 10 + use tranquil_pds::api::error::DbResultExt; 11 + use tranquil_pds::auth::{Active, Auth}; 12 + use tranquil_pds::state::AppState; 13 13 14 14 #[derive(Debug, Clone, Serialize, Deserialize)] 15 15 #[serde(rename_all = "camelCase")] ··· 209 209 .flatten(); 210 210 211 211 let public_key_multibase = match key_info { 212 - Some(info) => match tranquil_pds::config::decrypt_key(&info.key_bytes, info.encryption_version) { 213 - Ok(key_bytes) => crate::identity::did::get_public_key_multibase(&key_bytes) 214 - .unwrap_or_else(|_| "error".to_string()), 215 - Err(_) => "error".to_string(), 216 - }, 212 + Some(info) => { 213 + match tranquil_pds::config::decrypt_key(&info.key_bytes, info.encryption_version) { 214 + Ok(key_bytes) => crate::identity::did::get_public_key_multibase(&key_bytes) 215 + .unwrap_or_else(|_| "error".to_string()), 216 + Err(_) => "error".to_string(), 217 + } 218 + } 217 219 None => "error".to_string(), 218 220 }; 219 221
+9 -11
crates/tranquil-api/src/server/passkey_account.rs
··· 1 - use tranquil_pds::api::SuccessResponse; 2 - use tranquil_pds::api::error::ApiError; 3 - use tranquil_pds::auth::NormalizedLoginIdentifier; 4 1 use axum::{ 5 2 Json, 6 3 extract::State, ··· 17 14 use std::sync::Arc; 18 15 use tracing::{debug, error, info, warn}; 19 16 use tranquil_db_traits::WebauthnChallengeType; 17 + use tranquil_pds::api::SuccessResponse; 18 + use tranquil_pds::api::error::ApiError; 19 + use tranquil_pds::auth::NormalizedLoginIdentifier; 20 20 use uuid::Uuid; 21 21 22 - use tranquil_pds::repo_ops::create_signed_commit; 23 22 use tranquil_pds::auth::{ServiceTokenVerifier, generate_app_password, is_service_token}; 24 23 use tranquil_pds::rate_limit::{AccountCreationLimit, PasswordResetLimit, RateLimited}; 24 + use tranquil_pds::repo_ops::create_signed_commit; 25 25 use tranquil_pds::state::AppState; 26 26 use tranquil_pds::types::{Did, Handle, PlainPassword}; 27 27 use tranquil_pds::validation::validate_password; ··· 324 324 } 325 325 }; 326 326 327 - let plc_client = tranquil_pds::plc::PlcClient::with_cache(None, Some(state.cache.clone())); 327 + let plc_client = 328 + tranquil_pds::plc::PlcClient::with_cache(None, Some(state.cache.clone())); 328 329 if let Err(e) = plc_client 329 330 .send_operation(&genesis_result.did, &genesis_result.signed_operation) 330 331 .await ··· 465 466 let user_id = create_result.user_id; 466 467 467 468 if !is_byod_did_web { 468 - if let Err(e) = tranquil_pds::repo_ops::sequence_identity_event( 469 - &state, 470 - &did_typed, 471 - Some(&handle_typed), 472 - ) 473 - .await 469 + if let Err(e) = 470 + tranquil_pds::repo_ops::sequence_identity_event(&state, &did_typed, Some(&handle_typed)) 471 + .await 474 472 { 475 473 warn!("Failed to sequence identity event for {}: {}", did, e); 476 474 }
+4 -4
crates/tranquil-api/src/server/passkeys.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 - use tranquil_pds::auth::{Active, Auth, require_legacy_session_mfa, require_reauth_window}; 4 - use tranquil_pds::state::AppState; 5 1 use axum::{ 6 2 Json, 7 3 extract::State, ··· 10 6 use serde::{Deserialize, Serialize}; 11 7 use tracing::{error, info, warn}; 12 8 use tranquil_db_traits::WebauthnChallengeType; 9 + use tranquil_pds::api::EmptyResponse; 10 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 11 + use tranquil_pds::auth::{Active, Auth, require_legacy_session_mfa, require_reauth_window}; 12 + use tranquil_pds::state::AppState; 13 13 use webauthn_rs::prelude::*; 14 14 15 15 #[derive(Deserialize)]
+9 -9
crates/tranquil-api/src/server/password.rs
··· 1 + use axum::{ 2 + Json, 3 + extract::State, 4 + response::{IntoResponse, Response}, 5 + }; 6 + use bcrypt::{DEFAULT_COST, hash}; 7 + use chrono::{Duration, Utc}; 8 + use serde::Deserialize; 9 + use tracing::{error, info, warn}; 1 10 use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 11 use tranquil_pds::api::{EmptyResponse, HasPasswordResponse, SuccessResponse}; 3 12 use tranquil_pds::auth::{ ··· 8 17 use tranquil_pds::state::AppState; 9 18 use tranquil_pds::types::PlainPassword; 10 19 use tranquil_pds::validation::validate_password; 11 - use axum::{ 12 - Json, 13 - extract::State, 14 - response::{IntoResponse, Response}, 15 - }; 16 - use bcrypt::{DEFAULT_COST, hash}; 17 - use chrono::{Duration, Utc}; 18 - use serde::Deserialize; 19 - use tracing::{error, info, warn}; 20 20 21 21 fn generate_reset_code() -> String { 22 22 tranquil_pds::util::generate_token_code()
+2 -3
crates/tranquil-api/src/server/reauth.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 1 use axum::{ 3 2 Json, 4 3 extract::State, ··· 9 8 use serde::{Deserialize, Serialize}; 10 9 use tracing::{error, info, warn}; 11 10 use tranquil_db_traits::{SessionRepository, UserRepository, WebauthnChallengeType}; 11 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 12 12 13 13 use tranquil_pds::auth::{Active, Auth}; 14 14 use tranquil_pds::rate_limit::{TotpVerifyLimit, check_user_rate_limit_with_message}; ··· 125 125 .await?; 126 126 127 127 let valid = 128 - crate::server::totp::verify_totp_or_backup_for_user(&state, &auth.did, &input.code) 129 - .await; 128 + crate::server::totp::verify_totp_or_backup_for_user(&state, &auth.did, &input.code).await; 130 129 131 130 if !valid { 132 131 warn!(did = %&auth.did, "Re-auth failed: invalid TOTP code");
+8 -5
crates/tranquil-api/src/server/service_auth.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::extractor::{Auth, Permissive}; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::types::Did; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, State}, ··· 12 8 use std::collections::HashSet; 13 9 use std::sync::LazyLock; 14 10 use tracing::{error, info, warn}; 11 + use tranquil_pds::api::error::ApiError; 12 + use tranquil_pds::auth::extractor::{Auth, Permissive}; 13 + use tranquil_pds::state::AppState; 14 + use tranquil_pds::types::Did; 15 15 use tranquil_types::Nsid; 16 16 17 17 static CREATE_ACCOUNT_NSID: LazyLock<Nsid> = ··· 75 75 match state.user_repo.get_user_info_by_did(&auth.did).await { 76 76 Ok(Some(info)) => match info.key_bytes { 77 77 Some(key_bytes_enc) => { 78 - match tranquil_pds::config::decrypt_key(&key_bytes_enc, info.encryption_version) { 78 + match tranquil_pds::config::decrypt_key( 79 + &key_bytes_enc, 80 + info.encryption_version, 81 + ) { 79 82 Ok(key) => key, 80 83 Err(e) => { 81 84 error!(error = ?e, "Failed to decrypt user key for service auth");
+60 -51
crates/tranquil-api/src/server/session.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::api::{EmptyResponse, SuccessResponse}; 3 - use tranquil_pds::auth::{ 4 - Active, Auth, NormalizedLoginIdentifier, Permissive, require_legacy_session_mfa, 5 - require_reauth_window, 6 - }; 7 - use tranquil_pds::rate_limit::{LoginLimit, RateLimited, RefreshSessionLimit}; 8 - use tranquil_pds::state::AppState; 9 - use tranquil_pds::types::{AccountState, Did, Handle, PlainPassword}; 10 1 use axum::{ 11 2 Json, 12 3 extract::State, ··· 18 9 use serde_json::json; 19 10 use tracing::{error, info, warn}; 20 11 use tranquil_db_traits::{SessionId, TokenFamilyId}; 12 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 13 + use tranquil_pds::api::{EmptyResponse, SuccessResponse}; 14 + use tranquil_pds::auth::{ 15 + Active, Auth, NormalizedLoginIdentifier, Permissive, require_legacy_session_mfa, 16 + require_reauth_window, 17 + }; 18 + use tranquil_pds::rate_limit::{LoginLimit, RateLimited, RefreshSessionLimit}; 19 + use tranquil_pds::state::AppState; 20 + use tranquil_pds::types::{AccountState, Did, Handle, PlainPassword}; 21 21 use tranquil_types::TokenId; 22 22 23 23 fn full_handle(stored_handle: &str, _pds_hostname: &str) -> String { ··· 93 93 return ApiError::InternalError(None).into_response(); 94 94 } 95 95 }; 96 - let key_bytes = match tranquil_pds::config::decrypt_key(&row.key_bytes, row.encryption_version) { 96 + let key_bytes = match tranquil_pds::config::decrypt_key(&row.key_bytes, row.encryption_version) 97 + { 97 98 Ok(k) => k, 98 99 Err(e) => { 99 100 error!("Failed to decrypt user key: {:?}", e); ··· 203 204 .await 204 205 { 205 206 error!("Failed to send 2FA code: {:?}", e); 206 - tranquil_pds::auth::legacy_2fa::clear_challenge(state.cache.as_ref(), &row.did).await; 207 + tranquil_pds::auth::legacy_2fa::clear_challenge(state.cache.as_ref(), &row.did) 208 + .await; 207 209 return ApiError::InternalError(Some( 208 210 "Failed to send verification code. 
Please try again.".into(), 209 211 )) ··· 261 263 return ApiError::InternalError(None).into_response(); 262 264 } 263 265 }; 264 - let refresh_meta = match tranquil_pds::auth::create_refresh_token_with_metadata(&row.did, &key_bytes) { 265 - Ok(m) => m, 266 - Err(e) => { 267 - error!("Failed to create refresh token: {:?}", e); 268 - return ApiError::InternalError(None).into_response(); 269 - } 270 - }; 266 + let refresh_meta = 267 + match tranquil_pds::auth::create_refresh_token_with_metadata(&row.did, &key_bytes) { 268 + Ok(m) => m, 269 + Err(e) => { 270 + error!("Failed to create refresh token: {:?}", e); 271 + return ApiError::InternalError(None).into_response(); 272 + } 273 + }; 271 274 let did_for_doc = row.did.clone(); 272 275 let did_resolver = state.did_resolver.clone(); 273 276 let session_data = tranquil_db_traits::SessionTokenCreate { ··· 406 409 headers: axum::http::HeaderMap, 407 410 _auth: Auth<Active>, 408 411 ) -> Result<Response, ApiError> { 409 - let extracted = tranquil_pds::auth::extract_auth_token_from_header(tranquil_pds::util::get_header_str( 410 - &headers, 411 - http::header::AUTHORIZATION, 412 - )) 412 + let extracted = tranquil_pds::auth::extract_auth_token_from_header( 413 + tranquil_pds::util::get_header_str(&headers, http::header::AUTHORIZATION), 414 + ) 413 415 .ok_or(ApiError::AuthenticationRequired)?; 414 416 let jti = tranquil_pds::auth::get_jti_from_token(&extracted.token) 415 417 .map_err(|_| ApiError::AuthenticationFailed(None))?; ··· 432 434 _rate_limit: RateLimited<RefreshSessionLimit>, 433 435 headers: axum::http::HeaderMap, 434 436 ) -> Response { 435 - let extracted = match tranquil_pds::auth::extract_auth_token_from_header(tranquil_pds::util::get_header_str( 436 - &headers, 437 - http::header::AUTHORIZATION, 438 - )) { 437 + let extracted = match tranquil_pds::auth::extract_auth_token_from_header( 438 + tranquil_pds::util::get_header_str(&headers, http::header::AUTHORIZATION), 439 + ) { 439 440 Some(t) => t, 440 441 None => 
return ApiError::AuthenticationRequired.into_response(), 441 442 }; ··· 500 501 return ApiError::InternalError(None).into_response(); 501 502 } 502 503 }; 503 - let new_refresh_meta = 504 - match tranquil_pds::auth::create_refresh_token_with_metadata(&session_row.did, &key_bytes) { 505 - Ok(m) => m, 506 - Err(e) => { 507 - error!("Failed to create refresh token: {:?}", e); 508 - return ApiError::InternalError(None).into_response(); 509 - } 510 - }; 504 + let new_refresh_meta = match tranquil_pds::auth::create_refresh_token_with_metadata( 505 + &session_row.did, 506 + &key_bytes, 507 + ) { 508 + Ok(m) => m, 509 + Err(e) => { 510 + error!("Failed to create refresh token: {:?}", e); 511 + return ApiError::InternalError(None).into_response(); 512 + } 513 + }; 511 514 let refresh_data = tranquil_db_traits::SessionRefreshData { 512 515 old_refresh_jti: refresh_jti.clone(), 513 516 session_id: session_row.id, ··· 668 671 } 669 672 } 670 673 671 - let key_bytes = match tranquil_pds::config::decrypt_key(&row.key_bytes, row.encryption_version) { 674 + let key_bytes = match tranquil_pds::config::decrypt_key(&row.key_bytes, row.encryption_version) 675 + { 672 676 Ok(k) => k, 673 677 Err(e) => { 674 678 error!("Failed to decrypt user key: {:?}", e); ··· 676 680 } 677 681 }; 678 682 679 - let access_meta = match tranquil_pds::auth::create_access_token_with_metadata(&row.did, &key_bytes) { 680 - Ok(m) => m, 681 - Err(e) => { 682 - error!("Failed to create access token: {:?}", e); 683 - return ApiError::InternalError(None).into_response(); 684 - } 685 - }; 686 - let refresh_meta = match tranquil_pds::auth::create_refresh_token_with_metadata(&row.did, &key_bytes) { 687 - Ok(m) => m, 688 - Err(e) => { 689 - error!("Failed to create refresh token: {:?}", e); 690 - return ApiError::InternalError(None).into_response(); 691 - } 692 - }; 683 + let access_meta = 684 + match tranquil_pds::auth::create_access_token_with_metadata(&row.did, &key_bytes) { 685 + Ok(m) => m, 686 + Err(e) => { 
687 + error!("Failed to create access token: {:?}", e); 688 + return ApiError::InternalError(None).into_response(); 689 + } 690 + }; 691 + let refresh_meta = 692 + match tranquil_pds::auth::create_refresh_token_with_metadata(&row.did, &key_bytes) { 693 + Ok(m) => m, 694 + Err(e) => { 695 + error!("Failed to create refresh token: {:?}", e); 696 + return ApiError::InternalError(None).into_response(); 697 + } 698 + }; 693 699 694 700 if let Err(e) = state 695 701 .user_repo ··· 855 861 tranquil_db_traits::CommsChannel::Signal => row.signal_username.clone().unwrap_or_default(), 856 862 }; 857 863 858 - let verification_token = 859 - tranquil_pds::auth::verification_token::generate_signup_token(&input.did, row.channel, &recipient); 864 + let verification_token = tranquil_pds::auth::verification_token::generate_signup_token( 865 + &input.did, 866 + row.channel, 867 + &recipient, 868 + ); 860 869 let formatted_token = 861 870 tranquil_pds::auth::verification_token::format_token_for_display(&verification_token); 862 871
+2 -2
crates/tranquil-api/src/server/signing_key.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::state::AppState; 3 1 use axum::{ 4 2 Json, 5 3 extract::State, ··· 10 8 use k256::ecdsa::SigningKey; 11 9 use serde::{Deserialize, Serialize}; 12 10 use tracing::{error, info}; 11 + use tranquil_pds::api::error::ApiError; 12 + use tranquil_pds::state::AppState; 13 13 14 14 const SECP256K1_MULTICODEC_PREFIX: [u8; 2] = [0xe7, 0x01]; 15 15
+7 -7
crates/tranquil-api/src/server/totp.rs
··· 1 + use axum::{ 2 + Json, 3 + extract::State, 4 + response::{IntoResponse, Response}, 5 + }; 6 + use serde::{Deserialize, Serialize}; 7 + use tracing::{error, info, warn}; 1 8 use tranquil_pds::api::EmptyResponse; 2 9 use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 10 use tranquil_pds::auth::{ ··· 9 16 use tranquil_pds::rate_limit::{TotpVerifyLimit, check_user_rate_limit_with_message}; 10 17 use tranquil_pds::state::AppState; 11 18 use tranquil_pds::types::PlainPassword; 12 - use axum::{ 13 - Json, 14 - extract::State, 15 - response::{IntoResponse, Response}, 16 - }; 17 - use serde::{Deserialize, Serialize}; 18 - use tracing::{error, info, warn}; 19 19 20 20 const ENCRYPTION_VERSION: i32 = 1; 21 21
+2 -2
crates/tranquil-api/src/server/trusted_devices.rs
··· 1 - use tranquil_pds::api::SuccessResponse; 2 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 3 1 use axum::{ 4 2 Json, 5 3 extract::State, ··· 9 7 use serde::{Deserialize, Serialize}; 10 8 use tracing::{error, info}; 11 9 use tranquil_db_traits::OAuthRepository; 10 + use tranquil_pds::api::SuccessResponse; 11 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 12 12 use tranquil_types::DeviceId; 13 13 14 14 use tranquil_pds::auth::{Active, Auth};
+2 -2
crates/tranquil-api/src/server/verify_email.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::types::Did; 3 1 use axum::{Json, extract::State}; 4 2 use serde::{Deserialize, Serialize}; 5 3 use tracing::{info, warn}; 4 + use tranquil_pds::api::error::ApiError; 5 + use tranquil_pds::types::Did; 6 6 7 7 use tranquil_pds::state::AppState; 8 8
+4 -4
crates/tranquil-api/src/server/verify_token.rs
··· 1 - use tranquil_pds::api::error::{ApiError, DbResultExt}; 2 - use tranquil_pds::comms::comms_repo; 3 - use tranquil_pds::types::Did; 4 1 use axum::{Json, extract::State}; 5 2 use serde::{Deserialize, Serialize}; 6 3 use tracing::{info, warn}; 4 + use tranquil_pds::api::error::{ApiError, DbResultExt}; 5 + use tranquil_pds::comms::comms_repo; 6 + use tranquil_pds::types::Did; 7 7 8 + use tranquil_db_traits::CommsChannel; 8 9 use tranquil_pds::auth::verification_token::{ 9 10 VerificationPurpose, normalize_token_input, verify_token_signature, 10 11 }; 11 12 use tranquil_pds::state::AppState; 12 - use tranquil_db_traits::CommsChannel; 13 13 14 14 #[derive(Deserialize, Clone)] 15 15 #[serde(rename_all = "camelCase")]
+3 -3
crates/tranquil-api/src/temp.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::auth::{Active, Auth, Permissive}; 3 - use tranquil_pds::state::AppState; 4 1 use axum::{ 5 2 Json, 6 3 extract::State, ··· 10 7 use jacquard_repo::storage::BlockStore; 11 8 use serde::{Deserialize, Serialize}; 12 9 use std::str::FromStr; 10 + use tranquil_pds::api::error::ApiError; 11 + use tranquil_pds::auth::{Active, Auth, Permissive}; 12 + use tranquil_pds::state::AppState; 13 13 14 14 #[derive(Serialize)] 15 15 #[serde(rename_all = "camelCase")]
+2 -2
crates/tranquil-api/src/verification.rs
··· 1 - use tranquil_pds::api::SuccessResponse; 2 - use tranquil_pds::state::AppState; 3 1 use axum::{ 4 2 Json, 5 3 extract::State, 6 4 response::{IntoResponse, Response}, 7 5 }; 8 6 use serde::Deserialize; 7 + use tranquil_pds::api::SuccessResponse; 8 + use tranquil_pds::state::AppState; 9 9 10 10 #[derive(Deserialize)] 11 11 #[serde(rename_all = "camelCase")]
-51
crates/tranquil-config/src/lib.rs
··· 108 108 #[config(nested)] 109 109 pub storage: StorageConfig, 110 110 111 - #[config(nested)] 112 - pub backup: BackupConfig, 113 - 114 111 #[config(nested)] 115 112 pub cache: CacheConfig, 116 113 ··· 234 231 } 235 232 } 236 233 237 - // -- backup storage --------------------------------------------------- 238 - if self.backup.enabled { 239 - match self.backup.backend.as_str() { 240 - "s3" => { 241 - if self.backup.s3_bucket.is_none() { 242 - errors.push( 243 - "backup.backend is \"s3\" but backup.s3_bucket \ 244 - (BACKUP_S3_BUCKET) is not set" 245 - .to_string(), 246 - ); 247 - } 248 - } 249 - "filesystem" => {} 250 - other => { 251 - errors.push(format!( 252 - "backup.backend must be \"filesystem\" or \"s3\", got \"{other}\"" 253 - )); 254 - } 255 - } 256 - } 257 - 258 234 // -- SSO providers ---------------------------------------------------- 259 235 self.validate_sso_provider("sso.github", &self.sso.github, &mut errors); 260 236 self.validate_sso_provider("sso.google", &self.sso.google, &mut errors); ··· 604 580 pub s3_endpoint: Option<String>, 605 581 } 606 582 607 - #[derive(Debug, Config)] 608 - pub struct BackupConfig { 609 - /// Enable automatic backups. 610 - #[config(env = "BACKUP_ENABLED", default = true)] 611 - pub enabled: bool, 612 - 613 - /// Backup storage backend: `filesystem` or `s3`. 614 - #[config(env = "BACKUP_STORAGE_BACKEND", default = "filesystem")] 615 - pub backend: String, 616 - 617 - /// Path on disk for the filesystem backup backend. 618 - #[config(env = "BACKUP_STORAGE_PATH", default = "/var/lib/tranquil-pds/backups")] 619 - pub path: String, 620 - 621 - /// S3 bucket name for backups. 622 - #[config(env = "BACKUP_S3_BUCKET")] 623 - pub s3_bucket: Option<String>, 624 - 625 - /// Number of backup revisions to keep per account. 626 - #[config(env = "BACKUP_RETENTION_COUNT", default = 7)] 627 - pub retention_count: u32, 628 - 629 - /// Seconds between backup runs. 
630 - #[config(env = "BACKUP_INTERVAL_SECS", default = 86400)] 631 - pub interval_secs: u64, 632 - } 633 - 634 583 #[derive(Debug, Config)] 635 584 pub struct CacheConfig { 636 585 /// Cache backend: `ripple` (default, built-in gossip) or `valkey`.
-108
crates/tranquil-db-traits/src/backup.rs
··· 1 - use async_trait::async_trait; 2 - use chrono::{DateTime, Utc}; 3 - use tranquil_types::Did; 4 - use uuid::Uuid; 5 - 6 - use crate::DbError; 7 - 8 - #[derive(Debug, Clone)] 9 - pub struct BackupRow { 10 - pub id: Uuid, 11 - pub repo_rev: String, 12 - pub repo_root_cid: String, 13 - pub block_count: i32, 14 - pub size_bytes: i64, 15 - pub created_at: DateTime<Utc>, 16 - } 17 - 18 - #[derive(Debug, Clone)] 19 - pub struct BackupStorageInfo { 20 - pub storage_key: String, 21 - pub repo_rev: String, 22 - } 23 - 24 - #[derive(Debug, Clone)] 25 - pub struct BackupForDeletion { 26 - pub id: Uuid, 27 - pub storage_key: String, 28 - pub deactivated_at: Option<DateTime<Utc>>, 29 - } 30 - 31 - #[derive(Debug, Clone)] 32 - pub struct OldBackupInfo { 33 - pub id: Uuid, 34 - pub storage_key: String, 35 - } 36 - 37 - #[derive(Debug, Clone)] 38 - pub struct UserBackupInfo { 39 - pub id: Uuid, 40 - pub did: Did, 41 - pub backup_enabled: bool, 42 - pub deactivated_at: Option<DateTime<Utc>>, 43 - pub repo_root_cid: String, 44 - pub repo_rev: Option<String>, 45 - } 46 - 47 - #[derive(Debug, Clone)] 48 - pub struct BlobExportInfo { 49 - pub cid: String, 50 - pub storage_key: String, 51 - pub mime_type: String, 52 - } 53 - 54 - #[async_trait] 55 - pub trait BackupRepository: Send + Sync { 56 - async fn get_user_backup_status(&self, did: &Did) -> Result<Option<(Uuid, bool)>, DbError>; 57 - 58 - async fn list_backups_for_user(&self, user_id: Uuid) -> Result<Vec<BackupRow>, DbError>; 59 - 60 - async fn get_backup_storage_info( 61 - &self, 62 - backup_id: Uuid, 63 - did: &Did, 64 - ) -> Result<Option<BackupStorageInfo>, DbError>; 65 - 66 - async fn get_user_for_backup(&self, did: &Did) -> Result<Option<UserBackupInfo>, DbError>; 67 - 68 - async fn insert_backup( 69 - &self, 70 - user_id: Uuid, 71 - storage_key: &str, 72 - repo_root_cid: &str, 73 - repo_rev: &str, 74 - block_count: i32, 75 - size_bytes: i64, 76 - ) -> Result<Uuid, DbError>; 77 - 78 - async fn get_old_backups( 79 - 
&self, 80 - user_id: Uuid, 81 - retention_offset: i64, 82 - ) -> Result<Vec<OldBackupInfo>, DbError>; 83 - 84 - async fn delete_backup(&self, backup_id: Uuid) -> Result<(), DbError>; 85 - 86 - async fn get_backup_for_deletion( 87 - &self, 88 - backup_id: Uuid, 89 - did: &Did, 90 - ) -> Result<Option<BackupForDeletion>, DbError>; 91 - 92 - async fn get_user_deactivated_status( 93 - &self, 94 - did: &Did, 95 - ) -> Result<Option<Option<DateTime<Utc>>>, DbError>; 96 - 97 - async fn update_backup_enabled(&self, did: &Did, enabled: bool) -> Result<(), DbError>; 98 - 99 - async fn get_user_id_by_did(&self, did: &Did) -> Result<Option<Uuid>, DbError>; 100 - 101 - async fn get_blobs_for_export(&self, user_id: Uuid) -> Result<Vec<BlobExportInfo>, DbError>; 102 - 103 - async fn get_users_needing_backup( 104 - &self, 105 - backup_interval_secs: i64, 106 - limit: i64, 107 - ) -> Result<Vec<UserBackupInfo>, DbError>; 108 - }
-5
crates/tranquil-db-traits/src/lib.rs
··· 1 1 mod backlink; 2 - mod backup; 3 2 mod blob; 4 3 mod channel_verification; 5 4 mod delegation; ··· 15 14 mod user; 16 15 17 16 pub use backlink::{Backlink, BacklinkPath, BacklinkRepository}; 18 - pub use backup::{ 19 - BackupForDeletion, BackupRepository, BackupRow, BackupStorageInfo, BlobExportInfo, 20 - OldBackupInfo, UserBackupInfo, 21 - }; 22 17 pub use blob::{BlobForExport, BlobMetadata, BlobRepository, BlobWithTakedown, MissingBlobInfo}; 23 18 pub use channel_verification::ChannelVerificationStatus; 24 19 pub use delegation::{
-300
crates/tranquil-db/src/postgres/backup.rs
··· 1 - use async_trait::async_trait; 2 - use chrono::{DateTime, Utc}; 3 - use sqlx::PgPool; 4 - use tranquil_db_traits::{ 5 - BackupForDeletion, BackupRepository, BackupRow, BackupStorageInfo, BlobExportInfo, DbError, 6 - OldBackupInfo, UserBackupInfo, 7 - }; 8 - use tranquil_types::Did; 9 - use uuid::Uuid; 10 - 11 - use super::user::map_sqlx_error; 12 - 13 - pub struct PostgresBackupRepository { 14 - pool: PgPool, 15 - } 16 - 17 - impl PostgresBackupRepository { 18 - pub fn new(pool: PgPool) -> Self { 19 - Self { pool } 20 - } 21 - } 22 - 23 - #[async_trait] 24 - impl BackupRepository for PostgresBackupRepository { 25 - async fn get_user_backup_status(&self, did: &Did) -> Result<Option<(Uuid, bool)>, DbError> { 26 - let result = sqlx::query!( 27 - "SELECT id, backup_enabled FROM users WHERE did = $1", 28 - did.as_str() 29 - ) 30 - .fetch_optional(&self.pool) 31 - .await 32 - .map_err(map_sqlx_error)?; 33 - 34 - Ok(result.map(|r| (r.id, r.backup_enabled))) 35 - } 36 - 37 - async fn list_backups_for_user(&self, user_id: Uuid) -> Result<Vec<BackupRow>, DbError> { 38 - let results = sqlx::query_as!( 39 - BackupRow, 40 - r#" 41 - SELECT id, repo_rev, repo_root_cid, block_count, size_bytes, created_at 42 - FROM account_backups 43 - WHERE user_id = $1 44 - ORDER BY created_at DESC 45 - "#, 46 - user_id 47 - ) 48 - .fetch_all(&self.pool) 49 - .await 50 - .map_err(map_sqlx_error)?; 51 - 52 - Ok(results) 53 - } 54 - 55 - async fn get_backup_storage_info( 56 - &self, 57 - backup_id: Uuid, 58 - did: &Did, 59 - ) -> Result<Option<BackupStorageInfo>, DbError> { 60 - let result = sqlx::query!( 61 - r#" 62 - SELECT ab.storage_key, ab.repo_rev 63 - FROM account_backups ab 64 - JOIN users u ON u.id = ab.user_id 65 - WHERE ab.id = $1 AND u.did = $2 66 - "#, 67 - backup_id, 68 - did.as_str() 69 - ) 70 - .fetch_optional(&self.pool) 71 - .await 72 - .map_err(map_sqlx_error)?; 73 - 74 - Ok(result.map(|r| BackupStorageInfo { 75 - storage_key: r.storage_key, 76 - repo_rev: r.repo_rev, 77 
- })) 78 - } 79 - 80 - async fn get_user_for_backup(&self, did: &Did) -> Result<Option<UserBackupInfo>, DbError> { 81 - let result = sqlx::query!( 82 - r#" 83 - SELECT u.id, u.did, u.backup_enabled, u.deactivated_at, r.repo_root_cid, r.repo_rev 84 - FROM users u 85 - JOIN repos r ON r.user_id = u.id 86 - WHERE u.did = $1 87 - "#, 88 - did.as_str() 89 - ) 90 - .fetch_optional(&self.pool) 91 - .await 92 - .map_err(map_sqlx_error)?; 93 - 94 - Ok(result.map(|r| UserBackupInfo { 95 - id: r.id, 96 - did: r.did.into(), 97 - backup_enabled: r.backup_enabled, 98 - deactivated_at: r.deactivated_at, 99 - repo_root_cid: r.repo_root_cid, 100 - repo_rev: r.repo_rev, 101 - })) 102 - } 103 - 104 - async fn insert_backup( 105 - &self, 106 - user_id: Uuid, 107 - storage_key: &str, 108 - repo_root_cid: &str, 109 - repo_rev: &str, 110 - block_count: i32, 111 - size_bytes: i64, 112 - ) -> Result<Uuid, DbError> { 113 - let id = sqlx::query_scalar!( 114 - r#" 115 - INSERT INTO account_backups (user_id, storage_key, repo_root_cid, repo_rev, block_count, size_bytes) 116 - VALUES ($1, $2, $3, $4, $5, $6) 117 - ON CONFLICT (storage_key) DO UPDATE SET created_at = NOW() 118 - RETURNING id 119 - "#, 120 - user_id, 121 - storage_key, 122 - repo_root_cid, 123 - repo_rev, 124 - block_count, 125 - size_bytes 126 - ) 127 - .fetch_one(&self.pool) 128 - .await 129 - .map_err(map_sqlx_error)?; 130 - 131 - Ok(id) 132 - } 133 - 134 - async fn get_old_backups( 135 - &self, 136 - user_id: Uuid, 137 - retention_offset: i64, 138 - ) -> Result<Vec<OldBackupInfo>, DbError> { 139 - let results = sqlx::query!( 140 - r#" 141 - SELECT id, storage_key 142 - FROM account_backups 143 - WHERE user_id = $1 144 - ORDER BY created_at DESC 145 - OFFSET $2 146 - "#, 147 - user_id, 148 - retention_offset 149 - ) 150 - .fetch_all(&self.pool) 151 - .await 152 - .map_err(map_sqlx_error)?; 153 - 154 - Ok(results 155 - .into_iter() 156 - .map(|r| OldBackupInfo { 157 - id: r.id, 158 - storage_key: r.storage_key, 159 - }) 160 - 
.collect()) 161 - } 162 - 163 - async fn delete_backup(&self, backup_id: Uuid) -> Result<(), DbError> { 164 - sqlx::query!("DELETE FROM account_backups WHERE id = $1", backup_id) 165 - .execute(&self.pool) 166 - .await 167 - .map_err(map_sqlx_error)?; 168 - 169 - Ok(()) 170 - } 171 - 172 - async fn get_backup_for_deletion( 173 - &self, 174 - backup_id: Uuid, 175 - did: &Did, 176 - ) -> Result<Option<BackupForDeletion>, DbError> { 177 - let result = sqlx::query!( 178 - r#" 179 - SELECT ab.id, ab.storage_key, u.deactivated_at 180 - FROM account_backups ab 181 - JOIN users u ON u.id = ab.user_id 182 - WHERE ab.id = $1 AND u.did = $2 183 - "#, 184 - backup_id, 185 - did.as_str() 186 - ) 187 - .fetch_optional(&self.pool) 188 - .await 189 - .map_err(map_sqlx_error)?; 190 - 191 - Ok(result.map(|r| BackupForDeletion { 192 - id: r.id, 193 - storage_key: r.storage_key, 194 - deactivated_at: r.deactivated_at, 195 - })) 196 - } 197 - 198 - async fn get_user_deactivated_status( 199 - &self, 200 - did: &Did, 201 - ) -> Result<Option<Option<DateTime<Utc>>>, DbError> { 202 - let result = sqlx::query!( 203 - "SELECT deactivated_at FROM users WHERE did = $1", 204 - did.as_str() 205 - ) 206 - .fetch_optional(&self.pool) 207 - .await 208 - .map_err(map_sqlx_error)?; 209 - 210 - Ok(result.map(|r| r.deactivated_at)) 211 - } 212 - 213 - async fn update_backup_enabled(&self, did: &Did, enabled: bool) -> Result<(), DbError> { 214 - sqlx::query!( 215 - "UPDATE users SET backup_enabled = $1 WHERE did = $2", 216 - enabled, 217 - did.as_str() 218 - ) 219 - .execute(&self.pool) 220 - .await 221 - .map_err(map_sqlx_error)?; 222 - 223 - Ok(()) 224 - } 225 - 226 - async fn get_user_id_by_did(&self, did: &Did) -> Result<Option<Uuid>, DbError> { 227 - let result = sqlx::query_scalar!("SELECT id FROM users WHERE did = $1", did.as_str()) 228 - .fetch_optional(&self.pool) 229 - .await 230 - .map_err(map_sqlx_error)?; 231 - 232 - Ok(result) 233 - } 234 - 235 - async fn get_blobs_for_export(&self, 
user_id: Uuid) -> Result<Vec<BlobExportInfo>, DbError> { 236 - let results = sqlx::query!( 237 - r#" 238 - SELECT DISTINCT b.cid, b.storage_key, b.mime_type 239 - FROM blobs b 240 - JOIN record_blobs rb ON rb.blob_cid = b.cid 241 - WHERE rb.repo_id = $1 242 - "#, 243 - user_id 244 - ) 245 - .fetch_all(&self.pool) 246 - .await 247 - .map_err(map_sqlx_error)?; 248 - 249 - Ok(results 250 - .into_iter() 251 - .map(|r| BlobExportInfo { 252 - cid: r.cid, 253 - storage_key: r.storage_key, 254 - mime_type: r.mime_type, 255 - }) 256 - .collect()) 257 - } 258 - 259 - async fn get_users_needing_backup( 260 - &self, 261 - backup_interval_secs: i64, 262 - limit: i64, 263 - ) -> Result<Vec<UserBackupInfo>, DbError> { 264 - let results = sqlx::query!( 265 - r#" 266 - SELECT u.id, u.did, u.backup_enabled, u.deactivated_at, r.repo_root_cid, r.repo_rev 267 - FROM users u 268 - JOIN repos r ON r.user_id = u.id 269 - WHERE u.backup_enabled = true 270 - AND u.deactivated_at IS NULL 271 - AND ( 272 - NOT EXISTS ( 273 - SELECT 1 FROM account_backups ab WHERE ab.user_id = u.id 274 - ) 275 - OR ( 276 - SELECT MAX(ab.created_at) FROM account_backups ab WHERE ab.user_id = u.id 277 - ) < NOW() - make_interval(secs => $1) 278 - ) 279 - LIMIT $2 280 - "#, 281 - backup_interval_secs as f64, 282 - limit 283 - ) 284 - .fetch_all(&self.pool) 285 - .await 286 - .map_err(map_sqlx_error)?; 287 - 288 - Ok(results 289 - .into_iter() 290 - .map(|r| UserBackupInfo { 291 - id: r.id, 292 - did: r.did.into(), 293 - backup_enabled: r.backup_enabled, 294 - deactivated_at: r.deactivated_at, 295 - repo_root_cid: r.repo_root_cid, 296 - repo_rev: r.repo_rev, 297 - }) 298 - .collect()) 299 - } 300 - }
+2 -7
crates/tranquil-db/src/postgres/mod.rs
··· 1 1 mod backlink; 2 - mod backup; 3 2 mod blob; 4 3 mod delegation; 5 4 mod event_notifier; ··· 14 13 use std::sync::Arc; 15 14 16 15 pub use backlink::PostgresBacklinkRepository; 17 - pub use backup::PostgresBackupRepository; 18 16 pub use blob::PostgresBlobRepository; 19 17 pub use delegation::PostgresDelegationRepository; 20 18 pub use event_notifier::PostgresRepoEventNotifier; ··· 24 22 pub use session::PostgresSessionRepository; 25 23 pub use sso::PostgresSsoRepository; 26 24 use tranquil_db_traits::{ 27 - BacklinkRepository, BackupRepository, BlobRepository, DelegationRepository, InfraRepository, 28 - OAuthRepository, RepoEventNotifier, RepoRepository, SessionRepository, SsoRepository, 29 - UserRepository, 25 + BacklinkRepository, BlobRepository, DelegationRepository, InfraRepository, OAuthRepository, 26 + RepoEventNotifier, RepoRepository, SessionRepository, SsoRepository, UserRepository, 30 27 }; 31 28 pub use user::PostgresUserRepository; 32 29 ··· 39 36 pub repo: Arc<dyn RepoRepository>, 40 37 pub blob: Arc<dyn BlobRepository>, 41 38 pub infra: Arc<dyn InfraRepository>, 42 - pub backup: Arc<dyn BackupRepository>, 43 39 pub backlink: Arc<dyn BacklinkRepository>, 44 40 pub sso: Arc<dyn SsoRepository>, 45 41 pub event_notifier: Arc<dyn RepoEventNotifier>, ··· 56 52 repo: Arc::new(PostgresRepoRepository::new(pool.clone())), 57 53 blob: Arc::new(PostgresBlobRepository::new(pool.clone())), 58 54 infra: Arc::new(PostgresInfraRepository::new(pool.clone())), 59 - backup: Arc::new(PostgresBackupRepository::new(pool.clone())), 60 55 backlink: Arc::new(PostgresBacklinkRepository::new(pool.clone())), 61 56 sso: Arc::new(PostgresSsoRepository::new(pool.clone())), 62 57 event_notifier: Arc::new(PostgresRepoEventNotifier::new(pool)),
-5
crates/tranquil-db/src/postgres/user.rs
··· 2315 2315 .await 2316 2316 .map_err(map_sqlx_error)?; 2317 2317 2318 - sqlx::query!("DELETE FROM account_backups WHERE user_id = $1", user_id) 2319 - .execute(&mut *tx) 2320 - .await 2321 - .map_err(map_sqlx_error)?; 2322 - 2323 2318 sqlx::query!( 2324 2319 "DELETE FROM account_deletion_requests WHERE did = $1", 2325 2320 did.as_str()
-19
crates/tranquil-infra/src/lib.rs
··· 37 37 async fn copy(&self, src_key: &str, dst_key: &str) -> Result<(), StorageError>; 38 38 } 39 39 40 - #[async_trait] 41 - pub trait BackupStorage: Send + Sync { 42 - async fn put_backup(&self, did: &str, rev: &str, data: &[u8]) -> Result<String, StorageError>; 43 - async fn get_backup(&self, storage_key: &str) -> Result<Bytes, StorageError>; 44 - async fn delete_backup(&self, storage_key: &str) -> Result<(), StorageError>; 45 - } 46 - 47 - pub fn backup_retention_count() -> u32 { 48 - tranquil_config::try_get() 49 - .map(|c| c.backup.retention_count) 50 - .unwrap_or(7) 51 - } 52 - 53 - pub fn backup_interval_secs() -> u64 { 54 - tranquil_config::try_get() 55 - .map(|c| c.backup.interval_secs) 56 - .unwrap_or(86400) 57 - } 58 - 59 40 #[derive(Debug, thiserror::Error)] 60 41 pub enum CacheError { 61 42 #[error("Cache connection error: {0}")]
+34 -22
crates/tranquil-oauth-server/src/endpoints/authorize.rs
··· 1 - use tranquil_pds::auth::{BareLoginIdentifier, NormalizedLoginIdentifier}; 2 - use tranquil_pds::comms::comms_repo::enqueue_2fa_code; 3 - use tranquil_pds::oauth::{ 4 - AuthFlow, ClientMetadataCache, Code, DeviceData, DeviceId, OAuthError, Prompt, SessionId, 5 - db::should_show_consent, scopes::expand_include_scopes, 6 - }; 7 - use tranquil_pds::rate_limit::{ 8 - OAuthAuthorizeLimit, OAuthRateLimited, OAuthRegisterCompleteLimit, TotpVerifyLimit, 9 - check_user_rate_limit, 10 - }; 11 - use tranquil_pds::state::AppState; 12 - use tranquil_pds::types::{Did, Handle, PlainPassword}; 13 - use tranquil_pds::util::extract_client_ip; 14 1 use axum::{ 15 2 Json, 16 3 extract::{Query, State}, ··· 24 11 use serde::{Deserialize, Serialize}; 25 12 use subtle::ConstantTimeEq; 26 13 use tranquil_db_traits::{ScopePreference, WebauthnChallengeType}; 14 + use tranquil_pds::auth::{BareLoginIdentifier, NormalizedLoginIdentifier}; 15 + use tranquil_pds::comms::comms_repo::enqueue_2fa_code; 16 + use tranquil_pds::oauth::{ 17 + AuthFlow, ClientMetadataCache, Code, DeviceData, DeviceId, OAuthError, Prompt, SessionId, 18 + db::should_show_consent, scopes::expand_include_scopes, 19 + }; 20 + use tranquil_pds::rate_limit::{ 21 + OAuthAuthorizeLimit, OAuthRateLimited, OAuthRegisterCompleteLimit, TotpVerifyLimit, 22 + check_user_rate_limit, 23 + }; 24 + use tranquil_pds::state::AppState; 25 + use tranquil_pds::types::{Did, Handle, PlainPassword}; 26 + use tranquil_pds::util::extract_client_ip; 27 27 use tranquil_types::{AuthorizationCode, ClientId, DeviceId as DeviceIdType, RequestId}; 28 28 use urlencoding::encode as url_encode; 29 29 ··· 95 95 cookie_str.split(';').map(|c| c.trim()).find_map(|cookie| { 96 96 cookie 97 97 .strip_prefix(&format!("{}=", DEVICE_COOKIE_NAME)) 98 - .and_then(|value| tranquil_pds::config::AuthConfig::get().verify_device_cookie(value)) 98 + .and_then(|value| { 99 + tranquil_pds::config::AuthConfig::get().verify_device_cookie(value) 100 + }) 99 101 
.map(tranquil_types::DeviceId::new) 100 102 }) 101 103 }) ··· 109 111 } 110 112 111 113 fn make_device_cookie(device_id: &tranquil_types::DeviceId) -> String { 112 - let signed_value = tranquil_pds::config::AuthConfig::get().sign_device_cookie(device_id.as_str()); 114 + let signed_value = 115 + tranquil_pds::config::AuthConfig::get().sign_device_cookie(device_id.as_str()); 113 116 format!( 114 117 "{}={}; Path=/oauth; HttpOnly; Secure; SameSite=Lax; Max-Age=31536000", 115 118 DEVICE_COOKIE_NAME, signed_value ··· 665 668 666 669 if device_is_trusted { 667 670 if let Some(ref dev_id) = device_cookie { 668 - let _ = tranquil_api::server::extend_device_trust(state.oauth_repo.as_ref(), dev_id) 669 - .await; 671 + let _ = 672 + tranquil_api::server::extend_device_trust(state.oauth_repo.as_ref(), dev_id) 673 + .await; 670 674 } 671 675 } else { 672 676 if state ··· 1413 1417 }; 1414 1418 1415 1419 let effective_scope_str = if let Some(ref grant) = delegation_grant { 1416 - tranquil_pds::delegation::intersect_scopes(requested_scope_str, grant.granted_scopes.as_str()) 1420 + tranquil_pds::delegation::intersect_scopes( 1421 + requested_scope_str, 1422 + grant.granted_scopes.as_str(), 1423 + ) 1417 1424 } else { 1418 1425 requested_scope_str.to_string() 1419 1426 }; ··· 1622 1629 }; 1623 1630 1624 1631 let effective_scope_str = if let Some(ref grant) = delegation_grant { 1625 - tranquil_pds::delegation::intersect_scopes(original_scope_str, grant.granted_scopes.as_str()) 1632 + tranquil_pds::delegation::intersect_scopes( 1633 + original_scope_str, 1634 + grant.granted_scopes.as_str(), 1635 + ) 1626 1636 } else { 1627 1637 original_scope_str.to_string() 1628 1638 }; ··· 2011 2021 .oauth_repo 2012 2022 .upsert_account_device(&did, &trust_device_id) 2013 2023 .await; 2014 - let _ = tranquil_api::server::trust_device(state.oauth_repo.as_ref(), &trust_device_id).await; 2024 + let _ = 2025 + tranquil_api::server::trust_device(state.oauth_repo.as_ref(), &trust_device_id).await; 2015 
2026 } 2016 2027 let requested_scope_str = request_data 2017 2028 .parameters ··· 3132 3143 3133 3144 if device_is_trusted { 3134 3145 if let Some(ref dev_id) = device_cookie { 3135 - let _ = tranquil_api::server::extend_device_trust(state.oauth_repo.as_ref(), dev_id) 3136 - .await; 3146 + let _ = 3147 + tranquil_api::server::extend_device_trust(state.oauth_repo.as_ref(), dev_id) 3148 + .await; 3137 3149 } 3138 3150 } else { 3139 3151 let user = match state.user_repo.get_2fa_status_by_did(&did).await {
+11 -8
crates/tranquil-oauth-server/src/endpoints/delegation.rs
··· 1 - use tranquil_pds::auth::{Active, Auth}; 2 - use tranquil_pds::delegation::DelegationActionType; 3 - use tranquil_pds::oauth::client::{build_client_metadata, delegation_oauth_urls}; 4 - use tranquil_pds::rate_limit::{LoginLimit, OAuthRateLimited, TotpVerifyLimit}; 5 - use tranquil_pds::state::AppState; 6 - use tranquil_pds::types::PlainPassword; 7 - use tranquil_pds::util::extract_client_ip; 8 1 use axum::{ 9 2 Json, 10 3 extract::{Query, State}, ··· 12 5 response::{IntoResponse, Redirect, Response}, 13 6 }; 14 7 use serde::{Deserialize, Serialize}; 8 + use tranquil_pds::auth::{Active, Auth}; 9 + use tranquil_pds::delegation::DelegationActionType; 15 10 use tranquil_pds::oauth::RequestData; 11 + use tranquil_pds::oauth::client::{build_client_metadata, delegation_oauth_urls}; 12 + use tranquil_pds::rate_limit::{LoginLimit, OAuthRateLimited, TotpVerifyLimit}; 13 + use tranquil_pds::state::AppState; 14 + use tranquil_pds::types::PlainPassword; 15 + use tranquil_pds::util::extract_client_ip; 16 16 use tranquil_types::did_doc::{extract_handle, extract_pds_endpoint}; 17 17 use tranquil_types::{Did, RequestId}; 18 18 ··· 534 534 let delegated_did = &auth_state.delegated_did; 535 535 let controller_did = &auth_state.controller_did; 536 536 537 - if let Err(_) = get_delegation_grant(&state, delegated_did, controller_did).await { 537 + if get_delegation_grant(&state, delegated_did, controller_did) 538 + .await 539 + .is_err() 540 + { 538 541 tracing::warn!( 539 542 "Delegation grant revoked during cross-PDS auth: {} -> {}", 540 543 controller_did,
+2 -2
crates/tranquil-oauth-server/src/endpoints/metadata.rs
··· 1 1 use std::fmt::Debug; 2 2 3 3 use crate::jwks::{JwkSet, create_jwk_set}; 4 - use tranquil_pds::state::AppState; 5 4 use axum::{Json, extract::State}; 6 5 use http::{HeaderName, header}; 7 6 use serde::{Deserialize, Serialize}; 7 + use tranquil_pds::state::AppState; 8 8 9 9 #[derive(Debug, Serialize, Deserialize)] 10 10 pub struct ProtectedResourceMetadata { ··· 129 129 } 130 130 131 131 pub async fn oauth_jwks(State(_state): State<AppState>) -> Json<JwkSet> { 132 - use tranquil_pds::config::AuthConfig; 133 132 use crate::jwks::Jwk; 133 + use tranquil_pds::config::AuthConfig; 134 134 let config = AuthConfig::get(); 135 135 let server_key = Jwk { 136 136 kty: "EC".to_string(),
+4 -4
crates/tranquil-oauth-server/src/endpoints/par.rs
··· 1 + use axum::body::Bytes; 2 + use axum::{Json, extract::State, http::HeaderMap}; 3 + use chrono::{Duration, Utc}; 4 + use serde::{Deserialize, Serialize}; 1 5 use tranquil_pds::oauth::{ 2 6 AuthorizationRequestParameters, ClientAuth, ClientMetadataCache, CodeChallengeMethod, 3 7 OAuthError, Prompt, RequestData, RequestId, ResponseMode, ResponseType, ··· 5 9 }; 6 10 use tranquil_pds::rate_limit::{OAuthParLimit, OAuthRateLimited}; 7 11 use tranquil_pds::state::AppState; 8 - use axum::body::Bytes; 9 - use axum::{Json, extract::State, http::HeaderMap}; 10 - use chrono::{Duration, Utc}; 11 - use serde::{Deserialize, Serialize}; 12 12 use tranquil_types::RequestId as RequestIdType; 13 13 14 14 const PAR_EXPIRY_SECONDS: i64 = 600;
+4 -4
crates/tranquil-oauth-server/src/endpoints/token/grants.rs
··· 2 2 use super::types::{ 3 3 RequestClientAuth, TokenGrant, TokenResponse, TokenType, ValidatedTokenRequest, 4 4 }; 5 + use axum::Json; 6 + use axum::http::{HeaderMap, Method}; 7 + use chrono::{Duration, Utc}; 8 + use tranquil_db_traits::RefreshTokenLookup; 5 9 use tranquil_pds::config::AuthConfig; 6 10 use tranquil_pds::delegation::intersect_scopes; 7 11 use tranquil_pds::oauth::{ ··· 12 16 verify_client_auth, 13 17 }; 14 18 use tranquil_pds::state::AppState; 15 - use axum::Json; 16 - use axum::http::{HeaderMap, Method}; 17 - use chrono::{Duration, Utc}; 18 - use tranquil_db_traits::RefreshTokenLookup; 19 19 use tranquil_types::{AuthorizationCode, Did, RefreshToken as RefreshTokenType}; 20 20 21 21 const ACCESS_TOKEN_EXPIRY_SECONDS: u64 = 300;
+2 -2
crates/tranquil-oauth-server/src/endpoints/token/helpers.rs
··· 1 - use tranquil_pds::config::AuthConfig; 2 - use tranquil_pds::oauth::OAuthError; 3 1 use base64::Engine; 4 2 use base64::engine::general_purpose::URL_SAFE_NO_PAD; 5 3 use chrono::Utc; 6 4 use hmac::Mac; 7 5 use sha2::Sha256; 8 6 use subtle::ConstantTimeEq; 7 + use tranquil_pds::config::AuthConfig; 8 + use tranquil_pds::oauth::OAuthError; 9 9 10 10 const ACCESS_TOKEN_EXPIRY_SECONDS: i64 = 300; 11 11
+3 -3
crates/tranquil-oauth-server/src/endpoints/token/introspect.rs
··· 1 1 use super::helpers::extract_token_claims; 2 - use tranquil_pds::oauth::OAuthError; 3 - use tranquil_pds::rate_limit::{OAuthIntrospectLimit, OAuthRateLimited}; 4 - use tranquil_pds::state::AppState; 5 2 use axum::extract::State; 6 3 use axum::http::StatusCode; 7 4 use axum::{Form, Json}; 8 5 use chrono::Utc; 9 6 use serde::{Deserialize, Serialize}; 7 + use tranquil_pds::oauth::OAuthError; 8 + use tranquil_pds::rate_limit::{OAuthIntrospectLimit, OAuthRateLimited}; 9 + use tranquil_pds::state::AppState; 10 10 use tranquil_types::{RefreshToken, TokenId}; 11 11 12 12 #[derive(Debug, Deserialize)]
+2 -2
crates/tranquil-oauth-server/src/endpoints/token/mod.rs
··· 3 3 mod introspect; 4 4 mod types; 5 5 6 + use axum::body::Bytes; 7 + use axum::{Json, extract::State, http::HeaderMap}; 6 8 use tranquil_pds::oauth::OAuthError; 7 9 use tranquil_pds::rate_limit::{OAuthRateLimited, OAuthTokenLimit}; 8 10 use tranquil_pds::state::AppState; 9 - use axum::body::Bytes; 10 - use axum::{Json, extract::State, http::HeaderMap}; 11 11 12 12 pub use grants::{handle_authorization_code_grant, handle_refresh_token_grant}; 13 13 pub use helpers::{TokenClaims, create_access_token, extract_token_claims, verify_pkce};
+1 -1
crates/tranquil-oauth-server/src/endpoints/token/types.rs
··· 1 - use tranquil_pds::oauth::OAuthError; 2 1 use serde::{Deserialize, Serialize}; 2 + use tranquil_pds::oauth::OAuthError; 3 3 4 4 #[derive(Debug, Clone, PartialEq, Eq)] 5 5 pub enum GrantType {
+19 -39
crates/tranquil-oauth-server/src/lib.rs
··· 5 5 use tranquil_pds::state::AppState; 6 6 7 7 pub fn oauth_routes() -> axum::Router<AppState> { 8 - use axum::{middleware, routing::{get, post}}; 8 + use axum::{ 9 + middleware, 10 + routing::{get, post}, 11 + }; 9 12 10 13 axum::Router::new() 11 14 .route("/jwks", get(endpoints::oauth_jwks)) 12 15 .route("/par", post(endpoints::pushed_authorization_request)) 13 16 .route("/authorize", get(endpoints::authorize_get)) 14 17 .route("/authorize", post(endpoints::authorize_post)) 15 - .route( 16 - "/authorize/accounts", 17 - get(endpoints::authorize_accounts), 18 - ) 19 - .route( 20 - "/authorize/select", 21 - post(endpoints::authorize_select), 22 - ) 18 + .route("/authorize/accounts", get(endpoints::authorize_accounts)) 19 + .route("/authorize/select", post(endpoints::authorize_select)) 23 20 .route("/authorize/2fa", get(endpoints::authorize_2fa_get)) 24 21 .route("/authorize/2fa", post(endpoints::authorize_2fa_post)) 25 22 .route( ··· 30 27 "/authorize/passkey", 31 28 post(endpoints::authorize_passkey_finish), 32 29 ) 33 - .route( 34 - "/passkey/check", 35 - get(endpoints::check_user_has_passkeys), 36 - ) 30 + .route("/passkey/check", get(endpoints::check_user_has_passkeys)) 37 31 .route( 38 32 "/security-status", 39 33 get(endpoints::check_user_security_status), ··· 41 35 .route("/passkey/start", post(endpoints::passkey_start)) 42 36 .route("/passkey/finish", post(endpoints::passkey_finish)) 43 37 .route("/authorize/deny", post(endpoints::authorize_deny)) 44 - .route( 45 - "/register/complete", 46 - post(endpoints::register_complete), 47 - ) 48 - .route( 49 - "/establish-session", 50 - post(endpoints::establish_session), 51 - ) 38 + .route("/register/complete", post(endpoints::register_complete)) 39 + .route("/establish-session", post(endpoints::establish_session)) 52 40 .route("/authorize/consent", get(endpoints::consent_get)) 53 41 .route("/authorize/consent", post(endpoints::consent_post)) 54 42 .route("/authorize/renew", post(endpoints::authorize_renew)) 55 
- .route( 56 - "/authorize/redirect", 57 - get(endpoints::authorize_redirect), 58 - ) 43 + .route("/authorize/redirect", get(endpoints::authorize_redirect)) 59 44 .route("/delegation/auth", post(endpoints::delegation_auth)) 60 45 .route( 61 46 "/delegation/auth-token", 62 47 post(endpoints::delegation_auth_token), 63 48 ) 64 - .route( 65 - "/delegation/totp", 66 - post(endpoints::delegation_totp_verify), 67 - ) 68 - .route( 69 - "/delegation/callback", 70 - get(endpoints::delegation_callback), 71 - ) 49 + .route("/delegation/totp", post(endpoints::delegation_totp_verify)) 50 + .route("/delegation/callback", get(endpoints::delegation_callback)) 72 51 .route( 73 52 "/delegation/client-metadata", 74 53 get(endpoints::delegation_client_metadata), ··· 96 75 "/sso/check-handle-available", 97 76 get(sso_endpoints::check_handle_available), 98 77 ) 99 - .layer(middleware::from_fn(tranquil_pds::oauth::verify::dpop_nonce_middleware)) 78 + .layer(middleware::from_fn( 79 + tranquil_pds::oauth::verify::dpop_nonce_middleware, 80 + )) 100 81 } 101 82 102 83 pub fn well_known_oauth_routes() -> axum::Router<AppState> { ··· 116 97 pub fn frontend_client_metadata_route() -> axum::Router<AppState> { 117 98 use axum::routing::get; 118 99 119 - axum::Router::new() 120 - .route( 121 - "/oauth-client-metadata.json", 122 - get(endpoints::frontend_client_metadata), 123 - ) 100 + axum::Router::new().route( 101 + "/oauth-client-metadata.json", 102 + get(endpoints::frontend_client_metadata), 103 + ) 124 104 }
+11 -10
crates/tranquil-oauth-server/src/sso_endpoints.rs
··· 9 9 use tranquil_db_traits::{SsoAction, SsoProviderType}; 10 10 use tranquil_types::RequestId; 11 11 12 - use tranquil_pds::sso::SsoConfig; 13 12 use tranquil_pds::api::error::ApiError; 14 13 use tranquil_pds::auth::extractor::extract_auth_token_from_header; 15 14 use tranquil_pds::auth::{generate_app_password, validate_bearer_token_cached}; ··· 17 16 AccountCreationLimit, RateLimited, SsoCallbackLimit, SsoInitiateLimit, SsoUnlinkLimit, 18 17 check_user_rate_limit_with_message, 19 18 }; 19 + use tranquil_pds::sso::SsoConfig; 20 20 use tranquil_pds::state::AppState; 21 21 22 22 fn generate_nonce() -> String { ··· 1048 1048 } 1049 1049 }; 1050 1050 1051 - let plc_client = tranquil_pds::plc::PlcClient::with_cache(None, Some(state.cache.clone())); 1051 + let plc_client = 1052 + tranquil_pds::plc::PlcClient::with_cache(None, Some(state.cache.clone())); 1052 1053 if let Err(e) = plc_client 1053 1054 .send_operation(&genesis_result.did, &genesis_result.signed_operation) 1054 1055 .await ··· 1291 1292 } 1292 1293 }; 1293 1294 1294 - let access_meta = match tranquil_pds::auth::create_access_token_with_metadata(&did, &key_bytes) 1295 - { 1296 - Ok(m) => m, 1297 - Err(e) => { 1298 - tracing::error!("Failed to create access token: {:?}", e); 1299 - return Err(ApiError::InternalError(None)); 1300 - } 1301 - }; 1295 + let access_meta = 1296 + match tranquil_pds::auth::create_access_token_with_metadata(&did, &key_bytes) { 1297 + Ok(m) => m, 1298 + Err(e) => { 1299 + tracing::error!("Failed to create access token: {:?}", e); 1300 + return Err(ApiError::InternalError(None)); 1301 + } 1302 + }; 1302 1303 let refresh_meta = 1303 1304 match tranquil_pds::auth::create_refresh_token_with_metadata(&did, &key_bytes) { 1304 1305 Ok(m) => m,
+1 -2
crates/tranquil-oauth/src/lib.rs
··· 7 7 pub use dpop::{ 8 8 DPoPJwk, DPoPProofHeader, DPoPProofPayload, DPoPVerifier, DPoPVerifyResult, 9 9 compute_access_token_hash, compute_es256_jkt, compute_jwk_thumbprint, compute_pkce_challenge, 10 - create_dpop_proof, 11 - es256_signing_key_to_jwk, 10 + create_dpop_proof, es256_signing_key_to_jwk, 12 11 }; 13 12 pub use error::OAuthError; 14 13 pub use types::{
+2 -9
crates/tranquil-pds/src/api/error.rs
··· 72 72 InvalidDelegation(String), 73 73 DelegationNotFound, 74 74 InviteCodeRequired, 75 - BackupNotFound, 76 - BackupsDisabled, 77 75 RepoNotReady, 78 76 DeviceNotFound, 79 77 NoEmail, ··· 124 122 Self::UpstreamFailure | Self::UpstreamUnavailable(_) | Self::UpstreamErrorMsg(_) => { 125 123 StatusCode::BAD_GATEWAY 126 124 } 127 - Self::ServiceUnavailable(_) | Self::BackupsDisabled => StatusCode::SERVICE_UNAVAILABLE, 125 + Self::ServiceUnavailable(_) => StatusCode::SERVICE_UNAVAILABLE, 128 126 Self::UpstreamTimeout => StatusCode::GATEWAY_TIMEOUT, 129 127 Self::UpstreamError { status, .. } => *status, 130 128 Self::AuthenticationRequired ··· 155 153 | Self::DeviceNotFound 156 154 | Self::ControllerNotFound 157 155 | Self::DelegationNotFound 158 - | Self::BackupNotFound 159 156 | Self::InvalidRecoveryLink 160 157 | Self::HandleNotFound 161 158 | Self::SubjectNotFound ··· 283 280 Self::InvalidDelegation(_) => Cow::Borrowed("InvalidDelegation"), 284 281 Self::DelegationNotFound => Cow::Borrowed("DelegationNotFound"), 285 282 Self::InviteCodeRequired => Cow::Borrowed("InviteCodeRequired"), 286 - Self::BackupNotFound => Cow::Borrowed("BackupNotFound"), 287 - Self::BackupsDisabled => Cow::Borrowed("BackupsDisabled"), 288 283 Self::RepoNotReady => Cow::Borrowed("RepoNotReady"), 289 284 Self::MfaVerificationRequired => Cow::Borrowed("MfaVerificationRequired"), 290 285 Self::RateLimitExceeded(_) => Cow::Borrowed("RateLimitExceeded"), ··· 373 368 Self::InviteCodeRequired => { 374 369 Some("An invite code is required to create an account".to_string()) 375 370 } 376 - Self::BackupNotFound => Some("Backup not found".to_string()), 377 - Self::BackupsDisabled => Some("Backup storage not configured".to_string()), 378 - Self::RepoNotReady => Some("Repository not ready for backup".to_string()), 371 + Self::RepoNotReady => Some("Repository not ready".to_string()), 379 372 Self::PasskeyCounterAnomaly => Some( 380 373 "Authentication failed: security key counter anomaly detected. 
This may indicate a cloned key.".to_string(), 381 374 ),
+1 -3
crates/tranquil-pds/src/api/validation.rs
··· 317 317 318 318 if !input.contains('.') || matched_domain.is_some() { 319 319 let handle_to_validate = match matched_domain { 320 - Some(domain) => input 321 - .strip_suffix(&format!(".{}", domain)) 322 - .unwrap_or(input), 320 + Some(domain) => input.strip_suffix(&format!(".{}", domain)).unwrap_or(input), 323 321 None => input, 324 322 }; 325 323 let validated = validate_short_handle(handle_to_validate)?;
+4 -2
crates/tranquil-pds/src/auth/mod.rs
··· 13 13 pub mod account_verified; 14 14 pub mod email_token; 15 15 pub mod extractor; 16 - pub mod reauth; 17 16 pub mod legacy_2fa; 18 17 pub mod login_identifier; 19 18 pub mod mfa_verified; 19 + pub mod reauth; 20 20 pub mod scope_check; 21 21 pub mod scope_verified; 22 22 pub mod service; ··· 206 206 return ScopePermissions::from_scope_string(Some(scope)); 207 207 } 208 208 if !self.is_oauth() { 209 - return ScopePermissions::from_scope_string(Some("transition:generic transition:chat.bsky")); 209 + return ScopePermissions::from_scope_string(Some( 210 + "transition:generic transition:chat.bsky", 211 + )); 210 212 } 211 213 ScopePermissions::from_scope_string(self.scope.as_deref()) 212 214 }
+1 -1
crates/tranquil-pds/src/crawlers.rs
··· 1 1 use crate::circuit_breaker::CircuitBreaker; 2 - use tranquil_db_traits::SequencedEvent; 3 2 use reqwest::Client; 4 3 use std::sync::Arc; 5 4 use std::sync::atomic::{AtomicU64, Ordering}; ··· 8 7 use tokio_util::sync::CancellationToken; 9 8 use tracing::{debug, error, info, warn}; 10 9 use tranquil_db_traits::RepoEventType; 10 + use tranquil_db_traits::SequencedEvent; 11 11 12 12 const NOTIFY_THRESHOLD_SECS: u64 = 20 * 60; 13 13
+1 -2
crates/tranquil-pds/src/delegation/mod.rs
··· 2 2 pub mod scopes; 3 3 4 4 pub use roles::{ 5 - CanAddControllers, CanControlAccounts, verify_can_add_controllers, 6 - verify_can_control_accounts, 5 + CanAddControllers, CanControlAccounts, verify_can_add_controllers, verify_can_control_accounts, 7 6 }; 8 7 pub use scopes::{ 9 8 InvalidDelegationScopeError, SCOPE_PRESETS, ScopePreset, ValidatedDelegationScope,
-1
crates/tranquil-pds/src/delegation/roles.rs
··· 81 81 _tag: PhantomData, 82 82 }) 83 83 } 84 -
+9 -7
crates/tranquil-pds/src/delegation/scopes.rs
··· 214 214 215 215 #[test] 216 216 fn test_intersect_granted_with_params_covers_requested_no_params() { 217 - let result = 218 - intersect_scopes("repo:app.bsky.feed.post", "repo:*?action=create&action=delete"); 217 + let result = intersect_scopes( 218 + "repo:app.bsky.feed.post", 219 + "repo:*?action=create&action=delete", 220 + ); 219 221 assert_eq!(result, "repo:app.bsky.feed.post"); 220 222 } 221 223 ··· 231 233 #[test] 232 234 fn test_scope_covers_base_only() { 233 235 assert!(scope_covers("repo:*", "repo:app.bsky.feed.post")); 234 - assert!(scope_covers("repo:*", "repo:app.bsky.feed.post?action=create")); 236 + assert!(scope_covers( 237 + "repo:*", 238 + "repo:app.bsky.feed.post?action=create" 239 + )); 235 240 assert!(!scope_covers("blob:*/*", "repo:app.bsky.feed.post")); 236 241 } 237 242 238 243 #[test] 239 244 fn test_scope_covers_params() { 240 - assert!(scope_covers( 241 - "repo:*?action=create", 242 - "repo:*?action=create" 243 - )); 245 + assert!(scope_covers("repo:*?action=create", "repo:*?action=create")); 244 246 assert!(!scope_covers( 245 247 "repo:*?action=create", 246 248 "repo:*?action=delete"
+1 -7
crates/tranquil-pds/src/lib.rs
··· 29 29 pub mod validation; 30 30 31 31 use api::proxy::XrpcProxyLayer; 32 - use axum::{ 33 - Json, Router, 34 - extract::DefaultBodyLimit, 35 - http::Method, 36 - middleware, 37 - routing::get, 38 - }; 32 + use axum::{Json, Router, extract::DefaultBodyLimit, http::Method, middleware, routing::get}; 39 33 use http::StatusCode; 40 34 use serde_json::json; 41 35 use state::AppState;
+32 -18
crates/tranquil-pds/src/oauth/client.rs
··· 93 93 state_key: &str, 94 94 ) -> Result<CrossPdsAuthState, CrossPdsError> { 95 95 let cache_key = format!("cross_pds_state:{}", state_key); 96 - let encrypted_bytes = self 97 - .cache 98 - .get_bytes(&cache_key) 99 - .await 100 - .ok_or_else(|| CrossPdsError::TokenExchangeFailed("auth state expired or not found".into()))?; 96 + let encrypted_bytes = self.cache.get_bytes(&cache_key).await.ok_or_else(|| { 97 + CrossPdsError::TokenExchangeFailed("auth state expired or not found".into()) 98 + })?; 101 99 let _ = self.cache.delete(&cache_key).await; 102 - let decrypted = crate::config::decrypt_key( 103 - &encrypted_bytes, 104 - Some(crate::config::ENCRYPTION_VERSION), 105 - ) 106 - .map_err(|e| CrossPdsError::TokenExchangeFailed(format!("decrypt auth state: {}", e)))?; 107 - serde_json::from_slice(&decrypted) 108 - .map_err(|e| CrossPdsError::TokenExchangeFailed(format!("deserialize auth state: {}", e))) 100 + let decrypted = 101 + crate::config::decrypt_key(&encrypted_bytes, Some(crate::config::ENCRYPTION_VERSION)) 102 + .map_err(|e| { 103 + CrossPdsError::TokenExchangeFailed(format!("decrypt auth state: {}", e)) 104 + })?; 105 + serde_json::from_slice(&decrypted).map_err(|e| { 106 + CrossPdsError::TokenExchangeFailed(format!("deserialize auth state: {}", e)) 107 + }) 109 108 } 110 109 111 110 pub async fn check_remote_is_delegated(&self, pds_url: &str, did: &str) -> Option<bool> { ··· 142 141 .map_err(|e| format!("{:?}", e)) 143 142 }; 144 143 145 - let resp = self.http.post(url).header("DPoP", &make_proof(None)?).form(params) 146 - .send().await.map_err(|e| e.to_string())?; 144 + let resp = self 145 + .http 146 + .post(url) 147 + .header("DPoP", &make_proof(None)?) 
148 + .form(params) 149 + .send() 150 + .await 151 + .map_err(|e| e.to_string())?; 147 152 148 - let nonce = resp.headers().get("dpop-nonce") 149 - .and_then(|v| v.to_str().ok()).map(|s| s.to_string()); 153 + let nonce = resp 154 + .headers() 155 + .get("dpop-nonce") 156 + .and_then(|v| v.to_str().ok()) 157 + .map(|s| s.to_string()); 150 158 let needs_retry = matches!( 151 159 resp.status(), 152 160 reqwest::StatusCode::BAD_REQUEST | reqwest::StatusCode::UNAUTHORIZED 153 161 ); 154 162 155 163 if needs_retry && nonce.is_some() { 156 - return self.http.post(url).header("DPoP", &make_proof(nonce.as_deref())?) 157 - .form(params).send().await.map_err(|e| e.to_string()); 164 + return self 165 + .http 166 + .post(url) 167 + .header("DPoP", &make_proof(nonce.as_deref())?) 168 + .form(params) 169 + .send() 170 + .await 171 + .map_err(|e| e.to_string()); 158 172 } 159 173 Ok(resp) 160 174 }
+4 -294
crates/tranquil-pds/src/scheduled.rs
··· 10 10 use tokio_util::sync::CancellationToken; 11 11 use tracing::{debug, error, info, warn}; 12 12 use tranquil_db_traits::{ 13 - BackupRepository, BlobRepository, BrokenGenesisCommit, RepoRepository, SequenceNumber, 14 - SsoRepository, UserRepository, 13 + BlobRepository, BrokenGenesisCommit, RepoRepository, SequenceNumber, SsoRepository, 14 + UserRepository, 15 15 }; 16 16 use tranquil_types::{AtUri, CidLink, Did}; 17 17 18 18 use crate::repo::PostgresBlockStore; 19 - use crate::storage::{BackupStorage, BlobStorage, backup_interval_secs, backup_retention_count}; 19 + use crate::storage::BlobStorage; 20 20 use crate::sync::car::encode_car_header; 21 21 22 22 #[derive(Debug)] ··· 571 571 Ok(()) 572 572 } 573 573 574 - pub async fn start_backup_tasks( 575 - repo_repo: Arc<dyn RepoRepository>, 576 - backup_repo: Arc<dyn BackupRepository>, 577 - block_store: PostgresBlockStore, 578 - backup_storage: Arc<dyn BackupStorage>, 579 - shutdown: CancellationToken, 580 - ) { 581 - let backup_interval = Duration::from_secs(backup_interval_secs()); 582 - 583 - info!( 584 - interval_secs = backup_interval.as_secs(), 585 - retention_count = backup_retention_count(), 586 - "Starting backup service" 587 - ); 588 - 589 - let mut ticker = interval(backup_interval); 590 - ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); 591 - 592 - loop { 593 - tokio::select! 
{ 594 - _ = shutdown.cancelled() => { 595 - info!("Backup service shutting down"); 596 - break; 597 - } 598 - _ = ticker.tick() => { 599 - if let Err(e) = process_scheduled_backups( 600 - repo_repo.as_ref(), 601 - backup_repo.as_ref(), 602 - &block_store, 603 - backup_storage.as_ref(), 604 - ).await { 605 - error!("Error processing scheduled backups: {}", e); 606 - } 607 - } 608 - } 609 - } 610 - } 611 - 612 - struct BackupResult { 613 - did: Did, 614 - repo_rev: String, 615 - size_bytes: i64, 616 - block_count: i32, 617 - user_id: uuid::Uuid, 618 - } 619 - 620 - enum BackupOutcome { 621 - Success(BackupResult), 622 - Skipped(Did, &'static str), 623 - Failed(Did, String), 624 - } 625 - 626 - #[allow(clippy::too_many_arguments)] 627 - async fn process_single_backup( 628 - repo_repo: &dyn RepoRepository, 629 - backup_repo: &dyn BackupRepository, 630 - block_store: &PostgresBlockStore, 631 - backup_storage: &dyn BackupStorage, 632 - user_id: uuid::Uuid, 633 - did: Did, 634 - repo_root_cid: String, 635 - repo_rev: Option<String>, 636 - ) -> BackupOutcome { 637 - let repo_rev = match repo_rev { 638 - Some(rev) => rev, 639 - None => return BackupOutcome::Skipped(did, "no repo_rev"), 640 - }; 641 - 642 - let head_cid = match Cid::from_str(&repo_root_cid) { 643 - Ok(c) => c, 644 - Err(_) => return BackupOutcome::Skipped(did, "invalid repo_root_cid"), 645 - }; 646 - 647 - let car_bytes = match generate_full_backup(repo_repo, block_store, user_id, &head_cid).await { 648 - Ok(bytes) => bytes, 649 - Err(e) => return BackupOutcome::Failed(did, format!("CAR generation: {}", e)), 650 - }; 651 - 652 - let block_count = count_car_blocks(&car_bytes); 653 - let size_bytes = i64::try_from(car_bytes.len()).unwrap_or(i64::MAX); 654 - 655 - let storage_key = match backup_storage 656 - .put_backup(did.as_str(), &repo_rev, &car_bytes) 657 - .await 658 - { 659 - Ok(key) => key, 660 - Err(e) => return BackupOutcome::Failed(did, format!("S3 upload: {}", e)), 661 - }; 662 - 663 - if let Err(e) 
= backup_repo 664 - .insert_backup( 665 - user_id, 666 - &storage_key, 667 - &repo_root_cid, 668 - &repo_rev, 669 - block_count, 670 - size_bytes, 671 - ) 672 - .await 673 - { 674 - if let Err(rollback_err) = backup_storage.delete_backup(&storage_key).await { 675 - error!( 676 - did = %did, 677 - storage_key = %storage_key, 678 - error = %rollback_err, 679 - "Failed to rollback orphaned backup from S3" 680 - ); 681 - } 682 - return BackupOutcome::Failed(did, format!("DB insert: {:?}", e)); 683 - } 684 - 685 - BackupOutcome::Success(BackupResult { 686 - did, 687 - repo_rev, 688 - size_bytes, 689 - block_count, 690 - user_id, 691 - }) 692 - } 693 - 694 - async fn process_scheduled_backups( 695 - repo_repo: &dyn RepoRepository, 696 - backup_repo: &dyn BackupRepository, 697 - block_store: &PostgresBlockStore, 698 - backup_storage: &dyn BackupStorage, 699 - ) -> anyhow::Result<()> { 700 - let interval_secs = i64::try_from(backup_interval_secs()).unwrap_or(i64::MAX); 701 - let retention = backup_retention_count(); 702 - 703 - let users_needing_backup = backup_repo 704 - .get_users_needing_backup(interval_secs, 50) 705 - .await 706 - .context("DB error fetching users for backup")?; 707 - 708 - if users_needing_backup.is_empty() { 709 - debug!("No accounts need backup"); 710 - return Ok(()); 711 - } 712 - 713 - info!( 714 - count = users_needing_backup.len(), 715 - "Processing scheduled backups" 716 - ); 717 - 718 - let results = futures::future::join_all(users_needing_backup.into_iter().map(|user| { 719 - process_single_backup( 720 - repo_repo, 721 - backup_repo, 722 - block_store, 723 - backup_storage, 724 - user.id, 725 - user.did, 726 - user.repo_root_cid.to_string(), 727 - user.repo_rev, 728 - ) 729 - })) 730 - .await; 731 - 732 - futures::future::join_all(results.into_iter().map(|outcome| async move { 733 - match outcome { 734 - BackupOutcome::Success(result) => { 735 - info!( 736 - did = %result.did, 737 - rev = %result.repo_rev, 738 - size_bytes = 
result.size_bytes, 739 - block_count = result.block_count, 740 - "Created backup" 741 - ); 742 - if let Err(e) = 743 - cleanup_old_backups(backup_repo, backup_storage, result.user_id, retention) 744 - .await 745 - { 746 - warn!(did = %result.did, error = %e, "Failed to cleanup old backups"); 747 - } 748 - } 749 - BackupOutcome::Skipped(did, reason) => { 750 - warn!(did = %did, reason = reason, "Skipped backup"); 751 - } 752 - BackupOutcome::Failed(did, error) => { 753 - warn!(did = %did, error = %error, "Failed backup"); 754 - } 755 - } 756 - })) 757 - .await; 758 - 759 - Ok(()) 760 - } 761 - 762 574 pub async fn generate_repo_car( 763 575 block_store: &PostgresBlockStore, 764 576 head_cid: &Cid, ··· 771 583 .filter_map(|b| match Cid::try_from(b.as_slice()) { 772 584 Ok(cid) => Some(cid), 773 585 Err(e) => { 774 - tracing::warn!(error = %e, "skipping unparseable CID in backup generation"); 586 + tracing::warn!(error = %e, "skipping unparseable CID in CAR generation"); 775 587 None 776 588 } 777 589 }) ··· 831 643 832 644 generate_repo_car(block_store, &actual_head_cid).await 833 645 } 834 - 835 - pub async fn generate_full_backup( 836 - repo_repo: &dyn tranquil_db_traits::RepoRepository, 837 - block_store: &PostgresBlockStore, 838 - user_id: uuid::Uuid, 839 - head_cid: &Cid, 840 - ) -> anyhow::Result<Vec<u8>> { 841 - generate_repo_car_from_user_blocks(repo_repo, block_store, user_id, head_cid).await 842 - } 843 - 844 - pub fn count_car_blocks(car_bytes: &[u8]) -> i32 { 845 - let mut count: i32 = 0; 846 - let mut pos: usize = 0; 847 - 848 - if let Some((header_len, header_varint_len)) = read_varint(&car_bytes[pos..]) { 849 - let Some(header_size) = usize::try_from(header_len).ok() else { 850 - return 0; 851 - }; 852 - let Some(next_pos) = header_varint_len 853 - .checked_add(header_size) 854 - .and_then(|skip| pos.checked_add(skip)) 855 - else { 856 - return 0; 857 - }; 858 - pos = next_pos; 859 - } else { 860 - return 0; 861 - } 862 - 863 - while pos < 
car_bytes.len() { 864 - if let Some((block_len, varint_len)) = read_varint(&car_bytes[pos..]) { 865 - let Some(block_size) = usize::try_from(block_len).ok() else { 866 - break; 867 - }; 868 - let Some(next_pos) = varint_len 869 - .checked_add(block_size) 870 - .and_then(|skip| pos.checked_add(skip)) 871 - else { 872 - break; 873 - }; 874 - pos = next_pos; 875 - count = count.saturating_add(1); 876 - } else { 877 - break; 878 - } 879 - } 880 - 881 - count 882 - } 883 - 884 - fn read_varint(data: &[u8]) -> Option<(u64, usize)> { 885 - let mut value: u64 = 0; 886 - let mut shift = 0; 887 - let mut pos = 0; 888 - 889 - while pos < data.len() && pos < 10 { 890 - let byte = data[pos]; 891 - value |= ((byte & 0x7f) as u64) << shift; 892 - pos += 1; 893 - if byte & 0x80 == 0 { 894 - return Some((value, pos)); 895 - } 896 - shift += 7; 897 - } 898 - 899 - None 900 - } 901 - 902 - async fn cleanup_old_backups( 903 - backup_repo: &dyn BackupRepository, 904 - backup_storage: &dyn BackupStorage, 905 - user_id: uuid::Uuid, 906 - retention_count: u32, 907 - ) -> anyhow::Result<()> { 908 - let old_backups = backup_repo 909 - .get_old_backups(user_id, i64::from(retention_count)) 910 - .await 911 - .context("DB error fetching old backups")?; 912 - 913 - let results = futures::future::join_all(old_backups.into_iter().map(|backup| async move { 914 - match backup_storage.delete_backup(&backup.storage_key).await { 915 - Ok(()) => backup_repo 916 - .delete_backup(backup.id) 917 - .await 918 - .with_context(|| format!("DB delete failed for {}", backup.storage_key)), 919 - Err(e) => { 920 - warn!( 921 - storage_key = %backup.storage_key, 922 - error = %e, 923 - "Failed to delete old backup from storage, skipping DB cleanup to avoid orphan" 924 - ); 925 - Ok(()) 926 - } 927 - } 928 - })) 929 - .await; 930 - 931 - results 932 - .into_iter() 933 - .find_map(|r| r.err()) 934 - .map_or(Ok(()), Err) 935 - }
+5 -10
crates/tranquil-pds/src/state.rs
··· 9 9 use crate::repo::PostgresBlockStore; 10 10 use crate::repo_write_lock::RepoWriteLocks; 11 11 use crate::sso::{SsoConfig, SsoManager}; 12 - use crate::storage::{BackupStorage, BlobStorage, create_backup_storage, create_blob_storage}; 13 - use tranquil_db_traits::SequencedEvent; 12 + use crate::storage::{BlobStorage, create_blob_storage}; 14 13 use sqlx::PgPool; 15 14 use std::error::Error; 16 15 use std::sync::Arc; ··· 18 17 use tokio::sync::broadcast; 19 18 use tokio_util::sync::CancellationToken; 20 19 use tranquil_db::{ 21 - BacklinkRepository, BackupRepository, BlobRepository, DelegationRepository, InfraRepository, 22 - OAuthRepository, PostgresRepositories, RepoEventNotifier, RepoRepository, SessionRepository, 23 - SsoRepository, UserRepository, 20 + BacklinkRepository, BlobRepository, DelegationRepository, InfraRepository, OAuthRepository, 21 + PostgresRepositories, RepoEventNotifier, RepoRepository, SessionRepository, SsoRepository, 22 + UserRepository, 24 23 }; 24 + use tranquil_db_traits::SequencedEvent; 25 25 26 26 static RATE_LIMITING_DISABLED: AtomicBool = AtomicBool::new(false); 27 27 ··· 43 43 pub repo_repo: Arc<dyn RepoRepository>, 44 44 pub blob_repo: Arc<dyn BlobRepository>, 45 45 pub infra_repo: Arc<dyn InfraRepository>, 46 - pub backup_repo: Arc<dyn BackupRepository>, 47 46 pub backlink_repo: Arc<dyn BacklinkRepository>, 48 47 pub event_notifier: Arc<dyn RepoEventNotifier>, 49 48 pub block_store: PostgresBlockStore, 50 49 pub blob_store: Arc<dyn BlobStorage>, 51 - pub backup_storage: Option<Arc<dyn BackupStorage>>, 52 50 pub firehose_tx: broadcast::Sender<SequencedEvent>, 53 51 pub rate_limiters: Arc<RateLimiters>, 54 52 pub repo_write_locks: Arc<RepoWriteLocks>, ··· 269 267 let repos = Arc::new(PostgresRepositories::new(db.clone())); 270 268 let block_store = PostgresBlockStore::new(db); 271 269 let blob_store = create_blob_storage().await; 272 - let backup_storage = create_backup_storage().await; 273 270 274 271 let firehose_buffer_size 
= tranquil_config::get().firehose.buffer_size; 275 272 ··· 295 292 repo_repo: repos.repo.clone(), 296 293 blob_repo: repos.blob.clone(), 297 294 infra_repo: repos.infra.clone(), 298 - backup_repo: repos.backup.clone(), 299 295 backlink_repo: repos.backlink.clone(), 300 296 event_notifier: repos.event_notifier.clone(), 301 297 sso_repo: repos.sso.clone(), 302 298 repos, 303 299 block_store, 304 300 blob_store, 305 - backup_storage, 306 301 firehose_tx, 307 302 rate_limiters, 308 303 repo_write_locks,
+2 -4
crates/tranquil-pds/src/storage/mod.rs
··· 1 1 pub use tranquil_storage::{ 2 - BackupStorage, BlobStorage, FilesystemBackupStorage, FilesystemBlobStorage, StorageError, 3 - StreamUploadResult, backup_interval_secs, backup_retention_count, create_backup_storage, 4 - create_blob_storage, 2 + BlobStorage, FilesystemBlobStorage, StorageError, StreamUploadResult, create_blob_storage, 5 3 }; 6 4 7 5 #[cfg(feature = "s3-storage")] 8 - pub use tranquil_storage::{S3BackupStorage, S3BlobStorage}; 6 + pub use tranquil_storage::S3BlobStorage;
-3
crates/tranquil-pds/src/sync/mod.rs
··· 5 5 pub mod util; 6 6 pub mod verify; 7 7 8 - #[cfg(test)] 9 - mod verify_tests; 10 - 11 8 pub use firehose::SequencedEvent; 12 9 pub use util::{ 13 10 RepoAccessLevel, RepoAccount, RepoAvailabilityError, assert_repo_availability,
+4 -1
crates/tranquil-pds/src/sync/verify.rs
··· 130 130 .ok_or(VerifyError::NoSigningKey) 131 131 } 132 132 133 - pub(crate) async fn resolve_did_document(&self, did: &Did) -> Result<DidDocument<'static>, VerifyError> { 133 + pub(crate) async fn resolve_did_document( 134 + &self, 135 + did: &Did, 136 + ) -> Result<DidDocument<'static>, VerifyError> { 134 137 let did_str = did.as_str(); 135 138 if did_str.starts_with("did:plc:") { 136 139 self.resolve_plc_did(did_str).await
+21 -21
crates/tranquil-pds/src/util.rs
··· 210 210 } 211 211 } 212 212 213 + pub(crate) fn gen_invite_random_token() -> String { 214 + let mut rng = rand::thread_rng(); 215 + let chars: Vec<char> = BASE32_ALPHABET.chars().collect(); 216 + let gen_segment = |rng: &mut rand::rngs::ThreadRng, len: usize| -> String { 217 + (0..len) 218 + .map(|_| chars[rng.gen_range(0..chars.len())]) 219 + .collect() 220 + }; 221 + format!("{}-{}", gen_segment(&mut rng, 5), gen_segment(&mut rng, 5)) 222 + } 223 + 224 + pub fn gen_invite_code() -> String { 225 + let hostname = &tranquil_config::get().server.hostname; 226 + let hostname_prefix = hostname.replace('.', "-"); 227 + format!("{}-{}", hostname_prefix, gen_invite_random_token()) 228 + } 229 + 230 + pub fn is_self_hosted_did_web_enabled() -> bool { 231 + tranquil_config::get().server.enable_pds_hosted_did_web 232 + } 233 + 213 234 #[cfg(test)] 214 235 mod tests { 215 236 use super::*; ··· 548 569 ); 549 570 } 550 571 } 551 - 552 - pub(crate) fn gen_invite_random_token() -> String { 553 - let mut rng = rand::thread_rng(); 554 - let chars: Vec<char> = BASE32_ALPHABET.chars().collect(); 555 - let gen_segment = |rng: &mut rand::rngs::ThreadRng, len: usize| -> String { 556 - (0..len) 557 - .map(|_| chars[rng.gen_range(0..chars.len())]) 558 - .collect() 559 - }; 560 - format!("{}-{}", gen_segment(&mut rng, 5), gen_segment(&mut rng, 5)) 561 - } 562 - 563 - pub fn gen_invite_code() -> String { 564 - let hostname = &tranquil_config::get().server.hostname; 565 - let hostname_prefix = hostname.replace('.', "-"); 566 - format!("{}-{}", hostname_prefix, gen_invite_random_token()) 567 - } 568 - 569 - pub fn is_self_hosted_did_web_enabled() -> bool { 570 - tranquil_config::get().server.enable_pds_hosted_did_web 571 - }
-325
crates/tranquil-pds/tests/backup.rs
··· 1 - mod common; 2 - mod helpers; 3 - 4 - use common::*; 5 - use reqwest::{StatusCode, header}; 6 - use serde_json::{Value, json}; 7 - 8 - #[tokio::test] 9 - async fn test_list_backups_empty() { 10 - let client = client(); 11 - let (token, _did) = create_account_and_login(&client).await; 12 - 13 - let res = client 14 - .get(format!("{}/xrpc/_backup.listBackups", base_url().await)) 15 - .bearer_auth(&token) 16 - .send() 17 - .await 18 - .expect("listBackups request failed"); 19 - 20 - assert_eq!(res.status(), StatusCode::OK); 21 - let body: Value = res.json().await.expect("Invalid JSON"); 22 - assert!(body["backups"].is_array()); 23 - assert_eq!(body["backups"].as_array().unwrap().len(), 0); 24 - assert!(body["backupEnabled"].as_bool().unwrap_or(false)); 25 - } 26 - 27 - #[tokio::test] 28 - async fn test_create_and_list_backup() { 29 - let client = client(); 30 - let (token, _did) = create_account_and_login(&client).await; 31 - 32 - let create_res = client 33 - .post(format!("{}/xrpc/_backup.createBackup", base_url().await)) 34 - .bearer_auth(&token) 35 - .send() 36 - .await 37 - .expect("createBackup request failed"); 38 - 39 - assert_eq!(create_res.status(), StatusCode::OK, "createBackup failed"); 40 - let create_body: Value = create_res.json().await.expect("Invalid JSON"); 41 - assert!(create_body["id"].is_string()); 42 - assert!(create_body["repoRev"].is_string()); 43 - assert!(create_body["sizeBytes"].is_i64()); 44 - assert!(create_body["blockCount"].is_i64()); 45 - 46 - let list_res = client 47 - .get(format!("{}/xrpc/_backup.listBackups", base_url().await)) 48 - .bearer_auth(&token) 49 - .send() 50 - .await 51 - .expect("listBackups request failed"); 52 - 53 - assert_eq!(list_res.status(), StatusCode::OK); 54 - let list_body: Value = list_res.json().await.expect("Invalid JSON"); 55 - let backups = list_body["backups"].as_array().unwrap(); 56 - assert!(!backups.is_empty()); 57 - } 58 - 59 - #[tokio::test] 60 - async fn test_download_backup() { 61 - let 
client = client(); 62 - let (token, _did) = create_account_and_login(&client).await; 63 - 64 - let create_res = client 65 - .post(format!("{}/xrpc/_backup.createBackup", base_url().await)) 66 - .bearer_auth(&token) 67 - .send() 68 - .await 69 - .expect("createBackup request failed"); 70 - 71 - assert_eq!(create_res.status(), StatusCode::OK); 72 - let create_body: Value = create_res.json().await.expect("Invalid JSON"); 73 - let backup_id = create_body["id"].as_str().unwrap(); 74 - 75 - let get_res = client 76 - .get(format!( 77 - "{}/xrpc/_backup.getBackup?id={}", 78 - base_url().await, 79 - backup_id 80 - )) 81 - .bearer_auth(&token) 82 - .send() 83 - .await 84 - .expect("getBackup request failed"); 85 - 86 - assert_eq!(get_res.status(), StatusCode::OK); 87 - let content_type = get_res.headers().get(header::CONTENT_TYPE).unwrap(); 88 - assert_eq!(content_type, "application/vnd.ipld.car"); 89 - 90 - let bytes = get_res.bytes().await.expect("Failed to read body"); 91 - assert!(bytes.len() > 100, "CAR file should have content"); 92 - assert_eq!( 93 - bytes[1], 0xa2, 94 - "CAR file should have valid header structure" 95 - ); 96 - } 97 - 98 - #[tokio::test] 99 - async fn test_delete_backup() { 100 - let client = client(); 101 - let (token, _did) = create_account_and_login(&client).await; 102 - 103 - let create_res = client 104 - .post(format!("{}/xrpc/_backup.createBackup", base_url().await)) 105 - .bearer_auth(&token) 106 - .send() 107 - .await 108 - .expect("createBackup request failed"); 109 - 110 - assert_eq!(create_res.status(), StatusCode::OK); 111 - let create_body: Value = create_res.json().await.expect("Invalid JSON"); 112 - let backup_id = create_body["id"].as_str().unwrap(); 113 - 114 - let delete_res = client 115 - .post(format!( 116 - "{}/xrpc/_backup.deleteBackup?id={}", 117 - base_url().await, 118 - backup_id 119 - )) 120 - .bearer_auth(&token) 121 - .send() 122 - .await 123 - .expect("deleteBackup request failed"); 124 - 125 - 
assert_eq!(delete_res.status(), StatusCode::OK); 126 - 127 - let get_res = client 128 - .get(format!( 129 - "{}/xrpc/_backup.getBackup?id={}", 130 - base_url().await, 131 - backup_id 132 - )) 133 - .bearer_auth(&token) 134 - .send() 135 - .await 136 - .expect("getBackup request failed"); 137 - 138 - assert_eq!(get_res.status(), StatusCode::NOT_FOUND); 139 - } 140 - 141 - #[tokio::test] 142 - async fn test_toggle_backup_enabled() { 143 - let client = client(); 144 - let (token, _did) = create_account_and_login(&client).await; 145 - 146 - let list_res = client 147 - .get(format!("{}/xrpc/_backup.listBackups", base_url().await)) 148 - .bearer_auth(&token) 149 - .send() 150 - .await 151 - .expect("listBackups request failed"); 152 - 153 - assert_eq!(list_res.status(), StatusCode::OK); 154 - let list_body: Value = list_res.json().await.expect("Invalid JSON"); 155 - assert!(list_body["backupEnabled"].as_bool().unwrap()); 156 - 157 - let disable_res = client 158 - .post(format!("{}/xrpc/_backup.setEnabled", base_url().await)) 159 - .bearer_auth(&token) 160 - .json(&json!({"enabled": false})) 161 - .send() 162 - .await 163 - .expect("setEnabled request failed"); 164 - 165 - assert_eq!(disable_res.status(), StatusCode::OK); 166 - let disable_body: Value = disable_res.json().await.expect("Invalid JSON"); 167 - assert!(!disable_body["enabled"].as_bool().unwrap()); 168 - 169 - let list_res2 = client 170 - .get(format!("{}/xrpc/_backup.listBackups", base_url().await)) 171 - .bearer_auth(&token) 172 - .send() 173 - .await 174 - .expect("listBackups request failed"); 175 - 176 - let list_body2: Value = list_res2.json().await.expect("Invalid JSON"); 177 - assert!(!list_body2["backupEnabled"].as_bool().unwrap()); 178 - 179 - let enable_res = client 180 - .post(format!("{}/xrpc/_backup.setEnabled", base_url().await)) 181 - .bearer_auth(&token) 182 - .json(&json!({"enabled": true})) 183 - .send() 184 - .await 185 - .expect("setEnabled request failed"); 186 - 187 - 
assert_eq!(enable_res.status(), StatusCode::OK); 188 - } 189 - 190 - #[tokio::test] 191 - async fn test_backup_includes_blobs() { 192 - let client = client(); 193 - let (token, did) = create_account_and_login(&client).await; 194 - 195 - let blob_data = b"Hello, this is test blob data for backup testing!"; 196 - let upload_res = client 197 - .post(format!( 198 - "{}/xrpc/com.atproto.repo.uploadBlob", 199 - base_url().await 200 - )) 201 - .header(header::CONTENT_TYPE, "text/plain") 202 - .bearer_auth(&token) 203 - .body(blob_data.to_vec()) 204 - .send() 205 - .await 206 - .expect("uploadBlob request failed"); 207 - 208 - assert_eq!(upload_res.status(), StatusCode::OK); 209 - let upload_body: Value = upload_res.json().await.expect("Invalid JSON"); 210 - let blob = &upload_body["blob"]; 211 - 212 - let record = json!({ 213 - "$type": "app.bsky.feed.post", 214 - "text": "Test post with blob", 215 - "createdAt": chrono::Utc::now().to_rfc3339(), 216 - "embed": { 217 - "$type": "app.bsky.embed.images", 218 - "images": [{ 219 - "alt": "test image", 220 - "image": blob 221 - }] 222 - } 223 - }); 224 - 225 - let create_record_res = client 226 - .post(format!( 227 - "{}/xrpc/com.atproto.repo.createRecord", 228 - base_url().await 229 - )) 230 - .bearer_auth(&token) 231 - .json(&json!({ 232 - "repo": did, 233 - "collection": "app.bsky.feed.post", 234 - "record": record 235 - })) 236 - .send() 237 - .await 238 - .expect("createRecord request failed"); 239 - 240 - assert_eq!(create_record_res.status(), StatusCode::OK); 241 - 242 - let create_backup_res = client 243 - .post(format!("{}/xrpc/_backup.createBackup", base_url().await)) 244 - .bearer_auth(&token) 245 - .send() 246 - .await 247 - .expect("createBackup request failed"); 248 - 249 - assert_eq!(create_backup_res.status(), StatusCode::OK); 250 - let backup_body: Value = create_backup_res.json().await.expect("Invalid JSON"); 251 - let backup_id = backup_body["id"].as_str().unwrap(); 252 - 253 - let get_backup_res = client 254 
- .get(format!( 255 - "{}/xrpc/_backup.getBackup?id={}", 256 - base_url().await, 257 - backup_id 258 - )) 259 - .bearer_auth(&token) 260 - .send() 261 - .await 262 - .expect("getBackup request failed"); 263 - 264 - assert_eq!(get_backup_res.status(), StatusCode::OK); 265 - let car_bytes = get_backup_res.bytes().await.expect("Failed to read body"); 266 - 267 - let blob_cid = blob["ref"]["$link"].as_str().unwrap(); 268 - let blob_found = String::from_utf8_lossy(&car_bytes).contains("Hello, this is test blob data"); 269 - assert!( 270 - blob_found || car_bytes.len() > 500, 271 - "Backup should contain blob data (cid: {})", 272 - blob_cid 273 - ); 274 - } 275 - 276 - #[tokio::test] 277 - async fn test_backup_unauthorized() { 278 - let client = client(); 279 - 280 - let res = client 281 - .get(format!("{}/xrpc/_backup.listBackups", base_url().await)) 282 - .send() 283 - .await 284 - .expect("listBackups request failed"); 285 - 286 - assert_eq!(res.status(), StatusCode::UNAUTHORIZED); 287 - } 288 - 289 - #[tokio::test] 290 - async fn test_get_nonexistent_backup() { 291 - let client = client(); 292 - let (token, _did) = create_account_and_login(&client).await; 293 - 294 - let fake_id = uuid::Uuid::new_v4(); 295 - let res = client 296 - .get(format!( 297 - "{}/xrpc/_backup.getBackup?id={}", 298 - base_url().await, 299 - fake_id 300 - )) 301 - .bearer_auth(&token) 302 - .send() 303 - .await 304 - .expect("getBackup request failed"); 305 - 306 - assert_eq!(res.status(), StatusCode::NOT_FOUND); 307 - } 308 - 309 - #[tokio::test] 310 - async fn test_backup_invalid_id() { 311 - let client = client(); 312 - let (token, _did) = create_account_and_login(&client).await; 313 - 314 - let res = client 315 - .get(format!( 316 - "{}/xrpc/_backup.getBackup?id=not-a-uuid", 317 - base_url().await 318 - )) 319 - .bearer_auth(&token) 320 - .send() 321 - .await 322 - .expect("getBackup request failed"); 323 - 324 - assert_eq!(res.status(), StatusCode::BAD_REQUEST); 325 - }
-103
crates/tranquil-pds/tests/whole_story.rs
··· 134 134 .expect("Edit post failed"); 135 135 assert_eq!(edit_res.status(), StatusCode::OK); 136 136 137 - let backup_res = client 138 - .post(format!("{}/xrpc/_backup.createBackup", base)) 139 - .bearer_auth(&jwt) 140 - .send() 141 - .await 142 - .expect("Backup creation failed"); 143 - assert_eq!(backup_res.status(), StatusCode::OK); 144 - let backup_body: Value = backup_res.json().await.unwrap(); 145 - let backup_id = backup_body["id"].as_str().unwrap(); 146 - 147 - let download_res = client 148 - .get(format!("{}/xrpc/_backup.getBackup?id={}", base, backup_id)) 149 - .bearer_auth(&jwt) 150 - .send() 151 - .await 152 - .expect("Backup download failed"); 153 - assert_eq!(download_res.status(), StatusCode::OK); 154 - let backup_bytes = download_res.bytes().await.unwrap(); 155 - assert!(backup_bytes.len() > 100, "Backup should have content"); 156 - 157 137 let delete_res = client 158 138 .post(format!("{}/xrpc/com.atproto.server.deleteSession", base)) 159 139 .bearer_auth(&jwt) ··· 1188 1168 .await 1189 1169 .unwrap(); 1190 1170 assert_eq!(profile_res.status(), StatusCode::OK); 1191 - 1192 - let backup1_res = client 1193 - .post(format!("{}/xrpc/_backup.createBackup", base)) 1194 - .bearer_auth(&jwt) 1195 - .send() 1196 - .await 1197 - .expect("Backup 1 failed"); 1198 - assert_eq!(backup1_res.status(), StatusCode::OK); 1199 - let backup1: Value = backup1_res.json().await.unwrap(); 1200 - let backup1_id = backup1["id"].as_str().unwrap(); 1201 - let backup1_rev = backup1["repoRev"].as_str().unwrap(); 1202 - 1203 - create_post(&client, &did, &jwt, "Post 4 after first backup").await; 1204 - create_post(&client, &did, &jwt, "Post 5 after first backup").await; 1205 - 1206 - let backup2_res = client 1207 - .post(format!("{}/xrpc/_backup.createBackup", base)) 1208 - .bearer_auth(&jwt) 1209 - .send() 1210 - .await 1211 - .expect("Backup 2 failed"); 1212 - assert_eq!(backup2_res.status(), StatusCode::OK); 1213 - let backup2: Value = backup2_res.json().await.unwrap(); 1214 
- let backup2_id = backup2["id"].as_str().unwrap(); 1215 - let backup2_rev = backup2["repoRev"].as_str().unwrap(); 1216 - 1217 - assert_ne!( 1218 - backup1_rev, backup2_rev, 1219 - "Backups should have different revs" 1220 - ); 1221 - 1222 - let list_res = client 1223 - .get(format!("{}/xrpc/_backup.listBackups", base)) 1224 - .bearer_auth(&jwt) 1225 - .send() 1226 - .await 1227 - .expect("List backups failed"); 1228 - let list_body: Value = list_res.json().await.unwrap(); 1229 - let backups = list_body["backups"].as_array().unwrap(); 1230 - assert_eq!(backups.len(), 2, "Should have 2 backups"); 1231 - 1232 - let download1 = client 1233 - .get(format!("{}/xrpc/_backup.getBackup?id={}", base, backup1_id)) 1234 - .bearer_auth(&jwt) 1235 - .send() 1236 - .await 1237 - .expect("Download backup 1 failed"); 1238 - assert_eq!(download1.status(), StatusCode::OK); 1239 - let backup1_bytes = download1.bytes().await.unwrap(); 1240 - 1241 - let download2 = client 1242 - .get(format!("{}/xrpc/_backup.getBackup?id={}", base, backup2_id)) 1243 - .bearer_auth(&jwt) 1244 - .send() 1245 - .await 1246 - .expect("Download backup 2 failed"); 1247 - assert_eq!(download2.status(), StatusCode::OK); 1248 - let backup2_bytes = download2.bytes().await.unwrap(); 1249 - 1250 - assert!( 1251 - backup2_bytes.len() > backup1_bytes.len(), 1252 - "Second backup should be larger (more posts)" 1253 - ); 1254 - 1255 - let delete_old = client 1256 - .post(format!( 1257 - "{}/xrpc/_backup.deleteBackup?id={}", 1258 - base, backup1_id 1259 - )) 1260 - .bearer_auth(&jwt) 1261 - .send() 1262 - .await 1263 - .expect("Delete backup failed"); 1264 - assert_eq!(delete_old.status(), StatusCode::OK); 1265 - 1266 - let final_list = client 1267 - .get(format!("{}/xrpc/_backup.listBackups", base)) 1268 - .bearer_auth(&jwt) 1269 - .send() 1270 - .await 1271 - .unwrap(); 1272 - let final_body: Value = final_list.json().await.unwrap(); 1273 - assert_eq!(final_body["backups"].as_array().unwrap().len(), 1); 1274 1171 } 
1275 1172 1276 1173 #[tokio::test]
+1 -19
crates/tranquil-server/src/main.rs
··· 11 11 use tranquil_pds::crawlers::{Crawlers, start_crawlers_service}; 12 12 use tranquil_pds::scheduled::{ 13 13 backfill_genesis_commit_blocks, backfill_record_blobs, backfill_repo_rev, backfill_user_blocks, 14 - start_backup_tasks, start_scheduled_tasks, 14 + start_scheduled_tasks, 15 15 }; 16 16 use tranquil_pds::state::AppState; 17 17 ··· 233 233 None 234 234 }; 235 235 236 - let backup_handle = if let Some(backup_storage) = state.backup_storage.clone() { 237 - info!("Backup service enabled"); 238 - Some(tokio::spawn(start_backup_tasks( 239 - state.repo_repo.clone(), 240 - state.backup_repo.clone(), 241 - state.block_store.clone(), 242 - backup_storage, 243 - shutdown.clone(), 244 - ))) 245 - } else { 246 - warn!("Backup service disabled (BACKUP_S3_BUCKET not set or BACKUP_ENABLED=false)"); 247 - None 248 - }; 249 - 250 236 let scheduled_handle = tokio::spawn(start_scheduled_tasks( 251 237 state.user_repo.clone(), 252 238 state.blob_repo.clone(), ··· 310 296 handle.await.ok(); 311 297 } 312 298 313 - if let Some(handle) = backup_handle { 314 - handle.await.ok(); 315 - } 316 - 317 299 scheduled_handle.await.ok(); 318 300 319 301 if let Err(e) = server_result {
+2 -186
crates/tranquil-storage/src/lib.rs
··· 1 - pub use tranquil_infra::{ 2 - BackupStorage, BlobStorage, StorageError, StreamUploadResult, backup_interval_secs, 3 - backup_retention_count, 4 - }; 1 + pub use tranquil_infra::{BlobStorage, StorageError, StreamUploadResult}; 5 2 6 3 use async_trait::async_trait; 7 4 use bytes::Bytes; ··· 161 158 ) 162 159 } 163 160 164 - pub struct S3BackupStorage { 165 - client: Client, 166 - bucket: String, 167 - } 168 - 169 - impl S3BackupStorage { 170 - pub async fn new() -> Option<Self> { 171 - let bucket = tranquil_config::get().backup.s3_bucket.clone()?; 172 - let client = create_s3_client().await; 173 - Some(Self { client, bucket }) 174 - } 175 - } 176 - 177 - #[async_trait] 178 - impl BackupStorage for S3BackupStorage { 179 - async fn put_backup( 180 - &self, 181 - did: &str, 182 - rev: &str, 183 - data: &[u8], 184 - ) -> Result<String, StorageError> { 185 - let key = format!("{}/{}.car", did, rev); 186 - self.client 187 - .put_object() 188 - .bucket(&self.bucket) 189 - .key(&key) 190 - .body(ByteStream::from(Bytes::copy_from_slice(data))) 191 - .send() 192 - .await 193 - .map_err(|e| StorageError::Backend(e.to_string()))?; 194 - 195 - Ok(key) 196 - } 197 - 198 - async fn get_backup(&self, storage_key: &str) -> Result<Bytes, StorageError> { 199 - let resp = self 200 - .client 201 - .get_object() 202 - .bucket(&self.bucket) 203 - .key(storage_key) 204 - .send() 205 - .await 206 - .map_err(|e| StorageError::Backend(e.to_string()))?; 207 - 208 - resp.body 209 - .collect() 210 - .await 211 - .map(|agg| agg.into_bytes()) 212 - .map_err(|e| StorageError::Backend(e.to_string())) 213 - } 214 - 215 - async fn delete_backup(&self, storage_key: &str) -> Result<(), StorageError> { 216 - self.client 217 - .delete_object() 218 - .bucket(&self.bucket) 219 - .key(storage_key) 220 - .send() 221 - .await 222 - .map_err(|e| StorageError::Backend(e.to_string()))?; 223 - 224 - Ok(()) 225 - } 226 - } 227 - 228 161 #[async_trait] 229 162 impl BlobStorage for S3BlobStorage { 230 163 
async fn put(&self, key: &str, data: &[u8]) -> Result<(), StorageError> { ··· 488 421 } 489 422 490 423 #[cfg(feature = "s3")] 491 - pub use s3::{S3BackupStorage, S3BlobStorage}; 424 + pub use s3::S3BlobStorage; 492 425 493 426 pub struct FilesystemBlobStorage { 494 427 base_path: PathBuf, ··· 634 567 } 635 568 } 636 569 637 - pub struct FilesystemBackupStorage { 638 - base_path: PathBuf, 639 - tmp_path: PathBuf, 640 - } 641 - 642 - impl FilesystemBackupStorage { 643 - pub async fn new(base_path: impl Into<PathBuf>) -> Result<Self, StorageError> { 644 - let base_path = base_path.into(); 645 - let tmp_path = base_path.join(".tmp"); 646 - tokio::fs::create_dir_all(&base_path).await?; 647 - tokio::fs::create_dir_all(&tmp_path).await?; 648 - cleanup_orphaned_tmp_files(&tmp_path).await; 649 - Ok(Self { 650 - base_path, 651 - tmp_path, 652 - }) 653 - } 654 - 655 - fn resolve_path(&self, key: &str) -> Result<PathBuf, StorageError> { 656 - validate_key(key)?; 657 - Ok(self.base_path.join(key)) 658 - } 659 - } 660 - 661 - #[async_trait] 662 - impl BackupStorage for FilesystemBackupStorage { 663 - async fn put_backup(&self, did: &str, rev: &str, data: &[u8]) -> Result<String, StorageError> { 664 - use tokio::io::AsyncWriteExt; 665 - 666 - let key = format!("{}/{}.car", did, rev); 667 - let final_path = self.resolve_path(&key)?; 668 - ensure_parent_dir(&final_path).await?; 669 - 670 - let tmp_file_name = uuid::Uuid::new_v4().to_string(); 671 - let tmp_path = self.tmp_path.join(&tmp_file_name); 672 - 673 - let mut file = tokio::fs::File::create(&tmp_path).await?; 674 - file.write_all(data).await?; 675 - file.sync_all().await?; 676 - drop(file); 677 - 678 - rename_with_fallback(&tmp_path, &final_path).await?; 679 - Ok(key) 680 - } 681 - 682 - async fn get_backup(&self, storage_key: &str) -> Result<Bytes, StorageError> { 683 - let path = self.resolve_path(storage_key)?; 684 - tokio::fs::read(&path) 685 - .await 686 - .map(Bytes::from) 687 - 
.map_err(map_io_not_found(storage_key)) 688 - } 689 - 690 - async fn delete_backup(&self, storage_key: &str) -> Result<(), StorageError> { 691 - let path = self.resolve_path(storage_key)?; 692 - tokio::fs::remove_file(&path).await.or_else(|e| { 693 - (e.kind() == std::io::ErrorKind::NotFound) 694 - .then_some(()) 695 - .ok_or(StorageError::Io(e)) 696 - }) 697 - } 698 - } 699 - 700 570 pub async fn create_blob_storage() -> Arc<dyn BlobStorage> { 701 571 let cfg = tranquil_config::get(); 702 572 let backend = &cfg.storage.backend; ··· 731 601 } 732 602 } 733 603 734 - pub async fn create_backup_storage() -> Option<Arc<dyn BackupStorage>> { 735 - let cfg = tranquil_config::get(); 736 - 737 - if !cfg.backup.enabled { 738 - tracing::info!("Backup storage disabled via config"); 739 - return None; 740 - } 741 - 742 - let backend = &cfg.backup.backend; 743 - 744 - match backend.as_str() { 745 - #[cfg(feature = "s3")] 746 - "s3" => S3BackupStorage::new().await.map_or_else( 747 - || { 748 - tracing::error!( 749 - "BACKUP_STORAGE_BACKEND=s3 but BACKUP_S3_BUCKET is not set. \ 750 - Backups will be disabled." 751 - ); 752 - None 753 - }, 754 - |storage| { 755 - tracing::info!("Initialized S3 backup storage"); 756 - Some(Arc::new(storage) as Arc<dyn BackupStorage>) 757 - }, 758 - ), 759 - #[cfg(not(feature = "s3"))] 760 - "s3" => { 761 - tracing::error!( 762 - "BACKUP_STORAGE_BACKEND=s3 but binary was compiled without s3 feature. \ 763 - Backups will be disabled." 764 - ); 765 - None 766 - } 767 - _ => { 768 - let path = cfg.backup.path.clone(); 769 - FilesystemBackupStorage::new(path).await.map_or_else( 770 - |e| { 771 - tracing::error!( 772 - "Failed to initialize filesystem backup storage: {}. \ 773 - Set BACKUP_STORAGE_PATH to a valid directory path. 
\ 774 - Backups will be disabled.", 775 - e 776 - ); 777 - None 778 - }, 779 - |storage| { 780 - tracing::info!("Initialized filesystem backup storage"); 781 - Some(Arc::new(storage) as Arc<dyn BackupStorage>) 782 - }, 783 - ) 784 - } 785 - } 786 - } 787 - 788 604 trait Pipe: Sized { 789 605 fn pipe<F, R>(self, f: F) -> R 790 606 where
+3 -3
crates/tranquil-sync/src/blob.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::state::AppState; 3 - use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 4 1 use axum::{ 5 2 Json, 6 3 body::Body, ··· 11 8 }; 12 9 use serde::{Deserialize, Serialize}; 13 10 use tracing::error; 11 + use tranquil_pds::api::error::ApiError; 12 + use tranquil_pds::state::AppState; 13 + use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 14 14 use tranquil_types::{CidLink, Did}; 15 15 16 16 #[derive(Deserialize)]
+5 -3
crates/tranquil-sync/src/commit.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::state::AppState; 3 - use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability, get_account_with_status}; 4 1 use axum::{ 5 2 Json, 6 3 extract::{Query, State}, ··· 14 11 use std::str::FromStr; 15 12 use tracing::error; 16 13 use tranquil_db_traits::AccountStatus; 14 + use tranquil_pds::api::error::ApiError; 15 + use tranquil_pds::state::AppState; 16 + use tranquil_pds::sync::util::{ 17 + RepoAccessLevel, assert_repo_availability, get_account_with_status, 18 + }; 17 19 use tranquil_types::Did; 18 20 19 21 async fn get_rev_from_commit(state: &AppState, cid_str: &str) -> Option<String> {
+2 -2
crates/tranquil-sync/src/crawl.rs
··· 1 - use tranquil_pds::api::EmptyResponse; 2 - use tranquil_pds::state::AppState; 3 1 use axum::{ 4 2 Json, 5 3 extract::{Query, State}, ··· 7 5 }; 8 6 use serde::Deserialize; 9 7 use tracing::info; 8 + use tranquil_pds::api::EmptyResponse; 9 + use tranquil_pds::state::AppState; 10 10 11 11 #[derive(Deserialize)] 12 12 pub struct NotifyOfUpdateParams {
+7 -8
crates/tranquil-sync/src/deprecated.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::state::AppState; 3 - use tranquil_pds::sync::car::{encode_car_block, encode_car_header}; 4 - use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 5 1 use axum::{ 6 2 Json, 7 3 extract::{Query, State}, ··· 13 9 use jacquard_repo::storage::BlockStore; 14 10 use serde::{Deserialize, Serialize}; 15 11 use std::str::FromStr; 12 + use tranquil_pds::api::error::ApiError; 13 + use tranquil_pds::state::AppState; 14 + use tranquil_pds::sync::car::{encode_car_block, encode_car_header}; 15 + use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 16 16 use tranquil_types::Did; 17 17 18 18 const MAX_REPO_BLOCKS_TRAVERSAL: usize = 20_000; 19 19 20 20 async fn check_admin_or_self(state: &AppState, headers: &HeaderMap, did: &Did) -> bool { 21 - let extracted = match tranquil_pds::auth::extract_auth_token_from_header(tranquil_pds::util::get_header_str( 22 - headers, 23 - axum::http::header::AUTHORIZATION, 24 - )) { 21 + let extracted = match tranquil_pds::auth::extract_auth_token_from_header( 22 + tranquil_pds::util::get_header_str(headers, axum::http::header::AUTHORIZATION), 23 + ) { 25 24 Some(t) => t, 26 25 None => return false, 27 26 };
+1 -4
crates/tranquil-sync/src/lib.rs
··· 29 29 .route("/com.atproto.sync.getBlocks", get(get_blocks)) 30 30 .route("/com.atproto.sync.getRepo", get(get_repo)) 31 31 .route("/com.atproto.sync.getRecord", get(get_record)) 32 - .route( 33 - "/com.atproto.sync.subscribeRepos", 34 - get(subscribe_repos), 35 - ) 32 + .route("/com.atproto.sync.subscribeRepos", get(subscribe_repos)) 36 33 .route("/com.atproto.sync.getHead", get(get_head)) 37 34 .route("/com.atproto.sync.getCheckout", get(get_checkout)) 38 35 }
+2 -2
crates/tranquil-sync/src/listener.rs
··· 1 - use tranquil_pds::state::AppState; 2 - use tranquil_pds::sync::firehose::SequencedEvent; 3 1 use std::sync::atomic::{AtomicI64, Ordering}; 4 2 use tracing::{debug, error, info, warn}; 5 3 use tranquil_db_traits::SequenceNumber; 4 + use tranquil_pds::state::AppState; 5 + use tranquil_pds::sync::firehose::SequencedEvent; 6 6 7 7 static LAST_BROADCAST_SEQ: AtomicI64 = AtomicI64::new(0); 8 8
+5 -5
crates/tranquil-sync/src/repo.rs
··· 1 - use tranquil_pds::api::error::ApiError; 2 - use tranquil_pds::scheduled::generate_repo_car_from_user_blocks; 3 - use tranquil_pds::state::AppState; 4 - use tranquil_pds::sync::car::{encode_car_block, encode_car_header}; 5 - use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 6 1 use axum::{ 7 2 extract::{Query, RawQuery, State}, 8 3 http::StatusCode, ··· 13 8 use serde::Deserialize; 14 9 use std::str::FromStr; 15 10 use tracing::error; 11 + use tranquil_pds::api::error::ApiError; 12 + use tranquil_pds::scheduled::generate_repo_car_from_user_blocks; 13 + use tranquil_pds::state::AppState; 14 + use tranquil_pds::sync::car::{encode_car_block, encode_car_header}; 15 + use tranquil_pds::sync::util::{RepoAccessLevel, assert_repo_availability}; 16 16 use tranquil_types::Did; 17 17 18 18 struct GetBlocksParams {
+7 -7
crates/tranquil-sync/src/subscribe_repos.rs
··· 1 - use tranquil_pds::state::AppState; 2 - use tranquil_pds::sync::firehose::SequencedEvent; 3 - use tranquil_pds::sync::frame::{ErrorFrameName, InfoFrameName}; 4 - use tranquil_pds::sync::util::{ 5 - format_error_frame, format_event_for_sending, format_event_with_prefetched_blocks, 6 - format_info_frame, prefetch_blocks_for_events, 7 - }; 8 1 use axum::{ 9 2 extract::{Query, State, ws::Message, ws::WebSocket, ws::WebSocketUpgrade}, 10 3 response::Response, ··· 15 8 use tokio::sync::broadcast::error::RecvError; 16 9 use tracing::{error, info, warn}; 17 10 use tranquil_db_traits::SequenceNumber; 11 + use tranquil_pds::state::AppState; 12 + use tranquil_pds::sync::firehose::SequencedEvent; 13 + use tranquil_pds::sync::frame::{ErrorFrameName, InfoFrameName}; 14 + use tranquil_pds::sync::util::{ 15 + format_error_frame, format_event_for_sending, format_event_with_prefetched_blocks, 16 + format_info_frame, prefetch_blocks_for_events, 17 + }; 18 18 19 19 const BACKFILL_BATCH_SIZE: i64 = 1000; 20 20
-2
docker-compose.prod.yaml
··· 10 10 volumes: 11 11 - ./config.toml:/etc/tranquil-pds/config.toml:ro 12 12 - blob_data:/var/lib/tranquil/blobs 13 - - backup_data:/var/lib/tranquil/backups 14 13 depends_on: 15 14 db: 16 15 condition: service_healthy ··· 95 94 volumes: 96 95 postgres_data: 97 96 blob_data: 98 - backup_data: 99 97 prometheus_data: 100 98 acme_challenge:
-2
docker-compose.yaml
··· 11 11 volumes: 12 12 - ./config.toml:/etc/tranquil-pds/config.toml:ro 13 13 - blob_data:/var/lib/tranquil/blobs 14 - - backup_data:/var/lib/tranquil/backups 15 14 depends_on: 16 15 - db 17 16 ··· 52 51 volumes: 53 52 postgres_data: 54 53 blob_data: 55 - backup_data: 56 54 prometheus_data:
+2 -2
docs/install-containers.md
··· 110 110 111 111 ```bash 112 112 mkdir -p /etc/containers/systemd 113 - mkdir -p /srv/tranquil-pds/{postgres,blobs,backups,certs,acme,config} 113 + mkdir -p /srv/tranquil-pds/{postgres,blobs,certs,acme,config} 114 114 ``` 115 115 116 116 ## Create a configuration file ··· 254 254 255 255 ```sh 256 256 mkdir -p /srv/tranquil-pds/{data,config} 257 - mkdir -p /srv/tranquil-pds/data/{postgres,blobs,backups,certs,acme} 257 + mkdir -p /srv/tranquil-pds/data/{postgres,blobs,certs,acme} 258 258 ``` 259 259 260 260 ## Clone the repo and build images
+1 -1
docs/install-debian.md
··· 44 44 ## Create blob storage directories 45 45 46 46 ```bash 47 - mkdir -p /var/lib/tranquil/blobs /var/lib/tranquil/backups 47 + mkdir -p /var/lib/tranquil/blobs 48 48 ``` 49 49 50 50 We'll set ownership after creating the service user.
-1
docs/install-kubernetes.md
··· 12 12 - A TOML config file mounted at `/etc/tranquil-pds/config.toml` (or passed via `--config`) 13 13 - `DATABASE_URL` - postgres connection string 14 14 - `BLOB_STORAGE_PATH` - path to blob storage (mount a PV here) 15 - - `BACKUP_STORAGE_PATH` - path for repo backups (optional but recommended) 16 15 - `PDS_HOSTNAME` - your PDS hostname (without protocol) 17 16 - `JWT_SECRET`, `DPOP_SECRET`, `MASTER_KEY` - generate with `openssl rand -base64 48` 18 17 - `CRAWLERS` - typically `https://bsky.network`
-41
example.toml
··· 180 180 # Can also be specified via environment variable `S3_ENDPOINT`. 181 181 #s3_endpoint = 182 182 183 - [backup] 184 - # Enable automatic backups. 185 - # 186 - # Can also be specified via environment variable `BACKUP_ENABLED`. 187 - # 188 - # Default value: true 189 - #enabled = true 190 - 191 - # Backup storage backend: `filesystem` or `s3`. 192 - # 193 - # Can also be specified via environment variable `BACKUP_STORAGE_BACKEND`. 194 - # 195 - # Default value: "filesystem" 196 - #backend = "filesystem" 197 - 198 - # Path on disk for the filesystem backup backend. 199 - # 200 - # Can also be specified via environment variable `BACKUP_STORAGE_PATH`. 201 - # 202 - # Default value: "/var/lib/tranquil-pds/backups" 203 - #path = "/var/lib/tranquil-pds/backups" 204 - 205 - # S3 bucket name for backups. 206 - # 207 - # Can also be specified via environment variable `BACKUP_S3_BUCKET`. 208 - #s3_bucket = 209 - 210 - # Number of backup revisions to keep per account. 211 - # 212 - # Can also be specified via environment variable `BACKUP_RETENTION_COUNT`. 213 - # 214 - # Default value: 7 215 - #retention_count = 7 216 - 217 - # Seconds between backup runs. 218 - # 219 - # Can also be specified via environment variable `BACKUP_INTERVAL_SECS`. 220 - # 221 - # Default value: 86400 222 - #interval_secs = 86400 223 - 224 183 [cache] 225 184 # Cache backend: `ripple` (default, built-in gossip) or `valkey`. 226 185 #
+3 -206
frontend/src/components/dashboard/SettingsContent.svelte
··· 31 31 selectedDomain = info.availableUserDomains[0] 32 32 } 33 33 } catch {} 34 - loadBackups() 35 34 } 36 35 init() 37 36 return () => stopEmailPolling() ··· 167 166 } 168 167 169 168 let exportLoading = $state(false) 170 - let exportBlobsLoading = $state(false) 171 169 172 170 async function handleExportRepo() { 173 171 exportLoading = true ··· 190 188 } 191 189 } 192 190 193 - async function handleExportBlobs() { 194 - exportBlobsLoading = true 195 - try { 196 - const blob = await api.exportBlobs(session.accessJwt) 197 - if (blob.size === 0) { 198 - toast.success($_('settings.messages.noBlobsToExport')) 199 - return 200 - } 201 - const url = URL.createObjectURL(blob) 202 - const a = document.createElement('a') 203 - a.href = url 204 - a.download = `${session.handle}-blobs.zip` 205 - document.body.appendChild(a) 206 - a.click() 207 - document.body.removeChild(a) 208 - URL.revokeObjectURL(url) 209 - toast.success($_('settings.messages.blobsExported')) 210 - } catch (e) { 211 - toast.error(e instanceof ApiError ? 
e.message : $_('settings.messages.exportFailed')) 212 - } finally { 213 - exportBlobsLoading = false 214 - } 215 - } 216 - 217 - interface BackupInfo { 218 - id: string 219 - repoRev: string 220 - sizeBytes: number 221 - createdAt: string 222 - } 223 - let backups = $state<BackupInfo[]>([]) 224 - let backupEnabled = $state(true) 225 - let backupsLoading = $state(false) 226 - let createBackupLoading = $state(false) 227 - let restoreFile = $state<File | null>(null) 228 - let restoreLoading = $state(false) 229 - 230 - async function loadBackups() { 231 - backupsLoading = true 232 - try { 233 - const result = await api.listBackups(session.accessJwt) 234 - backups = result.backups 235 - backupEnabled = result.backupEnabled 236 - } catch {} 237 - backupsLoading = false 238 - } 239 - 240 - async function handleCreateBackup() { 241 - createBackupLoading = true 242 - try { 243 - await api.createBackup(session.accessJwt) 244 - await loadBackups() 245 - toast.success($_('settings.backups.created')) 246 - } catch (e) { 247 - toast.error(e instanceof ApiError ? e.message : $_('settings.backups.createFailed')) 248 - } finally { 249 - createBackupLoading = false 250 - } 251 - } 252 - 253 - async function handleDownloadBackup(id: string, rev: string) { 254 - try { 255 - const blob = await api.getBackup(session.accessJwt, id) 256 - const url = URL.createObjectURL(blob) 257 - const a = document.createElement('a') 258 - a.href = url 259 - a.download = `${session.handle}-${rev}.car` 260 - document.body.appendChild(a) 261 - a.click() 262 - document.body.removeChild(a) 263 - URL.revokeObjectURL(url) 264 - } catch (e) { 265 - toast.error(e instanceof ApiError ? 
e.message : $_('settings.backups.downloadFailed')) 266 - } 267 - } 268 - 269 - function handleRestoreFileChange(e: Event) { 270 - const input = e.target as HTMLInputElement 271 - if (input.files && input.files[0]) { 272 - restoreFile = input.files[0] 273 - } 274 - } 275 - 276 - async function handleRestore() { 277 - if (!restoreFile) return 278 - restoreLoading = true 279 - try { 280 - const buffer = await restoreFile.arrayBuffer() 281 - const car = new Uint8Array(buffer) 282 - await api.importRepo(session.accessJwt, car) 283 - toast.success($_('settings.backups.restored')) 284 - restoreFile = null 285 - await loadBackups() 286 - } catch (e) { 287 - toast.error(e instanceof ApiError ? e.message : $_('settings.backups.restoreFailed')) 288 - } finally { 289 - restoreLoading = false 290 - } 291 - } 292 - 293 - async function handleToggleBackup() { 294 - const newEnabled = !backupEnabled 295 - backupsLoading = true 296 - try { 297 - await api.setBackupEnabled(session.accessJwt, newEnabled) 298 - backupEnabled = newEnabled 299 - toast.success(newEnabled ? $_('settings.backups.enabled') : $_('settings.backups.disabled')) 300 - } catch (e) { 301 - toast.error(e instanceof ApiError ? e.message : $_('settings.backups.toggleFailed')) 302 - } finally { 303 - backupsLoading = false 304 - } 305 - } 306 - 307 - async function handleDeleteBackup(id: string) { 308 - try { 309 - await api.deleteBackup(session.accessJwt, id) 310 - await loadBackups() 311 - toast.success($_('settings.backups.deleted')) 312 - } catch (e) { 313 - toast.error(e instanceof ApiError ? 
e.message : $_('settings.backups.deleteFailed')) 314 - } 315 - } 316 - 317 - function formatBytes(bytes: number): string { 318 - if (bytes < 1024) return `${bytes} B` 319 - if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB` 320 - return `${(bytes / (1024 * 1024)).toFixed(1)} MB` 321 - } 322 - 323 191 let deleteLoading = $state(false) 324 192 let deletePassword = $state('') 325 193 let deleteToken = $state('') ··· 505 373 506 374 <section> 507 375 <h3>{$_('settings.exportData')}</h3> 508 - <div class="export-buttons"> 509 - <button onclick={handleExportRepo} disabled={exportLoading}> 510 - {exportLoading ? $_('settings.exporting') : $_('settings.downloadRepo')} 511 - </button> 512 - <button class="secondary" onclick={handleExportBlobs} disabled={exportBlobsLoading}> 513 - {exportBlobsLoading ? $_('settings.exporting') : $_('settings.downloadBlobs')} 514 - </button> 515 - </div> 516 - </section> 517 - 518 - <section> 519 - <h3>{$_('settings.backups.title')}</h3> 520 - {#if backupsLoading} 521 - <div class="loading">{$_('common.loading')}</div> 522 - {:else} 523 - {#if backups.length > 0} 524 - <ul class="backup-list"> 525 - {#each backups as backup} 526 - <li class="backup-item"> 527 - <div class="backup-info"> 528 - <span class="backup-date">{formatDate(backup.createdAt)}</span> 529 - <span class="backup-size">{formatBytes(backup.sizeBytes)}</span> 530 - </div> 531 - <div class="backup-item-actions"> 532 - <button class="sm" onclick={() => handleDownloadBackup(backup.id, backup.repoRev)}> 533 - {$_('settings.backups.download')} 534 - </button> 535 - <button class="sm danger-outline" onclick={() => handleDeleteBackup(backup.id)}> 536 - {$_('settings.backups.delete')} 537 - </button> 538 - </div> 539 - </li> 540 - {/each} 541 - </ul> 542 - {:else} 543 - <p class="empty">{$_('settings.backups.noBackups')}</p> 544 - {/if} 545 - <div class="backup-toggle"> 546 - <label class="toggle-label"> 547 - <input type="checkbox" checked={backupEnabled} 
onchange={handleToggleBackup} disabled={backupsLoading} /> 548 - {$_('settings.backups.autoBackup')} 549 - </label> 550 - </div> 551 - <div class="backup-actions"> 552 - <button onclick={handleCreateBackup} disabled={createBackupLoading || !backupEnabled}> 553 - {createBackupLoading ? $_('common.creating') : $_('settings.backups.createNow')} 554 - </button> 555 - </div> 556 - 557 - <div class="restore-section"> 558 - <h4>{$_('settings.backups.restoreTitle')}</h4> 559 - <p class="hint">{$_('settings.backups.restoreHint')}</p> 560 - <div class="restore-form"> 561 - <input 562 - type="file" 563 - accept=".car" 564 - onchange={handleRestoreFileChange} 565 - disabled={restoreLoading} 566 - /> 567 - <button 568 - onclick={handleRestore} 569 - disabled={restoreLoading || !restoreFile} 570 - > 571 - {restoreLoading ? $_('settings.backups.restoring') : $_('settings.backups.restore')} 572 - </button> 573 - </div> 574 - {#if restoreFile} 575 - <div class="restore-preview"> 576 - <span class="file-name">{$_('settings.backups.selectedFile')}: {restoreFile.name}</span> 577 - <span class="file-size">({formatBytes(restoreFile.size)})</span> 578 - </div> 579 - {/if} 580 - </div> 581 - {/if} 376 + <button onclick={handleExportRepo} disabled={exportLoading}> 377 + {exportLoading ? $_('settings.exporting') : $_('settings.downloadRepo')} 378 + </button> 582 379 </section> 583 380 584 381 <section class="danger-zone">
+3 -1
frontend/src/components/ui/Input.svelte
··· 5 5 label?: string 6 6 hint?: string 7 7 error?: string 8 + value?: string 8 9 } 9 10 10 11 let { ··· 12 13 hint, 13 14 error, 14 15 id, 16 + value = $bindable(''), 15 17 ...rest 16 18 }: Props = $props() 17 19 ··· 23 25 {#if label} 24 26 <label for={inputId}>{label}</label> 25 27 {/if} 26 - <input id={inputId} class:has-error={!!error} {...rest} /> 28 + <input id={inputId} class:has-error={!!error} bind:value {...rest} /> 27 29 {#if error} 28 30 <span class="hint error">{error}</span> 29 31 {:else if hint}
-22
frontend/src/lib/api-validated.ts
··· 11 11 import { 12 12 accountInfoSchema, 13 13 appPasswordSchema, 14 - createBackupResponseSchema, 15 14 createdAppPasswordSchema, 16 15 createRecordResponseSchema, 17 16 didDocumentSchema, 18 17 enableTotpResponseSchema, 19 18 legacyLoginPreferenceSchema, 20 - listBackupsResponseSchema, 21 19 listPasskeysResponseSchema, 22 20 listRecordsResponseSchema, 23 21 listSessionsResponseSchema, ··· 37 35 totpStatusSchema, 38 36 type ValidatedAccountInfo, 39 37 type ValidatedAppPassword, 40 - type ValidatedCreateBackupResponse, 41 38 type ValidatedCreatedAppPassword, 42 39 type ValidatedCreateRecordResponse, 43 40 type ValidatedDidDocument, 44 41 type ValidatedEnableTotpResponse, 45 42 type ValidatedLegacyLoginPreference, 46 - type ValidatedListBackupsResponse, 47 43 type ValidatedListPasskeysResponse, 48 44 type ValidatedListRecordsResponse, 49 45 type ValidatedListSessionsResponse, ··· 439 435 ); 440 436 }, 441 437 442 - listBackups( 443 - token: AccessToken, 444 - ): Promise<Result<ValidatedListBackupsResponse, ApiError | ValidationError>> { 445 - return xrpcValidated("_backup.listBackups", listBackupsResponseSchema, { 446 - token, 447 - }); 448 - }, 449 - 450 - createBackup( 451 - token: AccessToken, 452 - ): Promise< 453 - Result<ValidatedCreateBackupResponse, ApiError | ValidationError> 454 - > { 455 - return xrpcValidated("_backup.createBackup", createBackupResponseSchema, { 456 - method: "POST", 457 - token, 458 - }); 459 - }, 460 438 }; 461 439 462 440 export { ValidationError };
-96
frontend/src/lib/api.ts
··· 33 33 ContactState, 34 34 CreateAccountParams, 35 35 CreateAccountResult, 36 - CreateBackupResponse, 37 36 CreatedAppPassword, 38 37 CreateRecordResponse, 39 38 DelegationAuditEntry, ··· 48 47 GetInviteCodesResponse, 49 48 InviteCodeInfo, 50 49 LegacyLoginPreference, 51 - ListBackupsResponse, 52 50 ListPasskeysResponse, 53 51 ListRecordsResponse, 54 52 ListReposResponse, ··· 72 70 ServerDescription, 73 71 ServerStats, 74 72 Session, 75 - SetBackupEnabledResponse, 76 73 SsoLinkedAccount, 77 74 StartPasskeyRegistrationResponse, 78 75 SuccessResponse, ··· 1287 1284 return res.arrayBuffer(); 1288 1285 }, 1289 1286 1290 - listBackups(token: AccessToken): Promise<ListBackupsResponse> { 1291 - return xrpc("_backup.listBackups", { token }); 1292 - }, 1293 - 1294 - async getBackup(token: AccessToken, id: string): Promise<Blob> { 1295 - const url = `${API_BASE}/_backup.getBackup?id=${encodeURIComponent(id)}`; 1296 - const res = await authenticatedFetch(url, { token }); 1297 - if (!res.ok) { 1298 - const errData = await res.json().catch(() => ({ 1299 - error: "Unknown", 1300 - message: res.statusText, 1301 - })); 1302 - throw new ApiError(res.status, errData.error, errData.message); 1303 - } 1304 - return res.blob(); 1305 - }, 1306 - 1307 - createBackup(token: AccessToken): Promise<CreateBackupResponse> { 1308 - return xrpc("_backup.createBackup", { 1309 - method: "POST", 1310 - token, 1311 - }); 1312 - }, 1313 - 1314 - async deleteBackup(token: AccessToken, id: string): Promise<void> { 1315 - await xrpc("_backup.deleteBackup", { 1316 - method: "POST", 1317 - token, 1318 - params: { id }, 1319 - }); 1320 - }, 1321 - 1322 - setBackupEnabled( 1323 - token: AccessToken, 1324 - enabled: boolean, 1325 - ): Promise<SetBackupEnabledResponse> { 1326 - return xrpc("_backup.setEnabled", { 1327 - method: "POST", 1328 - token, 1329 - body: { enabled }, 1330 - }); 1331 - }, 1332 - 1333 1287 async importRepo(token: AccessToken, car: Uint8Array): Promise<void> { 1334 1288 const res = 
await authenticatedFetch( 1335 1289 `${API_BASE}/com.atproto.repo.importRepo`, ··· 1542 1496 }); 1543 1497 }, 1544 1498 1545 - async exportBlobs(token: AccessToken): Promise<Blob> { 1546 - const res = await authenticatedFetch(`${API_BASE}/_backup.exportBlobs`, { 1547 - token, 1548 - }); 1549 - if (!res.ok) { 1550 - const errData = await res.json().catch(() => ({ 1551 - error: "Unknown", 1552 - message: res.statusText, 1553 - })); 1554 - throw new ApiError(res.status, errData.error, errData.message); 1555 - } 1556 - return res.blob(); 1557 - }, 1558 1499 }; 1559 1500 1560 1501 export const typedApi = { ··· 1781 1722 return xrpcResult("_admin.getServerStats", { token }); 1782 1723 }, 1783 1724 1784 - listBackups( 1785 - token: AccessToken, 1786 - ): Promise<Result<ListBackupsResponse, ApiError>> { 1787 - return xrpcResult("_backup.listBackups", { token }); 1788 - }, 1789 - 1790 - createBackup( 1791 - token: AccessToken, 1792 - ): Promise<Result<CreateBackupResponse, ApiError>> { 1793 - return xrpcResult("_backup.createBackup", { 1794 - method: "POST", 1795 - token, 1796 - }); 1797 - }, 1798 - 1799 1725 getDidDocument(token: AccessToken): Promise<Result<DidDocument, ApiError>> { 1800 1726 return xrpcResult("_account.getDidDocument", { token }); 1801 1727 }, ··· 2050 1976 }); 2051 1977 }, 2052 1978 2053 - setBackupEnabled( 2054 - token: AccessToken, 2055 - enabled: boolean, 2056 - ): Promise<Result<SetBackupEnabledResponse, ApiError>> { 2057 - return xrpcResult("_backup.setEnabled", { 2058 - method: "POST", 2059 - token, 2060 - body: { enabled }, 2061 - }); 2062 - }, 2063 - 2064 - deleteBackup( 2065 - token: AccessToken, 2066 - id: string, 2067 - ): Promise<Result<void, ApiError>> { 2068 - return xrpcResult<void>("_backup.deleteBackup", { 2069 - method: "POST", 2070 - token, 2071 - params: { id }, 2072 - }); 2073 - }, 2074 - 2075 1979 createRecord( 2076 1980 token: AccessToken, 2077 1981 repo: Did,
-3
frontend/src/lib/authenticated-client.ts
··· 50 50 Result<{ entries: DelegationAuditEntry[]; total: number }, ApiError> 51 51 >; 52 52 53 - exportBlobs(): Promise<Blob>; 54 53 } 55 54 56 55 export function createAuthenticatedClient( ··· 76 75 api.createDelegatedAccount(token, handle, email, controllerScopes), 77 76 getDelegationAuditLog: (limit, offset) => 78 77 api.getDelegationAuditLog(token, limit, offset), 79 - 80 - exportBlobs: () => api.exportBlobs(token), 81 78 }; 82 79 } 83 80
-25
frontend/src/lib/types/api.ts
··· 495 495 channel: VerificationChannel; 496 496 } 497 497 498 - export interface BackupInfo { 499 - id: string; 500 - repoRev: string; 501 - repoRootCid: Cid; 502 - blockCount: number; 503 - sizeBytes: number; 504 - createdAt: ISODateString; 505 - } 506 - 507 - export interface ListBackupsResponse { 508 - backups: BackupInfo[]; 509 - backupEnabled: boolean; 510 - } 511 - 512 - export interface CreateBackupResponse { 513 - id: string; 514 - repoRev: string; 515 - sizeBytes: number; 516 - blockCount: number; 517 - } 518 - 519 - export interface SetBackupEnabledResponse { 520 - enabled: boolean; 521 - } 522 - 523 498 export interface EmailUpdateResponse { 524 499 tokenRequired: boolean; 525 500 }
-28
frontend/src/lib/types/schemas.ts
··· 276 276 accounts: z.array(accountInfoSchema), 277 277 }); 278 278 279 - export const backupInfoSchema = z.object({ 280 - id: z.string(), 281 - repoRev: z.string(), 282 - repoRootCid: cid, 283 - blockCount: z.number(), 284 - sizeBytes: z.number(), 285 - createdAt: isoDate, 286 - }); 287 - 288 - export const listBackupsResponseSchema = z.object({ 289 - backups: z.array(backupInfoSchema), 290 - backupEnabled: z.boolean(), 291 - }); 292 - 293 - export const createBackupResponseSchema = z.object({ 294 - id: z.string(), 295 - repoRev: z.string(), 296 - sizeBytes: z.number(), 297 - blockCount: z.number(), 298 - }); 299 - 300 279 export type ValidatedSession = z.infer<typeof sessionSchema>; 301 280 export type ValidatedServerDescription = z.infer< 302 281 typeof serverDescriptionSchema ··· 348 327 export type ValidatedSearchAccountsResponse = z.infer< 349 328 typeof searchAccountsResponseSchema 350 329 >; 351 - export type ValidatedBackupInfo = z.infer<typeof backupInfoSchema>; 352 - export type ValidatedListBackupsResponse = z.infer< 353 - typeof listBackupsResponseSchema 354 - >; 355 - export type ValidatedCreateBackupResponse = z.infer< 356 - typeof createBackupResponseSchema 357 - >;
-26
frontend/src/locales/en.json
··· 213 213 "changeHandleButton": "Change Handle", 214 214 "exportData": "Export Data", 215 215 "downloadRepo": "Download Repository", 216 - "downloadBlobs": "Download Media", 217 216 "exporting": "Exporting...", 218 - "backups": { 219 - "title": "Backups", 220 - "autoBackup": "Automatic backups", 221 - "enabled": "Automatic backups enabled", 222 - "disabled": "Automatic backups disabled", 223 - "toggleFailed": "Failed to update backup setting", 224 - "noBackups": "No backups available yet.", 225 - "download": "Download", 226 - "delete": "Delete", 227 - "createNow": "Create Backup Now", 228 - "created": "Backup created successfully", 229 - "createFailed": "Failed to create backup", 230 - "downloadFailed": "Failed to download backup", 231 - "deleted": "Backup deleted", 232 - "deleteFailed": "Failed to delete backup", 233 - "restoreTitle": "Restore from Backup", 234 - "restoreHint": "Upload a CAR file to restore your repository", 235 - "selectedFile": "Selected file", 236 - "restore": "Restore", 237 - "restoring": "Restoring...", 238 - "restored": "Repository restored successfully", 239 - "restoreFailed": "Failed to restore repository" 240 - }, 241 217 "deleteAccount": "Delete Account", 242 218 "deleteWarning": "This action is irreversible. All your data will be permanently deleted.", 243 219 "requestDeletion": "Request Account Deletion", ··· 258 234 "deleteConfirmation": "Are you absolutely sure you want to delete your account? This cannot be undone.", 259 235 "deletionFailed": "Failed to delete account", 260 236 "repoExported": "Repository exported successfully", 261 - "blobsExported": "Media files exported successfully", 262 - "noBlobsToExport": "No media files to export", 263 237 "exportFailed": "Failed to export" 264 238 } 265 239 },
-26
frontend/src/locales/fi.json
··· 213 213 "changeHandleButton": "Vaihda kรคyttรคjรคnimi", 214 214 "exportData": "Vie tiedot", 215 215 "downloadRepo": "Lataa tietovarasto", 216 - "downloadBlobs": "Lataa media", 217 216 "exporting": "Viedรครคn...", 218 - "backups": { 219 - "title": "Varmuuskopiot", 220 - "enabled": "Automaattiset varmuuskopiot kรคytรถssรค", 221 - "disabled": "Automaattiset varmuuskopiot pois kรคytรถstรค", 222 - "toggleFailed": "Varmuuskopioasetuksen pรคivitys epรคonnistui", 223 - "noBackups": "Varmuuskopioita ei ole vielรค saatavilla.", 224 - "download": "Lataa", 225 - "delete": "Poista", 226 - "createNow": "Luo varmuuskopio nyt", 227 - "created": "Varmuuskopio luotu onnistuneesti", 228 - "createFailed": "Varmuuskopion luonti epรคonnistui", 229 - "downloadFailed": "Varmuuskopion lataus epรคonnistui", 230 - "deleted": "Varmuuskopio poistettu", 231 - "deleteFailed": "Varmuuskopion poisto epรคonnistui", 232 - "restoreTitle": "Palauta varmuuskopiosta", 233 - "selectedFile": "Valittu tiedosto", 234 - "restore": "Palauta", 235 - "restoring": "Palautetaan...", 236 - "restored": "Tietovarasto palautettu onnistuneesti", 237 - "restoreFailed": "Tietovaraston palautus epรคonnistui", 238 - "autoBackup": "Automaattiset varmuuskopiot", 239 - "restoreHint": "Lataa CAR-tiedosto palauttaaksesi tietovaraston" 240 - }, 241 217 "deleteAccount": "Poista tili", 242 218 "deleteWarning": "Tรคmรค toiminto on peruuttamaton. Kaikki tietosi poistetaan pysyvรคsti.", 243 219 "requestDeletion": "Pyydรค tilin poistoa", ··· 258 234 "deleteConfirmation": "Oletko tรคysin varma, ettรค haluat poistaa tilisi? Tรคtรค ei voi perua.", 259 235 "deletionFailed": "Tilin poisto epรคonnistui", 260 236 "repoExported": "Tietovarasto viety", 261 - "blobsExported": "Mediatiedostot viety", 262 - "noBlobsToExport": "Ei vietรคviรค mediatiedostoja", 263 237 "exportFailed": "Vienti epรคonnistui" 264 238 } 265 239 },
-26
frontend/src/locales/ja.json
··· 213 213 "changeHandleButton": "ใƒใƒณใƒ‰ใƒซใ‚’ๅค‰ๆ›ด", 214 214 "exportData": "ใƒ‡ใƒผใ‚ฟใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ", 215 215 "downloadRepo": "ใƒชใƒใ‚ธใƒˆใƒชใ‚’ใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰", 216 - "downloadBlobs": "ใƒกใƒ‡ใ‚ฃใ‚ขใ‚’ใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰", 217 216 "exporting": "ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆไธญ...", 218 - "backups": { 219 - "title": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—", 220 - "enabled": "่‡ชๅ‹•ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใŒๆœ‰ๅŠนใงใ™", 221 - "disabled": "่‡ชๅ‹•ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใŒ็„กๅŠนใงใ™", 222 - "toggleFailed": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—่จญๅฎšใฎๆ›ดๆ–ฐใซๅคฑๆ•—ใ—ใพใ—ใŸ", 223 - "noBackups": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใฏใพใ ใ‚ใ‚Šใพใ›ใ‚“ใ€‚", 224 - "download": "ใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰", 225 - "delete": "ๅ‰Š้™ค", 226 - "createNow": "ไปŠใ™ใใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใ‚’ไฝœๆˆ", 227 - "created": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใŒๆญฃๅธธใซไฝœๆˆใ•ใ‚Œใพใ—ใŸ", 228 - "createFailed": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใฎไฝœๆˆใซๅคฑๆ•—ใ—ใพใ—ใŸ", 229 - "downloadFailed": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใฎใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใซๅคฑๆ•—ใ—ใพใ—ใŸ", 230 - "deleted": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใŒๅ‰Š้™คใ•ใ‚Œใพใ—ใŸ", 231 - "deleteFailed": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใฎๅ‰Š้™คใซๅคฑๆ•—ใ—ใพใ—ใŸ", 232 - "restoreTitle": "ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—ใ‹ใ‚‰ๅพฉๅ…ƒ", 233 - "selectedFile": "้ธๆŠžใ•ใ‚ŒใŸใƒ•ใ‚กใ‚คใƒซ", 234 - "restore": "ๅพฉๅ…ƒ", 235 - "restoring": "ๅพฉๅ…ƒไธญ...", 236 - "restored": "ใƒชใƒใ‚ธใƒˆใƒชใŒๆญฃๅธธใซๅพฉๅ…ƒใ•ใ‚Œใพใ—ใŸ", 237 - "restoreFailed": "ใƒชใƒใ‚ธใƒˆใƒชใฎๅพฉๅ…ƒใซๅคฑๆ•—ใ—ใพใ—ใŸ", 238 - "autoBackup": "่‡ชๅ‹•ใƒใƒƒใ‚ฏใ‚ขใƒƒใƒ—", 239 - "restoreHint": "CARใƒ•ใ‚กใ‚คใƒซใ‚’ใ‚ขใƒƒใƒ—ใƒญใƒผใƒ‰ใ—ใฆใƒชใƒใ‚ธใƒˆใƒชใ‚’ๅพฉๅ…ƒ" 240 - }, 241 217 "deleteAccount": "ใ‚ขใ‚ซใ‚ฆใƒณใƒˆๅ‰Š้™ค", 242 218 "deleteWarning": "ใ“ใฎๆ“ไฝœใฏๅ–ใ‚Šๆถˆใ›ใพใ›ใ‚“ใ€‚ใ™ในใฆใฎใƒ‡ใƒผใ‚ฟใŒๅฎŒๅ…จใซๅ‰Š้™คใ•ใ‚Œใพใ™ใ€‚", 243 219 "requestDeletion": "ใ‚ขใ‚ซใ‚ฆใƒณใƒˆๅ‰Š้™คใ‚’ใƒชใ‚ฏใ‚จใ‚นใƒˆ", ··· 258 234 "deleteConfirmation": "ๆœฌๅฝ“ใซใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‚’ๅ‰Š้™คใ—ใพใ™ใ‹๏ผŸใ“ใฎๆ“ไฝœใฏๅ–ใ‚Šๆถˆใ›ใพใ›ใ‚“ใ€‚", 259 235 "deletionFailed": 
"ใ‚ขใ‚ซใ‚ฆใƒณใƒˆใฎๅ‰Š้™คใซๅคฑๆ•—ใ—ใพใ—ใŸ", 260 236 "repoExported": "ใƒชใƒใ‚ธใƒˆใƒชใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ—ใŸ", 261 - "blobsExported": "ใƒกใƒ‡ใ‚ฃใ‚ขใƒ•ใ‚กใ‚คใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ—ใŸ", 262 - "noBlobsToExport": "ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ™ใ‚‹ใƒกใƒ‡ใ‚ฃใ‚ขใƒ•ใ‚กใ‚คใƒซใŒใ‚ใ‚Šใพใ›ใ‚“", 263 237 "exportFailed": "ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใซๅคฑๆ•—ใ—ใพใ—ใŸ" 264 238 } 265 239 },
-26
frontend/src/locales/ko.json
··· 213 213 "changeHandleButton": "ํ•ธ๋“ค ๋ณ€๊ฒฝ", 214 214 "exportData": "๋ฐ์ดํ„ฐ ๋‚ด๋ณด๋‚ด๊ธฐ", 215 215 "downloadRepo": "์ €์žฅ์†Œ ๋‹ค์šด๋กœ๋“œ", 216 - "downloadBlobs": "๋ฏธ๋””์–ด ๋‹ค์šด๋กœ๋“œ", 217 216 "exporting": "๋‚ด๋ณด๋‚ด๊ธฐ ์ค‘...", 218 - "backups": { 219 - "title": "๋ฐฑ์—…", 220 - "enabled": "ํ™œ์„ฑํ™”๋จ", 221 - "disabled": "๋น„ํ™œ์„ฑํ™”๋จ", 222 - "toggleFailed": "๋ฐฑ์—… ์„ค์ • ๋ณ€๊ฒฝ ์‹คํŒจ", 223 - "noBackups": "์•„์ง ๋ฐฑ์—…์ด ์—†์Šต๋‹ˆ๋‹ค", 224 - "download": "๋‹ค์šด๋กœ๋“œ", 225 - "delete": "์‚ญ์ œ", 226 - "createNow": "์ง€๊ธˆ ๋ฐฑ์—… ์ƒ์„ฑ", 227 - "created": "๋ฐฑ์—…์ด ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค", 228 - "createFailed": "๋ฐฑ์—… ์ƒ์„ฑ ์‹คํŒจ", 229 - "downloadFailed": "๋ฐฑ์—… ๋‹ค์šด๋กœ๋“œ ์‹คํŒจ", 230 - "deleted": "๋ฐฑ์—…์ด ์‚ญ์ œ๋˜์—ˆ์Šต๋‹ˆ๋‹ค", 231 - "deleteFailed": "๋ฐฑ์—… ์‚ญ์ œ ์‹คํŒจ", 232 - "restoreTitle": "๋ฐฑ์—…์—์„œ ๋ณต์›", 233 - "selectedFile": "์„ ํƒ๋œ ํŒŒ์ผ", 234 - "restore": "๋ฐฑ์—… ๋ณต์›", 235 - "restoring": "๋ณต์› ์ค‘...", 236 - "restored": "๋ฐฑ์—…์ด ์„ฑ๊ณต์ ์œผ๋กœ ๋ณต์›๋˜์—ˆ์Šต๋‹ˆ๋‹ค", 237 - "restoreFailed": "๋ฐฑ์—… ๋ณต์› ์‹คํŒจ", 238 - "autoBackup": "์ž๋™ ๋ฐฑ์—…", 239 - "restoreHint": "CAR ํŒŒ์ผ์„ ์—…๋กœ๋“œํ•˜์—ฌ ์ €์žฅ์†Œ ๋ณต์›" 240 - }, 241 217 "deleteAccount": "๊ณ„์ • ์‚ญ์ œ", 242 218 "deleteWarning": "์ด ์ž‘์—…์€ ๋˜๋Œ๋ฆด ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ๋ชจ๋“  ๋ฐ์ดํ„ฐ๊ฐ€ ์˜๊ตฌ์ ์œผ๋กœ ์‚ญ์ œ๋ฉ๋‹ˆ๋‹ค.", 243 219 "requestDeletion": "๊ณ„์ • ์‚ญ์ œ ์š”์ฒญ", ··· 258 234 "deleteConfirmation": "์ •๋ง๋กœ ๊ณ„์ •์„ ์‚ญ์ œํ•˜์‹œ๊ฒ ์Šต๋‹ˆ๊นŒ? ์ด ์ž‘์—…์€ ๋˜๋Œ๋ฆด ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", 259 235 "deletionFailed": "๊ณ„์ • ์‚ญ์ œ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค", 260 236 "repoExported": "์ €์žฅ์†Œ๋ฅผ ๋‚ด๋ณด๋ƒˆ์Šต๋‹ˆ๋‹ค", 261 - "blobsExported": "๋ฏธ๋””์–ด ํŒŒ์ผ์„ ๋‚ด๋ณด๋ƒˆ์Šต๋‹ˆ๋‹ค", 262 - "noBlobsToExport": "๋‚ด๋ณด๋‚ผ ๋ฏธ๋””์–ด ํŒŒ์ผ์ด ์—†์Šต๋‹ˆ๋‹ค", 263 237 "exportFailed": "๋‚ด๋ณด๋‚ด๊ธฐ์— ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค" 264 238 } 265 239 },
-26
frontend/src/locales/sv.json
··· 213 213 "changeHandleButton": "ร„ndra anvรคndarnamn", 214 214 "exportData": "Exportera data", 215 215 "downloadRepo": "Ladda ner arkiv", 216 - "downloadBlobs": "Ladda ner media", 217 216 "exporting": "Exporterar...", 218 - "backups": { 219 - "title": "Sรคkerhetskopior", 220 - "enabled": "Aktiverad", 221 - "disabled": "Inaktiverad", 222 - "toggleFailed": "Kunde inte รคndra sรคkerhetskopieringsinstรคllning", 223 - "noBackups": "Inga sรคkerhetskopior รคnnu", 224 - "download": "Ladda ner", 225 - "delete": "Radera", 226 - "createNow": "Skapa sรคkerhetskopia nu", 227 - "created": "Sรคkerhetskopia skapad", 228 - "createFailed": "Kunde inte skapa sรคkerhetskopia", 229 - "downloadFailed": "Kunde inte ladda ner sรคkerhetskopia", 230 - "deleted": "Sรคkerhetskopia raderad", 231 - "deleteFailed": "Kunde inte radera sรคkerhetskopia", 232 - "restoreTitle": "ร…terstรคll frรฅn sรคkerhetskopia", 233 - "selectedFile": "Vald fil", 234 - "restore": "ร…terstรคll sรคkerhetskopia", 235 - "restoring": "ร…terstรคller...", 236 - "restored": "Sรคkerhetskopia รฅterstรคlld", 237 - "restoreFailed": "Kunde inte รฅterstรคlla sรคkerhetskopia", 238 - "autoBackup": "Automatiska sรคkerhetskopior", 239 - "restoreHint": "Ladda upp en CAR-fil fรถr att รฅterstรคlla ditt arkiv" 240 - }, 241 217 "deleteAccount": "Radera konto", 242 218 "deleteWarning": "Denna รฅtgรคrd รคr oรฅterkallelig. All din data kommer att raderas permanent.", 243 219 "requestDeletion": "Begรคr kontoradering", ··· 258 234 "deleteConfirmation": "ร„r du helt sรคker pรฅ att du vill radera ditt konto? Detta kan inte รฅngras.", 259 235 "deletionFailed": "Kunde inte radera kontot", 260 236 "repoExported": "Arkiv exporterat", 261 - "blobsExported": "Mediafiler exporterade", 262 - "noBlobsToExport": "Inga mediafiler att exportera", 263 237 "exportFailed": "Export misslyckades" 264 238 } 265 239 },
-26
frontend/src/locales/zh.json
··· 213 213 "changeHandleButton": "ๆ›ดๆ”น็”จๆˆทๅ", 214 214 "exportData": "ๅฏผๅ‡บๆ•ฐๆฎ", 215 215 "downloadRepo": "ไธ‹่ฝฝๆ•ฐๆฎ", 216 - "downloadBlobs": "ไธ‹่ฝฝๅช’ไฝ“ๆ–‡ไปถ", 217 216 "exporting": "ๅฏผๅ‡บไธญ...", 218 - "backups": { 219 - "title": "ๅค‡ไปฝ", 220 - "enabled": "ๅทฒๅฏ็”จ", 221 - "disabled": "ๅทฒ็ฆ็”จ", 222 - "toggleFailed": "ๆ›ดๆ”นๅค‡ไปฝ่ฎพ็ฝฎๅคฑ่ดฅ", 223 - "noBackups": "ๆš‚ๆ— ๅค‡ไปฝ", 224 - "download": "ไธ‹่ฝฝ", 225 - "delete": "ๅˆ ้™ค", 226 - "createNow": "็ซ‹ๅณๅˆ›ๅปบๅค‡ไปฝ", 227 - "created": "ๅค‡ไปฝๅทฒๅˆ›ๅปบ", 228 - "createFailed": "ๅˆ›ๅปบๅค‡ไปฝๅคฑ่ดฅ", 229 - "downloadFailed": "ไธ‹่ฝฝๅค‡ไปฝๅคฑ่ดฅ", 230 - "deleted": "ๅค‡ไปฝๅทฒๅˆ ้™ค", 231 - "deleteFailed": "ๅˆ ้™คๅค‡ไปฝๅคฑ่ดฅ", 232 - "restoreTitle": "ไปŽๅค‡ไปฝๆขๅค", 233 - "selectedFile": "ๅทฒ้€‰ๆ–‡ไปถ", 234 - "restore": "ๆขๅคๅค‡ไปฝ", 235 - "restoring": "ๆขๅคไธญ...", 236 - "restored": "ๅค‡ไปฝๆขๅคๆˆๅŠŸ", 237 - "restoreFailed": "ๅค‡ไปฝๆขๅคๅคฑ่ดฅ", 238 - "autoBackup": "่‡ชๅŠจๅค‡ไปฝ", 239 - "restoreHint": "ไธŠไผ  CAR ๆ–‡ไปถไปฅๆขๅคๆ‚จ็š„ไป“ๅบ“" 240 - }, 241 217 "deleteAccount": "ๅˆ ้™ค่ดฆๆˆท", 242 218 "deleteWarning": "ๆญคๆ“ไฝœไธๅฏ้€†ใ€‚ๆ‚จ็š„ๆ‰€ๆœ‰ๆ•ฐๆฎๅฐ†่ขซๆฐธไน…ๅˆ ้™คใ€‚", 243 219 "requestDeletion": "่ฏทๆฑ‚ๅˆ ้™ค่ดฆๆˆท", ··· 258 234 "deleteConfirmation": "ๆ‚จ็กฎๅฎš่ฆๅˆ ้™ค่ดฆๆˆทๅ—๏ผŸๆญคๆ“ไฝœๆ— ๆณ•ๆ’ค้”€ใ€‚", 259 235 "deletionFailed": "่ดฆๆˆทๅˆ ้™คๅคฑ่ดฅ", 260 236 "repoExported": "ๆ•ฐๆฎๅฏผๅ‡บๆˆๅŠŸ", 261 - "blobsExported": "ๅช’ไฝ“ๆ–‡ไปถๅฏผๅ‡บๆˆๅŠŸ", 262 - "noBlobsToExport": "ๆฒกๆœ‰ๅฏๅฏผๅ‡บ็š„ๅช’ไฝ“ๆ–‡ไปถ", 263 237 "exportFailed": "ๅฏผๅ‡บๅคฑ่ดฅ" 264 238 } 265 239 },
+1 -1
frontend/src/routes/UiTest.svelte
··· 7 7 8 8 let inputValue = $state('') 9 9 let inputError = $state('') 10 - let inputDisabled = $state(false) 10 + let inputDisabled = $state('') 11 11 12 12 const serverConfig = getServerConfigState() 13 13
-98
frontend/src/styles/dashboard.css
··· 440 440 width: 100%; 441 441 } 442 442 443 - .backup-list, 444 443 .passkey-list, 445 444 .sso-list, 446 445 .did-editor .list { ··· 452 451 gap: var(--space-2); 453 452 } 454 453 455 - .backup-item { 456 - display: flex; 457 - justify-content: space-between; 458 - align-items: center; 459 - padding: var(--space-3); 460 - background: var(--bg-card); 461 - } 462 - 463 - .backup-info { 464 - display: flex; 465 - gap: var(--space-3); 466 - font-size: var(--text-sm); 467 - } 468 - 469 - .backup-date { 470 - font-weight: var(--font-medium); 471 - } 472 - 473 - .backup-size { 474 - color: var(--text-secondary); 475 - } 476 - 477 - .backup-actions { 478 - display: flex; 479 - justify-content: space-between; 480 - align-items: center; 481 - margin-bottom: var(--space-4); 482 - gap: var(--space-3); 483 - flex-wrap: wrap; 484 - } 485 - 486 - .backup-toggle { 487 - margin-bottom: var(--space-3); 488 - } 489 - 490 - .backup-toggle .toggle-label { 491 - display: flex; 492 - align-items: center; 493 - gap: var(--space-2); 494 - cursor: pointer; 495 - font-size: var(--text-sm); 496 - white-space: nowrap; 497 - } 498 - 499 - .backup-toggle .toggle-label input[type="checkbox"] { 500 - width: 16px; 501 - height: 16px; 502 - flex-shrink: 0; 503 - } 504 - 505 - .backup-item-actions, 506 454 .controllers .item-actions { 507 455 display: flex; 508 456 gap: var(--space-2); 509 457 } 510 458 511 - .restore-section { 512 - margin-top: var(--space-5); 513 - padding-top: var(--space-4); 514 - border-top: 1px solid var(--border-color); 515 - } 516 - 517 - .restore-section h4 { 518 - margin: 0 0 var(--space-2) 0; 519 - font-size: var(--text-sm); 520 - font-weight: var(--font-medium); 521 - } 522 - 523 - .restore-section .hint { 524 - margin-bottom: var(--space-3); 525 - } 526 - 527 - .restore-form { 528 - display: flex; 529 - gap: var(--space-2); 530 - flex-wrap: wrap; 531 - } 532 - 533 - .restore-form input[type="file"] { 534 - flex: 1; 535 - min-width: 200px; 536 - } 537 - 538 - 
.restore-preview { 539 - margin-top: var(--space-2); 540 - font-size: var(--text-sm); 541 - color: var(--text-secondary); 542 - display: flex; 543 - gap: var(--space-2); 544 - flex-wrap: wrap; 545 - } 546 - 547 - .restore-preview .file-name { 548 - font-weight: var(--font-medium); 549 - color: var(--text-primary); 550 - } 551 - 552 459 .danger-zone h3 { 553 460 color: var(--error-text); 554 461 } ··· 701 608 702 609 .password-actions, 703 610 .totp-actions, 704 - .export-buttons { 705 - display: flex; 706 - gap: var(--space-2); 707 - flex-wrap: wrap; 708 - } 709 611 710 612 .remove-password-form { 711 613 background: var(--error-bg);
-4
frontend/src/tests/mocks.ts
··· 367 367 "com.atproto.repo.listRecords", 368 368 () => jsonResponse({ records: [] }), 369 369 ); 370 - mockEndpoint( 371 - "_backup.listBackups", 372 - () => jsonResponse({ backups: [] }), 373 - ); 374 370 } 375 371 export function setupAuthenticatedUser( 376 372 sessionOverrides?: Partial<Session>,
+2
migrations/20260318_drop_backups.sql
··· 1 + DROP TABLE IF EXISTS account_backups; 2 + ALTER TABLE users DROP COLUMN IF EXISTS backup_enabled;
+1 -11
module.nix
··· 37 37 dataDir = mkOption { 38 38 type = types.str; 39 39 default = "/var/lib/tranquil-pds"; 40 - description = "Working directory for tranquil-pds. Also expected to be used for data (blobs, backups)"; 40 + description = "Working directory for tranquil-pds. Also expected to be used for data (blobs)"; 41 41 }; 42 42 43 43 environmentFiles = mkOption { ··· 116 116 }; 117 117 }; 118 118 119 - backup = { 120 - path = mkOption { 121 - type = types.path; 122 - default = "/var/lib/tranquil-pds/backups"; 123 - description = "Directory for storing backups"; 124 - }; 125 - }; 126 - 127 119 email = { 128 120 sendmail_path = mkOption { 129 121 type = types.path; ··· 189 181 [ 190 182 cfg.dataDir 191 183 cfg.settings.storage.path 192 - cfg.settings.backup.path 193 184 ] 194 185 (_: { 195 186 d = { ··· 242 233 243 234 ReadWritePaths = [ 244 235 cfg.settings.storage.path 245 - cfg.settings.backup.path 246 236 ]; 247 237 }; 248 238 };
+2 -4
scripts/install-debian.sh
··· 72 72 echo " - PostgreSQL database 'pds' and all data" 73 73 echo " - All Tranquil PDS configuration and credentials" 74 74 echo " - All source code in /opt/tranquil-pds" 75 - echo " - All blobs and backups in /var/lib/tranquil/" 75 + echo " - All blobs in /var/lib/tranquil/" 76 76 echo "" 77 77 read -p "Type 'NUKE' to confirm: " CONFIRM_NUKE 78 78 if [[ "$CONFIRM_NUKE" == "NUKE" ]]; then ··· 195 195 log_success "postgres configured" 196 196 197 197 log_info "Creating blob storage directories..." 198 - mkdir -p /var/lib/tranquil/blobs /var/lib/tranquil/backups 198 + mkdir -p /var/lib/tranquil/blobs 199 199 log_success "Blob storage directories created" 200 200 201 201 log_info "Installing rust..." ··· 302 302 DATABASE_MAX_CONNECTIONS=100 303 303 DATABASE_MIN_CONNECTIONS=10 304 304 BLOB_STORAGE_PATH=/var/lib/tranquil/blobs 305 - BACKUP_STORAGE_PATH=/var/lib/tranquil/backups 306 305 JWT_SECRET=${JWT_SECRET} 307 306 DPOP_SECRET=${DPOP_SECRET} 308 307 MASTER_KEY=${MASTER_KEY} ··· 501 500 echo "" 502 501 echo "Data locations:" 503 502 echo " Blobs: /var/lib/tranquil/blobs" 504 - echo " Backups: /var/lib/tranquil/backups" 505 503 echo "" 506 504 echo "Commands:" 507 505 echo " journalctl -u tranquil-pds -f # logs"
-1
test.nix
··· 122 122 123 123 with subtest("data directories exist"): 124 124 server.succeed("test -d /var/lib/tranquil-pds/blobs") 125 - server.succeed("test -d /var/lib/tranquil-pds/backups") 126 125 127 126 with subtest("postgres database created"): 128 127 server.succeed("sudo -u tranquil-pds psql -d tranquil-pds -c 'SELECT 1'")

History

1 round 0 comments
sign up or login to add to the discussion
oyster.cafe submitted #0
1 commit
expand
fix(backups): remove useless backups concept
expand 0 comments
pull request successfully merged