dist/index.html  (+2 -2)
···
      ATLast: Sync Your TikTok Follows → ATmosphere (Skylight, Bluesky,
      etc.)
    </title>
-   <script type="module" crossorigin src="/assets/index-Dx_AzG_Q.js"></script>
-   <link rel="stylesheet" crossorigin href="/assets/index-C69tQ_4S.css">
+   <script type="module" crossorigin src="/assets/index-BmU3Lkw-.js"></script>
+   <link rel="stylesheet" crossorigin href="/assets/index-DQCpc624.css">
  </head>
  <body>
    <div id="root"></div>
netlify.toml  (+6 -1)
···
  publish = "dist"

  [[redirects]]
+   from = "/oauth-client-metadata.json"
+   to = "/.netlify/functions/client-metadata"
+   status = 200
+
+ [[redirects]]
    from = "/oauth/callback"
    to = "/.netlify/functions/oauth-callback"
    status = 200

  [[headers]]
-   for = "/client-metadata.json"
+   for = "/oauth-client-metadata.json"
    [headers.values]
      Access-Control-Allow-Origin = "*"
      Cache-Control = "public, max-age=3600"
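The new `/oauth-client-metadata.json` redirect points at a `client-metadata` function that is not part of this diff. For context: an atproto OAuth client is identified by the URL of a JSON metadata document it serves itself. A minimal sketch of what such a function might return is below; the field values (app name, scope, callback path) and the `URL` environment variable are assumptions, not taken from the real implementation.

```ts
// Hypothetical sketch of netlify/functions/client-metadata.ts (not shown in this diff).
import { Handler } from "@netlify/functions";

export const handler: Handler = async () => {
  // Netlify exposes the site's primary URL as the URL env var; fallback is illustrative only.
  const base = process.env.URL || "https://example.netlify.app";
  return {
    statusCode: 200,
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
      "Cache-Control": "public, max-age=3600",
    },
    body: JSON.stringify({
      client_id: `${base}/oauth-client-metadata.json`,
      client_name: "ATLast",
      client_uri: base,
      redirect_uris: [`${base}/oauth/callback`],
      grant_types: ["authorization_code", "refresh_token"],
      response_types: ["code"],
      scope: "atproto transition:generic",
      token_endpoint_auth_method: "none",
      application_type: "web",
      dpop_bound_access_tokens: true,
    }),
  };
};
```

The CORS and cache headers mirror the `[[headers]]` block above, which now targets the same `/oauth-client-metadata.json` path.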
netlify/functions/batch-follow-users.ts  (+131 -206)
···
- import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
- import { SessionManager } from "./session-manager";
- import { getDbClient } from "./db";
- import cookie from "cookie";
+ import { AuthenticatedHandler } from "./shared/types";
+ import { SessionService } from "./shared/services/session";
+ import { MatchRepository } from "./shared/repositories";
+ import { successResponse } from "./shared/utils";
+ import { withAuthErrorHandling } from "./shared/middleware";
+ import { ValidationError } from "./shared/constants/errors";

- export const handler: Handler = async (
-   event: HandlerEvent,
- ): Promise<HandlerResponse> => {
-   // Only allow POST
-   if (event.httpMethod !== "POST") {
-     return {
-       statusCode: 405,
-       headers: { "Content-Type": "application/json" },
-       body: JSON.stringify({ error: "Method not allowed" }),
-     };
+ const batchFollowHandler: AuthenticatedHandler = async (context) => {
+   // Parse request body
+   const body = JSON.parse(context.event.body || "{}");
+   const dids: string[] = body.dids || [];
+   const followLexicon: string = body.followLexicon || "app.bsky.graph.follow";
+
+   if (!Array.isArray(dids) || dids.length === 0) {
+     throw new ValidationError("dids array is required and must not be empty");
    }

-   try {
-     // Parse request body
-     const body = JSON.parse(event.body || "{}");
-     const dids: string[] = body.dids || [];
-     const followLexicon: string = body.followLexicon || "app.bsky.graph.follow";
+   // Limit batch size to prevent timeouts and respect rate limits
+   if (dids.length > 100) {
+     throw new ValidationError("Maximum 100 DIDs per batch");
+   }

-     if (!Array.isArray(dids) || dids.length === 0) {
-       return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "dids array is required and must not be empty",
-         }),
-       };
-     }
+   // Get authenticated agent using SessionService
+   const { agent } = await SessionService.getAgentForSession(context.sessionId);

-     // Limit batch size to prevent timeouts and respect rate limits
-     if (dids.length > 100) {
-       return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "Maximum 100 DIDs per batch" }),
-       };
-     }
+   // Check existing follows before attempting to follow
+   const alreadyFollowing = new Set<string>();
+   try {
+     let cursor: string | undefined = undefined;
+     let hasMore = true;
+     const didsSet = new Set(dids);

-     // Get session from cookie
-     const cookies = event.headers.cookie
-       ? cookie.parse(event.headers.cookie)
-       : {};
-     const sessionId = cookies.atlast_session;
+     while (hasMore && didsSet.size > 0) {
+       const response = await agent.api.com.atproto.repo.listRecords({
+         repo: context.did,
+         collection: followLexicon,
+         limit: 100,
+         cursor,
+       });

-     if (!sessionId) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "No session cookie" }),
-       };
-     }
-
-     // Get authenticated agent using SessionManager
-     const { agent, did: userDid } =
-       await SessionManager.getAgentForSession(sessionId);
-
-     // Check existing follows before attempting to follow
-     const alreadyFollowing = new Set<string>();
-     try {
-       let cursor: string | undefined = undefined;
-       let hasMore = true;
-       const didsSet = new Set(dids);
-
-       while (hasMore && didsSet.size > 0) {
-         const response = await agent.api.com.atproto.repo.listRecords({
-           repo: userDid,
-           collection: followLexicon,
-           limit: 100,
-           cursor,
-         });
-
-         for (const record of response.data.records) {
-           const followRecord = record.value as any;
-           if (followRecord?.subject && didsSet.has(followRecord.subject)) {
-             alreadyFollowing.add(followRecord.subject);
-             didsSet.delete(followRecord.subject);
-           }
+       for (const record of response.data.records) {
+         const followRecord = record.value as any;
+         if (followRecord?.subject && didsSet.has(followRecord.subject)) {
+           alreadyFollowing.add(followRecord.subject);
+           didsSet.delete(followRecord.subject);
          }
+       }

-         cursor = response.data.cursor;
-         hasMore = !!cursor;
+       cursor = response.data.cursor;
+       hasMore = !!cursor;

-         if (didsSet.size === 0) {
-           break;
-         }
+       if (didsSet.size === 0) {
+         break;
        }
-     } catch (error) {
-       console.error("Error checking existing follows:", error);
-       // Continue - we'll handle duplicates in the follow loop
      }
+   } catch (error) {
+     console.error("Error checking existing follows:", error);
+     // Continue - we'll handle duplicates in the follow loop
+   }

-     // Follow all users
-     const results = [];
-     let consecutiveErrors = 0;
-     const MAX_CONSECUTIVE_ERRORS = 3;
-     const sql = getDbClient();
+   // Follow all users
+   const results = [];
+   let consecutiveErrors = 0;
+   const MAX_CONSECUTIVE_ERRORS = 3;
+   const matchRepo = new MatchRepository();

-     for (const did of dids) {
-       // Skip if already following
-       if (alreadyFollowing.has(did)) {
-         results.push({
-           did,
-           success: true,
-           alreadyFollowing: true,
-           error: null,
-         });
+   for (const did of dids) {
+     // Skip if already following
+     if (alreadyFollowing.has(did)) {
+       results.push({
+         did,
+         success: true,
+         alreadyFollowing: true,
+         error: null,
+       });

-         // Update database follow status
-         try {
-           await sql`
-             UPDATE atproto_matches
-             SET follow_status = follow_status || jsonb_build_object(${followLexicon}, true),
-                 last_follow_check = NOW()
-             WHERE atproto_did = ${did}
-           `;
-         } catch (dbError) {
-           console.error("Failed to update follow status in DB:", dbError);
-         }
+       // Update database follow status
+       try {
+         await matchRepo.updateFollowStatus(did, followLexicon, true);
+       } catch (dbError) {
+         console.error("Failed to update follow status in DB:", dbError);
+       }

-         continue;
-       }
+       continue;
+     }

-       try {
-         await agent.api.com.atproto.repo.createRecord({
-           repo: userDid,
-           collection: followLexicon,
-           record: {
-             $type: followLexicon,
-             subject: did,
-             createdAt: new Date().toISOString(),
-           },
-         });
+     try {
+       await agent.api.com.atproto.repo.createRecord({
+         repo: context.did,
+         collection: followLexicon,
+         record: {
+           $type: followLexicon,
+           subject: did,
+           createdAt: new Date().toISOString(),
+         },
+       });

-         results.push({
-           did,
-           success: true,
-           alreadyFollowing: false,
-           error: null,
-         });
+       results.push({
+         did,
+         success: true,
+         alreadyFollowing: false,
+         error: null,
+       });

-         // Update database follow status
-         try {
-           await sql`
-             UPDATE atproto_matches
-             SET follow_status = follow_status || jsonb_build_object(${followLexicon}, true),
-                 last_follow_check = NOW()
-             WHERE atproto_did = ${did}
-           `;
-         } catch (dbError) {
-           console.error("Failed to update follow status in DB:", dbError);
-         }
+       // Update database follow status
+       try {
+         await matchRepo.updateFollowStatus(did, followLexicon, true);
+       } catch (dbError) {
+         console.error("Failed to update follow status in DB:", dbError);
+       }

-         // Reset error counter on success
-         consecutiveErrors = 0;
-       } catch (error) {
-         consecutiveErrors++;
+       // Reset error counter on success
+       consecutiveErrors = 0;
+     } catch (error) {
+       consecutiveErrors++;

-         results.push({
-           did,
-           success: false,
-           alreadyFollowing: false,
-           error: error instanceof Error ? error.message : "Follow failed",
-         });
+       results.push({
+         did,
+         success: false,
+         alreadyFollowing: false,
+         error: error instanceof Error ? error.message : "Follow failed",
+       });

-         // If we hit rate limits, implement exponential backoff
-         if (
-           error instanceof Error &&
-           (error.message.includes("rate limit") ||
-             error.message.includes("429"))
-         ) {
-           const backoffDelay = Math.min(
-             200 * Math.pow(2, consecutiveErrors),
-             2000,
-           );
-           console.log(`Rate limit hit. Backing off for ${backoffDelay}ms...`);
-           await new Promise((resolve) => setTimeout(resolve, backoffDelay));
-         } else if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
-           // For other repeated errors, small backoff
-           await new Promise((resolve) => setTimeout(resolve, 500));
-         }
+       // If we hit rate limits, implement exponential backoff
+       if (
+         error instanceof Error &&
+         (error.message.includes("rate limit") || error.message.includes("429"))
+       ) {
+         const backoffDelay = Math.min(
+           200 * Math.pow(2, consecutiveErrors),
+           2000,
+         );
+         console.log(`Rate limit hit. Backing off for ${backoffDelay}ms...`);
+         await new Promise((resolve) => setTimeout(resolve, backoffDelay));
+       } else if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
+         // For other repeated errors, small backoff
+         await new Promise((resolve) => setTimeout(resolve, 500));
      }
    }
-
-     const successCount = results.filter((r) => r.success).length;
-     const failCount = results.filter((r) => !r.success).length;
-     const alreadyFollowingCount = results.filter(
-       (r) => r.alreadyFollowing,
-     ).length;
-
-     return {
-       statusCode: 200,
-       headers: {
-         "Content-Type": "application/json",
-         "Access-Control-Allow-Origin": "*",
-       },
-       body: JSON.stringify({
-         success: true,
-         total: dids.length,
-         succeeded: successCount,
-         failed: failCount,
-         alreadyFollowing: alreadyFollowingCount,
-         results,
-       }),
-     };
-   } catch (error) {
-     console.error("Batch follow error:", error);
+   }

-     // Handle authentication errors specifically
-     if (error instanceof Error && error.message.includes("session")) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "Invalid or expired session",
-           details: error.message,
-         }),
-       };
-     }
+   const successCount = results.filter((r) => r.success).length;
+   const failCount = results.filter((r) => !r.success).length;
+   const alreadyFollowingCount = results.filter(
+     (r) => r.alreadyFollowing,
+   ).length;

-     return {
-       statusCode: 500,
-       headers: { "Content-Type": "application/json" },
-       body: JSON.stringify({
-         error: "Failed to follow users",
-         details: error instanceof Error ? error.message : "Unknown error",
-       }),
-     };
-   }
+   return successResponse({
+     success: true,
+     total: dids.length,
+     succeeded: successCount,
+     failed: failCount,
+     alreadyFollowing: alreadyFollowingCount,
+     results,
+   });
  };
+
+ export const handler = withAuthErrorHandling(batchFollowHandler);
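None of the `shared/*` modules this refactor leans on (`AuthenticatedHandler`, `withAuthErrorHandling`, `successResponse`, the error classes) appear in the diff. The sketch below shows the shapes the handlers above seem to assume, inferred purely from call sites; the real implementations will differ, and the session-to-DID lookup is stubbed rather than invented.

```ts
// Hypothetical sketch of shared/types, shared/utils, shared/constants/errors and
// shared/middleware; shapes inferred from how the refactored handlers use them.
import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
import cookie from "cookie";

export interface AuthContext {
  event: HandlerEvent;
  sessionId: string;
  did: string;
}

export type AuthenticatedHandler = (context: AuthContext) => Promise<HandlerResponse>;

export class ValidationError extends Error {}
export class NotFoundError extends Error {}

export function successResponse(
  data: unknown,
  statusCode = 200,
  extraHeaders: Record<string, string> = {},
): HandlerResponse {
  return {
    statusCode,
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
      ...extraHeaders,
    },
    body: JSON.stringify(data),
  };
}

function errorResponse(statusCode: number, error: string): HandlerResponse {
  return {
    statusCode,
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ error }),
  };
}

// Stub: in the real codebase the DID presumably comes from the session store.
async function resolveDidForSession(sessionId: string): Promise<string | null> {
  throw new Error("wire this to the real session lookup");
}

export function withAuthErrorHandling(fn: AuthenticatedHandler): Handler {
  return async (event) => {
    try {
      const cookies = event.headers.cookie ? cookie.parse(event.headers.cookie) : {};
      const sessionId = cookies.atlast_session;
      if (!sessionId) return errorResponse(401, "No session cookie");

      const did = await resolveDidForSession(sessionId);
      if (!did) return errorResponse(401, "Invalid or expired session");

      return await fn({ event, sessionId, did });
    } catch (error) {
      // Map thrown errors to the same status codes the old inline handlers returned.
      if (error instanceof ValidationError) return errorResponse(400, error.message);
      if (error instanceof NotFoundError) return errorResponse(404, error.message);
      if (error instanceof Error && error.message.includes("session")) {
        return errorResponse(401, "Invalid or expired session");
      }
      console.error("Unhandled function error:", error);
      return errorResponse(500, error instanceof Error ? error.message : "Unknown error");
    }
  };
}
```

Centralizing the cookie parsing and the 400/401/404/500 mapping in one wrapper is what lets each handler body above shrink to its happy path.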
netlify/functions/batch-search-actors.ts  (+146 -203)
···
- import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
- import { SessionManager } from "./session-manager";
- import cookie from "cookie";
+ import { AuthenticatedHandler } from "./shared/types";
+ import { SessionService } from "./shared/services/session";
+ import { successResponse } from "./shared/utils";
+ import { withAuthErrorHandling } from "./shared/middleware";
+ import { ValidationError } from "./shared/constants/errors";

- export const handler: Handler = async (
-   event: HandlerEvent,
- ): Promise<HandlerResponse> => {
-   try {
-     // Parse batch request
-     const body = JSON.parse(event.body || "{}");
-     const usernames: string[] = body.usernames || [];
+ const batchSearchHandler: AuthenticatedHandler = async (context) => {
+   // Parse batch request
+   const body = JSON.parse(context.event.body || "{}");
+   const usernames: string[] = body.usernames || [];

-     if (!Array.isArray(usernames) || usernames.length === 0) {
+   if (!Array.isArray(usernames) || usernames.length === 0) {
+     throw new ValidationError(
+       "usernames array is required and must not be empty",
+     );
+   }
+
+   // Limit batch size to prevent timeouts
+   if (usernames.length > 50) {
+     throw new ValidationError("Maximum 50 usernames per batch");
+   }
+
+   // Get authenticated agent using SessionService
+   const { agent } = await SessionService.getAgentForSession(context.sessionId);
+
+   // Search all usernames in parallel
+   const searchPromises = usernames.map(async (username) => {
+     try {
+       const response = await agent.app.bsky.actor.searchActors({
+         q: username,
+         limit: 20,
+       });
+
+       // Filter and rank matches
+       const normalize = (s: string) => s.toLowerCase().replace(/[._-]/g, "");
+       const normalizedUsername = normalize(username);
+
+       const rankedActors = response.data.actors
+         .map((actor: any) => {
+           const handlePart = actor.handle.split(".")[0];
+           const normalizedHandle = normalize(handlePart);
+           const normalizedFullHandle = normalize(actor.handle);
+           const normalizedDisplayName = normalize(actor.displayName || "");
+
+           let score = 0;
+           if (normalizedHandle === normalizedUsername) score = 100;
+           else if (normalizedFullHandle === normalizedUsername) score = 90;
+           else if (normalizedDisplayName === normalizedUsername) score = 80;
+           else if (normalizedHandle.includes(normalizedUsername)) score = 60;
+           else if (normalizedFullHandle.includes(normalizedUsername))
+             score = 50;
+           else if (normalizedDisplayName.includes(normalizedUsername))
+             score = 40;
+           else if (normalizedUsername.includes(normalizedHandle)) score = 30;
+
+           return {
+             ...actor,
+             matchScore: score,
+             did: actor.did,
+           };
+         })
+         .filter((actor: any) => actor.matchScore > 0)
+         .sort((a: any, b: any) => b.matchScore - a.matchScore)
+         .slice(0, 5);
+
        return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "usernames array is required and must not be empty",
-         }),
+         username,
+         actors: rankedActors,
+         error: null,
        };
-     }
-
-     // Limit batch size to prevent timeouts
-     if (usernames.length > 50) {
+     } catch (error) {
        return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "Maximum 50 usernames per batch" }),
+         username,
+         actors: [],
+         error: error instanceof Error ? error.message : "Search failed",
        };
      }
+   });

-     // Get session from cookie
-     const cookies = event.headers.cookie
-       ? cookie.parse(event.headers.cookie)
-       : {};
-     const sessionId = cookies.atlast_session;
+   const results = await Promise.all(searchPromises);

-     if (!sessionId) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "No session cookie" }),
-       };
-     }
+   // Enrich results with follower and post counts using getProfiles
+   const allDids = results
+     .flatMap((r) => r.actors.map((a: any) => a.did))
+     .filter((did): did is string => !!did);

-     // Get authenticated agent using SessionManager
-     const { agent } = await SessionManager.getAgentForSession(sessionId);
+   if (allDids.length > 0) {
+     // Create a map to store enriched profile data
+     const profileDataMap = new Map<
+       string,
+       { postCount: number; followerCount: number }
+     >();

-     // Search all usernames in parallel
-     const searchPromises = usernames.map(async (username) => {
+     // Batch fetch profiles (25 at a time - API limit)
+     const PROFILE_BATCH_SIZE = 25;
+     for (let i = 0; i < allDids.length; i += PROFILE_BATCH_SIZE) {
+       const batch = allDids.slice(i, i + PROFILE_BATCH_SIZE);
        try {
-         const response = await agent.app.bsky.actor.searchActors({
-           q: username,
-           limit: 20,
+         const profilesResponse = await agent.app.bsky.actor.getProfiles({
+           actors: batch,
          });

-         // Filter and rank matches (same logic as before)
-         const normalize = (s: string) => s.toLowerCase().replace(/[._-]/g, "");
-         const normalizedUsername = normalize(username);
+         profilesResponse.data.profiles.forEach((profile: any) => {
+           profileDataMap.set(profile.did, {
+             postCount: profile.postsCount || 0,
+             followerCount: profile.followersCount || 0,
+           });
+         });
+       } catch (error) {
+         console.error("Failed to fetch profile batch:", error);
+         // Continue even if one batch fails
+       }
+     }

-         const rankedActors = response.data.actors
-           .map((actor: any) => {
-             const handlePart = actor.handle.split(".")[0];
-             const normalizedHandle = normalize(handlePart);
-             const normalizedFullHandle = normalize(actor.handle);
-             const normalizedDisplayName = normalize(actor.displayName || "");
-
-             let score = 0;
-             if (normalizedHandle === normalizedUsername) score = 100;
-             else if (normalizedFullHandle === normalizedUsername) score = 90;
-             else if (normalizedDisplayName === normalizedUsername) score = 80;
-             else if (normalizedHandle.includes(normalizedUsername)) score = 60;
-             else if (normalizedFullHandle.includes(normalizedUsername))
-               score = 50;
-             else if (normalizedDisplayName.includes(normalizedUsername))
-               score = 40;
-             else if (normalizedUsername.includes(normalizedHandle)) score = 30;
-
-             return {
-               ...actor,
-               matchScore: score,
-               did: actor.did,
-             };
-           })
-           .filter((actor: any) => actor.matchScore > 0)
-           .sort((a: any, b: any) => b.matchScore - a.matchScore)
-           .slice(0, 5);
-
+     // Merge enriched data back into results
+     results.forEach((result) => {
+       result.actors = result.actors.map((actor: any) => {
+         const enrichedData = profileDataMap.get(actor.did);
          return {
-           username,
-           actors: rankedActors,
-           error: null,
-         };
-       } catch (error) {
-         return {
-           username,
-           actors: [],
-           error: error instanceof Error ? error.message : "Search failed",
+           ...actor,
+           postCount: enrichedData?.postCount || 0,
+           followerCount: enrichedData?.followerCount || 0,
          };
-       }
+       });
      });
+   }

-     const results = await Promise.all(searchPromises);
-
-     // Enrich results with follower and post counts using getProfiles
-     const allDids = results
-       .flatMap((r) => r.actors.map((a: any) => a.did))
-       .filter((did): did is string => !!did);
+   // Check follow status for all matched DIDs in chosen lexicon
+   const followLexicon = body.followLexicon || "app.bsky.graph.follow";

-     if (allDids.length > 0) {
-       // Create a map to store enriched profile data
-       const profileDataMap = new Map<
-         string,
-         { postCount: number; followerCount: number }
-       >();
+   if (allDids.length > 0) {
+     try {
+       let cursor: string | undefined = undefined;
+       let hasMore = true;
+       const didsSet = new Set(allDids);
+       const followedDids = new Set<string>();

-       // Batch fetch profiles (25 at a time - API limit)
-       const PROFILE_BATCH_SIZE = 25;
-       for (let i = 0; i < allDids.length; i += PROFILE_BATCH_SIZE) {
-         const batch = allDids.slice(i, i + PROFILE_BATCH_SIZE);
-         try {
-           const profilesResponse = await agent.app.bsky.actor.getProfiles({
-             actors: batch,
-           });
-
-           profilesResponse.data.profiles.forEach((profile: any) => {
-             profileDataMap.set(profile.did, {
-               postCount: profile.postsCount || 0,
-               followerCount: profile.followersCount || 0,
-             });
-           });
-         } catch (error) {
-           console.error("Failed to fetch profile batch:", error);
-           // Continue even if one batch fails
-         }
-       }
-
-       // Merge enriched data back into results
-       results.forEach((result) => {
-         result.actors = result.actors.map((actor: any) => {
-           const enrichedData = profileDataMap.get(actor.did);
-           return {
-             ...actor,
-             postCount: enrichedData?.postCount || 0,
-             followerCount: enrichedData?.followerCount || 0,
+       // Query user's follow graph
+       while (hasMore && didsSet.size > 0) {
+         const response = await agent.api.com.atproto.repo.listRecords({
+           repo: context.did,
+           collection: followLexicon,
+           limit: 100,
+           cursor,
          });
-       });
-     }

-     // Check follow status for all matched DIDs in chosen lexicon
-     const followLexicon = body.followLexicon || "app.bsky.graph.follow";
-
-     if (allDids.length > 0) {
-       try {
-         let cursor: string | undefined = undefined;
-         let hasMore = true;
-         const didsSet = new Set(allDids);
-         const followedDids = new Set<string>();
-         const repoDid = await SessionManager.getDIDForSession(sessionId);
-
-         if (repoDid === null) {
-           throw new Error("Could not retrieve DID for session.");
-         }
-
-         // Query user's follow graph
-         while (hasMore && didsSet.size > 0) {
-           const response = await agent.api.com.atproto.repo.listRecords({
-             repo: repoDid,
-             collection: followLexicon,
-             limit: 100,
-             cursor,
-           });
-
-           // Check each record
-           for (const record of response.data.records) {
-             const followRecord = record.value as any;
-             if (followRecord?.subject && didsSet.has(followRecord.subject)) {
-               followedDids.add(followRecord.subject);
-             }
-           }
+         // Check each record
+         for (const record of response.data.records) {
+           const followRecord = record.value as any;
+           if (followRecord?.subject && didsSet.has(followRecord.subject)) {
+             followedDids.add(followRecord.subject);
            }
-
-           cursor = response.data.cursor;
-           hasMore = !!cursor;
          }

-         // Add follow status to results
-         results.forEach((result) => {
-           result.actors = result.actors.map((actor: any) => ({
-             ...actor,
-             followStatus: {
-               [followLexicon]: followedDids.has(actor.did),
-             },
-           }));
-         });
-       } catch (error) {
-         console.error("Failed to check follow status during search:", error);
-         // Continue without follow status - non-critical
+         cursor = response.data.cursor;
+         hasMore = !!cursor;
        }
-     }

-     return {
-       statusCode: 200,
-       headers: {
-         "Content-Type": "application/json",
-         "Access-Control-Allow-Origin": "*",
-       },
-       body: JSON.stringify({ results }),
-     };
-   } catch (error) {
-     console.error("Batch search error:", error);
-
-     // Handle authentication errors specifically
-     if (error instanceof Error && error.message.includes("session")) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "Invalid or expired session",
-           details: error.message,
-         }),
-       };
+       // Add follow status to results
+       results.forEach((result) => {
+         result.actors = result.actors.map((actor: any) => ({
+           ...actor,
+           followStatus: {
+             [followLexicon]: followedDids.has(actor.did),
+           },
+         }));
+       });
+     } catch (error) {
+       console.error("Failed to check follow status during search:", error);
+       // Continue without follow status - non-critical
      }
+   }

-     return {
-       statusCode: 500,
-       headers: { "Content-Type": "application/json" },
-       body: JSON.stringify({
-         error: "Failed to search actors",
-         details: error instanceof Error ? error.message : "Unknown error",
-       }),
-     };
-   }
+   return successResponse({ results });
  };
+
+ export const handler = withAuthErrorHandling(batchSearchHandler);
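The request/response contract is unchanged in spirit by this refactor, only the success path is now wrapped by `successResponse`. For reviewers, a client-side call might look like the sketch below; the payload and result shapes are read off the handler above, and the `/.netlify/functions/...` path is Netlify's default function routing rather than anything confirmed by this diff.

```ts
// Example client call for the batch search endpoint (shapes inferred from the handler).
type SearchedActor = {
  did: string;
  handle: string;
  matchScore: number;
  postCount: number;
  followerCount: number;
  followStatus?: Record<string, boolean>;
};

type SearchResult = {
  username: string;
  actors: SearchedActor[];
  error: string | null;
};

export async function batchSearch(
  usernames: string[],
  followLexicon = "app.bsky.graph.follow",
): Promise<SearchResult[]> {
  const res = await fetch("/.netlify/functions/batch-search-actors", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    credentials: "include", // send the atlast_session cookie
    // The handler rejects more than 50 usernames per batch.
    body: JSON.stringify({ usernames: usernames.slice(0, 50), followLexicon }),
  });
  if (!res.ok) throw new Error(`Search failed: ${res.status}`);
  const { results } = (await res.json()) as { results: SearchResult[] };
  return results;
}
```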
netlify/functions/check-follow-status.ts  (+55 -118)
···
- import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
- import { SessionManager } from "./session-manager";
- import cookie from "cookie";
+ import { AuthenticatedHandler } from "./shared/types";
+ import { SessionService } from "./shared/services/session";
+ import { successResponse } from "./shared/utils";
+ import { withAuthErrorHandling } from "./shared/middleware";
+ import { ValidationError } from "./shared/constants/errors";

- export const handler: Handler = async (
-   event: HandlerEvent,
- ): Promise<HandlerResponse> => {
-   if (event.httpMethod !== "POST") {
-     return {
-       statusCode: 405,
-       headers: { "Content-Type": "application/json" },
-       body: JSON.stringify({ error: "Method not allowed" }),
-     };
+ const checkFollowStatusHandler: AuthenticatedHandler = async (context) => {
+   // Parse request body
+   const body = JSON.parse(context.event.body || "{}");
+   const dids: string[] = body.dids || [];
+   const followLexicon: string = body.followLexicon || "app.bsky.graph.follow";
+
+   if (!Array.isArray(dids) || dids.length === 0) {
+     throw new ValidationError("dids array is required and must not be empty");
    }

-   try {
-     // Parse request body
-     const body = JSON.parse(event.body || "{}");
-     const dids: string[] = body.dids || [];
-     const followLexicon: string = body.followLexicon || "app.bsky.graph.follow";
+   // Limit batch size
+   if (dids.length > 100) {
+     throw new ValidationError("Maximum 100 DIDs per batch");
+   }

-     if (!Array.isArray(dids) || dids.length === 0) {
-       return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "dids array is required and must not be empty",
-         }),
-       };
-     }
+   // Get authenticated agent using SessionService
+   const { agent } = await SessionService.getAgentForSession(context.sessionId);

-     // Limit batch size
-     if (dids.length > 100) {
-       return {
-         statusCode: 400,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "Maximum 100 DIDs per batch" }),
-       };
-     }
+   // Build follow status map
+   const followStatus: Record<string, boolean> = {};

-     // Get session from cookie
-     const cookies = event.headers.cookie
-       ? cookie.parse(event.headers.cookie)
-       : {};
-     const sessionId = cookies.atlast_session;
+   // Initialize all as not following
+   dids.forEach((did) => {
+     followStatus[did] = false;
+   });

-     if (!sessionId) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({ error: "No session cookie" }),
-       };
-     }
+   // Query user's follow graph for the specific lexicon
+   try {
+     let cursor: string | undefined = undefined;
+     let hasMore = true;
+     const didsSet = new Set(dids);

-     // Get authenticated agent using SessionManager
-     const { agent, did: userDid } =
-       await SessionManager.getAgentForSession(sessionId);
+     while (hasMore && didsSet.size > 0) {
+       const response = await agent.api.com.atproto.repo.listRecords({
+         repo: context.did,
+         collection: followLexicon,
+         limit: 100,
+         cursor,
+       });

-     // Build follow status map
-     const followStatus: Record<string, boolean> = {};
-
-     // Initialize all as not following
-     dids.forEach((did) => {
-       followStatus[did] = false;
-     });
-
-     // Query user's follow graph for the specific lexicon
-     try {
-       let cursor: string | undefined = undefined;
-       let hasMore = true;
-       const didsSet = new Set(dids);
-
-       while (hasMore && didsSet.size > 0) {
-         const response = await agent.api.com.atproto.repo.listRecords({
-           repo: userDid,
-           collection: followLexicon,
-           limit: 100,
-           cursor,
-         });
-
-         // Check each record
-         for (const record of response.data.records) {
-           const followRecord = record.value as any;
-           if (followRecord?.subject && didsSet.has(followRecord.subject)) {
-             followStatus[followRecord.subject] = true;
-             didsSet.delete(followRecord.subject); // Found it, no need to keep checking
-           }
+       // Check each record
+       for (const record of response.data.records) {
+         const followRecord = record.value as any;
+         if (followRecord?.subject && didsSet.has(followRecord.subject)) {
+           followStatus[followRecord.subject] = true;
+           didsSet.delete(followRecord.subject); // Found it, no need to keep checking
          }
+       }

-         cursor = response.data.cursor;
-         hasMore = !!cursor;
+       cursor = response.data.cursor;
+       hasMore = !!cursor;

-         // If we've found all DIDs, break early
-         if (didsSet.size === 0) {
-           break;
-         }
+       // If we've found all DIDs, break early
+       if (didsSet.size === 0) {
+         break;
        }
-     } catch (error) {
-       console.error("Error querying follow graph:", error);
-       // On error, return all as false (not following) - fail safe
      }
-
-     return {
-       statusCode: 200,
-       headers: {
-         "Content-Type": "application/json",
-         "Access-Control-Allow-Origin": "*",
-       },
-       body: JSON.stringify({ followStatus }),
-     };
    } catch (error) {
-     console.error("Check follow status error:", error);
+     console.error("Error querying follow graph:", error);
+     // On error, return all as false (not following) - fail safe
+   }

-     // Handle authentication errors specifically
-     if (error instanceof Error && error.message.includes("session")) {
-       return {
-         statusCode: 401,
-         headers: { "Content-Type": "application/json" },
-         body: JSON.stringify({
-           error: "Invalid or expired session",
-           details: error.message,
-         }),
-       };
-     }
+   return successResponse({ followStatus });
+ };

-     return {
-       statusCode: 500,
-       headers: { "Content-Type": "application/json" },
-       body: JSON.stringify({
-         error: "Failed to check follow status",
-         details: error instanceof Error ? error.message : "Unknown error",
-       }),
-     };
-   }
- };
+ export const handler = withAuthErrorHandling(checkFollowStatusHandler);
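After this change the same `listRecords` pagination with early exit appears in three handlers (batch-follow-users, batch-search-actors, check-follow-status). If it is ever consolidated into the shared layer, a helper could look roughly like the sketch below; the function name and the minimal structural agent type are hypothetical, but the loop body mirrors the code above.

```ts
// Possible shared helper for scanning a repo's follow records (hypothetical, not in this diff).
// Minimal structural type for what the helper needs from the authenticated agent.
type RepoLister = {
  api: {
    com: {
      atproto: {
        repo: {
          listRecords: (params: {
            repo: string;
            collection: string;
            limit: number;
            cursor?: string;
          }) => Promise<{
            data: { records: Array<{ value: unknown }>; cursor?: string };
          }>;
        };
      };
    };
  };
};

export async function collectFollowedSubjects(
  agent: RepoLister,
  repoDid: string,
  collection: string,
  targetDids: string[],
): Promise<Set<string>> {
  const remaining = new Set(targetDids);
  const followed = new Set<string>();
  let cursor: string | undefined;

  do {
    const res = await agent.api.com.atproto.repo.listRecords({
      repo: repoDid,
      collection,
      limit: 100,
      cursor,
    });
    for (const record of res.data.records) {
      const subject = (record.value as any)?.subject;
      if (subject && remaining.has(subject)) {
        followed.add(subject);
        remaining.delete(subject); // stop scanning for DIDs already found
      }
    }
    cursor = res.data.cursor;
  } while (cursor && remaining.size > 0);

  return followed;
}
```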
netlify/functions/db-helpers.ts  (-304, file deleted)
···
- import { getDbClient } from "./db";
-
- export async function createUpload(
-   uploadId: string,
-   did: string,
-   sourcePlatform: string,
-   totalUsers: number,
-   matchedUsers: number,
- ) {
-   const sql = getDbClient();
-   await sql`
-     INSERT INTO user_uploads (upload_id, did, source_platform, total_users, matched_users, unmatched_users)
-     VALUES (${uploadId}, ${did}, ${sourcePlatform}, ${totalUsers}, ${matchedUsers}, ${totalUsers - matchedUsers})
-     ON CONFLICT (upload_id) DO NOTHING
-   `;
- }
-
- export async function getOrCreateSourceAccount(
-   sourcePlatform: string,
-   sourceUsername: string,
- ): Promise<number> {
-   const sql = getDbClient();
-   const normalized = sourceUsername.toLowerCase().replace(/[._-]/g, "");
-
-   const result = await sql`
-     INSERT INTO source_accounts (source_platform, source_username, normalized_username)
-     VALUES (${sourcePlatform}, ${sourceUsername}, ${normalized})
-     ON CONFLICT (source_platform, normalized_username) DO UPDATE SET
-       source_username = ${sourceUsername}
-     RETURNING id
-   `;
-
-   return (result as any[])[0].id;
- }
-
- export async function linkUserToSourceAccount(
-   uploadId: string,
-   did: string,
-   sourceAccountId: number,
-   sourceDate: string,
- ) {
-   const sql = getDbClient();
-   await sql`
-     INSERT INTO user_source_follows (upload_id, did, source_account_id, source_date)
-     VALUES (${uploadId}, ${did}, ${sourceAccountId}, ${sourceDate})
-     ON CONFLICT (upload_id, source_account_id) DO NOTHING
-   `;
- }
-
- export async function storeAtprotoMatch(
-   sourceAccountId: number,
-   atprotoDid: string,
-   atprotoHandle: string,
-   atprotoDisplayName: string | undefined,
-   atprotoAvatar: string | undefined,
-   matchScore: number,
-   postCount: number,
-   followerCount: number,
-   followStatus?: Record<string, boolean>,
- ): Promise<number> {
-   const sql = getDbClient();
-   const result = await sql`
-     INSERT INTO atproto_matches (
-       source_account_id, atproto_did, atproto_handle,
-       atproto_display_name, atproto_avatar, match_score,
-       post_count, follower_count, follow_status
-     )
-     VALUES (
-       ${sourceAccountId}, ${atprotoDid}, ${atprotoHandle},
-       ${atprotoDisplayName || null}, ${atprotoAvatar || null}, ${matchScore},
-       ${postCount || 0}, ${followerCount || 0}, ${JSON.stringify(followStatus || {})}
-     )
-     ON CONFLICT (source_account_id, atproto_did) DO UPDATE SET
-       atproto_handle = ${atprotoHandle},
-       atproto_display_name = ${atprotoDisplayName || null},
-       atproto_avatar = ${atprotoAvatar || null},
-       match_score = ${matchScore},
-       post_count = ${postCount},
-       follower_count = ${followerCount},
-       follow_status = COALESCE(atproto_matches.follow_status, '{}'::jsonb) || ${JSON.stringify(followStatus || {})},
-       last_verified = NOW()
-     RETURNING id
-   `;
-
-   return (result as any[])[0].id;
- }
-
- export async function markSourceAccountMatched(sourceAccountId: number) {
-   const sql = getDbClient();
-   await sql`
-     UPDATE source_accounts
-     SET match_found = true, match_found_at = NOW()
-     WHERE id = ${sourceAccountId}
-   `;
- }
-
- export async function createUserMatchStatus(
-   did: string,
-   atprotoMatchId: number,
-   sourceAccountId: number,
-   viewed: boolean = false,
- ) {
-   const sql = getDbClient();
-   await sql`
-     INSERT INTO user_match_status (did, atproto_match_id, source_account_id, viewed, viewed_at)
-     VALUES (${did}, ${atprotoMatchId}, ${sourceAccountId}, ${viewed}, ${viewed ? "NOW()" : null})
-     ON CONFLICT (did, atproto_match_id) DO UPDATE SET
-       viewed = ${viewed},
-       viewed_at = CASE WHEN ${viewed} THEN NOW() ELSE user_match_status.viewed_at END
-   `;
- }
-
- // NEW: Bulk operations for Phase 2
- export async function bulkCreateSourceAccounts(
-   sourcePlatform: string,
-   usernames: string[],
- ): Promise<Map<string, number>> {
-   const sql = getDbClient();
-
-   // Prepare bulk insert values
-   const values = usernames.map((username) => ({
-     platform: sourcePlatform,
-     username: username,
-     normalized: username.toLowerCase().replace(/[._-]/g, ""),
-   }));
-
-   // Build bulk insert query with unnest
-   const platforms = values.map((v) => v.platform);
-   const source_usernames = values.map((v) => v.username);
-   const normalized = values.map((v) => v.normalized);
-
-   const result = await sql`
-     INSERT INTO source_accounts (source_platform, source_username, normalized_username)
-     SELECT *
-     FROM UNNEST(
-       ${platforms}::text[],
-       ${source_usernames}::text[],
-       ${normalized}::text[]
-     ) AS t(source_platform, source_username, normalized_username)
-     ON CONFLICT (source_platform, normalized_username) DO UPDATE
-       SET source_username = EXCLUDED.source_username
-     RETURNING id, normalized_username
-   `;
-
-   // Create map of normalized username to ID
-   const idMap = new Map<string, number>();
-   for (const row of result as any[]) {
-     idMap.set(row.normalized_username, row.id);
-   }
-
-   return idMap;
- }
-
- export async function bulkLinkUserToSourceAccounts(
-   uploadId: string,
-   did: string,
-   links: Array<{ sourceAccountId: number; sourceDate: string }>,
- ) {
-   const sql = getDbClient();
-
-   const numLinks = links.length;
-   if (numLinks === 0) return;
-
-   // Extract arrays for columns that change
-   const sourceAccountIds = links.map((l) => l.sourceAccountId);
-   const sourceDates = links.map((l) => l.sourceDate);
-
-   // Create arrays for the static columns
-   const uploadIds = Array(numLinks).fill(uploadId);
-   const dids = Array(numLinks).fill(did);
-
-   // Use the parallel UNNEST pattern, which is proven to work in other functions
-   await sql`
-     INSERT INTO user_source_follows (upload_id, did, source_account_id, source_date)
-     SELECT * FROM UNNEST(
-       ${uploadIds}::text[],
-       ${dids}::text[],
-       ${sourceAccountIds}::integer[],
-       ${sourceDates}::text[]
-     ) AS t(upload_id, did, source_account_id, source_date)
-     ON CONFLICT (upload_id, source_account_id) DO NOTHING
-   `;
- }
- // ====================================================================
-
- export async function bulkStoreAtprotoMatches(
-   matches: Array<{
-     sourceAccountId: number;
-     atprotoDid: string;
-     atprotoHandle: string;
-     atprotoDisplayName?: string;
-     atprotoAvatar?: string;
-     atprotoDescription?: string;
-     matchScore: number;
-     postCount?: number;
-     followerCount?: number;
-     followStatus?: Record<string, boolean>;
-   }>,
- ): Promise<Map<string, number>> {
-   const sql = getDbClient();
-
-   if (matches.length === 0) return new Map();
-
-   const sourceAccountId = matches.map((m) => m.sourceAccountId);
-   const atprotoDid = matches.map((m) => m.atprotoDid);
-   const atprotoHandle = matches.map((m) => m.atprotoHandle);
-   const atprotoDisplayName = matches.map((m) => m.atprotoDisplayName || null);
-   const atprotoAvatar = matches.map((m) => m.atprotoAvatar || null);
-   const atprotoDescription = matches.map((m) => m.atprotoDescription || null);
-   const matchScore = matches.map((m) => m.matchScore);
-   const postCount = matches.map((m) => m.postCount || 0);
-   const followerCount = matches.map((m) => m.followerCount || 0);
-   const followStatus = matches.map((m) => JSON.stringify(m.followStatus || {}));
-
-   const result = await sql`
-     INSERT INTO atproto_matches (
-       source_account_id, atproto_did, atproto_handle,
-       atproto_display_name, atproto_avatar, atproto_description,
-       match_score, post_count, follower_count, follow_status
-     )
-     SELECT * FROM UNNEST(
-       ${sourceAccountId}::integer[],
-       ${atprotoDid}::text[],
-       ${atprotoHandle}::text[],
-       ${atprotoDisplayName}::text[],
-       ${atprotoAvatar}::text[],
-       ${atprotoDescription}::text[],
-       ${matchScore}::integer[],
-       ${postCount}::integer[],
-       ${followerCount}::integer[],
-       ${followStatus}::jsonb[]
-     ) AS t(
-       source_account_id, atproto_did, atproto_handle,
-       atproto_display_name, atproto_avatar, match_score,
-       post_count, follower_count, follow_status
-     )
-     ON CONFLICT (source_account_id, atproto_did) DO UPDATE SET
-       atproto_handle = EXCLUDED.atproto_handle,
-       atproto_display_name = EXCLUDED.atproto_display_name,
-       atproto_avatar = EXCLUDED.atproto_avatar,
-       atproto_description = EXCLUDED.atproto_description,
-       match_score = EXCLUDED.match_score,
-       post_count = EXCLUDED.post_count,
-       follower_count = EXCLUDED.follower_count,
-       follow_status = COALESCE(atproto_matches.follow_status, '{}'::jsonb) || EXCLUDED.follow_status,
-       last_verified = NOW()
-     RETURNING id, source_account_id, atproto_did
-   `;
-
-   // Create map of "sourceAccountId:atprotoDid" to match ID
-   const idMap = new Map<string, number>();
-   for (const row of result as any[]) {
-     idMap.set(`${row.source_account_id}:${row.atproto_did}`, row.id);
-   }
-
-   return idMap;
- }
-
- export async function bulkMarkSourceAccountsMatched(
-   sourceAccountIds: number[],
- ) {
-   const sql = getDbClient();
-
-   if (sourceAccountIds.length === 0) return;
-
-   await sql`
-     UPDATE source_accounts
-     SET match_found = true, match_found_at = NOW()
-     WHERE id = ANY(${sourceAccountIds})
-   `;
- }
-
- export async function bulkCreateUserMatchStatus(
-   statuses: Array<{
-     did: string;
-     atprotoMatchId: number;
-     sourceAccountId: number;
-     viewed: boolean;
-   }>,
- ) {
-   const sql = getDbClient();
-
-   if (statuses.length === 0) return;
-
-   const did = statuses.map((s) => s.did);
-   const atprotoMatchId = statuses.map((s) => s.atprotoMatchId);
-   const sourceAccountId = statuses.map((s) => s.sourceAccountId);
-   const viewedFlags = statuses.map((s) => s.viewed);
-   const viewedDates = statuses.map((s) => (s.viewed ? new Date() : null));
-
-   await sql`
-     INSERT INTO user_match_status (did, atproto_match_id, source_account_id, viewed, viewed_at)
-     SELECT * FROM UNNEST(
-       ${did}::text[],
-       ${atprotoMatchId}::integer[],
-       ${sourceAccountId}::integer[],
-       ${viewedFlags}::boolean[],
-       ${viewedDates}::timestamp[]
-     ) AS t(did, atproto_match_id, source_account_id, viewed, viewed_at)
-     ON CONFLICT (did, atproto_match_id) DO UPDATE SET
-       viewed = EXCLUDED.viewed,
-       viewed_at = CASE WHEN EXCLUDED.viewed THEN NOW() ELSE user_match_status.viewed_at END
-   `;
- }
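The inline SQL helpers removed here are replaced by repository calls such as `matchRepo.updateFollowStatus(did, followLexicon, true)` in the handlers above, but the repository module itself is not part of this diff. A sketch of what that one method presumably wraps, reusing the UPDATE statement that used to live inline in batch-follow-users.ts (class internals are assumptions):

```ts
// Hypothetical sketch of part of shared/repositories (MatchRepository); the SQL
// mirrors the statement removed from batch-follow-users.ts in this same PR.
import { neon } from "@neondatabase/serverless";

export class MatchRepository {
  private sql = neon(process.env.NETLIFY_DATABASE_URL!);

  // Merge { [lexicon]: followed } into the JSONB follow_status column and stamp the check time.
  async updateFollowStatus(
    did: string,
    lexicon: string,
    followed: boolean,
  ): Promise<void> {
    await this.sql`
      UPDATE atproto_matches
      SET follow_status = follow_status || jsonb_build_object(${lexicon}, ${followed}),
          last_follow_check = NOW()
      WHERE atproto_did = ${did}
    `;
  }
}
```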
netlify/functions/db.ts  (-212, file deleted)
···
- import { neon, NeonQueryFunction } from "@neondatabase/serverless";
-
- let sql: NeonQueryFunction<any, any> | undefined = undefined;
- let connectionInitialized = false;
-
- export function getDbClient() {
-   if (!sql) {
-     sql = neon(process.env.NETLIFY_DATABASE_URL!);
-     connectionInitialized = true;
-   }
-   return sql;
- }
-
- export async function initDB() {
-   const sql = getDbClient();
-
-   console.log("🧠 Connecting to DB:", process.env.NETLIFY_DATABASE_URL);
-
-   try {
-     const res: any =
-       await sql`SELECT current_database() AS db, current_user AS user, NOW() AS now`;
-     console.log("✅ Connected:", res[0]);
-   } catch (e) {
-     console.error("❌ Connection failed:", e);
-     throw e;
-   }
-
-   // OAuth Tables
-   await sql`
-     CREATE TABLE IF NOT EXISTS oauth_states (
-       key TEXT PRIMARY KEY,
-       data JSONB NOT NULL,
-       created_at TIMESTAMP DEFAULT NOW(),
-       expires_at TIMESTAMP NOT NULL
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS oauth_sessions (
-       key TEXT PRIMARY KEY,
-       data JSONB NOT NULL,
-       created_at TIMESTAMP DEFAULT NOW(),
-       expires_at TIMESTAMP NOT NULL
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS user_sessions (
-       session_id TEXT PRIMARY KEY,
-       did TEXT NOT NULL,
-       created_at TIMESTAMP DEFAULT NOW(),
-       expires_at TIMESTAMP NOT NULL
-     )
-   `;
-
-   // User + Match Tracking
-   await sql`
-     CREATE TABLE IF NOT EXISTS user_uploads (
-       upload_id TEXT PRIMARY KEY,
-       did TEXT NOT NULL,
-       source_platform TEXT NOT NULL,
-       created_at TIMESTAMP DEFAULT NOW(),
-       last_checked TIMESTAMP,
-       total_users INTEGER NOT NULL,
-       matched_users INTEGER DEFAULT 0,
-       unmatched_users INTEGER DEFAULT 0
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS source_accounts (
-       id SERIAL PRIMARY KEY,
-       source_platform TEXT NOT NULL,
-       source_username TEXT NOT NULL,
-       normalized_username TEXT NOT NULL,
-       last_checked TIMESTAMP,
-       match_found BOOLEAN DEFAULT FALSE,
-       match_found_at TIMESTAMP,
-       created_at TIMESTAMP DEFAULT NOW(),
-       UNIQUE(source_platform, normalized_username)
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS user_source_follows (
-       id SERIAL PRIMARY KEY,
-       upload_id TEXT NOT NULL REFERENCES user_uploads(upload_id) ON DELETE CASCADE,
-       did TEXT NOT NULL,
-       source_account_id INTEGER NOT NULL REFERENCES source_accounts(id) ON DELETE CASCADE,
-       source_date TEXT,
-       created_at TIMESTAMP DEFAULT NOW(),
-       UNIQUE(upload_id, source_account_id)
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS atproto_matches (
-       id SERIAL PRIMARY KEY,
-       source_account_id INTEGER NOT NULL REFERENCES source_accounts(id) ON DELETE CASCADE,
-       atproto_did TEXT NOT NULL,
-       atproto_handle TEXT NOT NULL,
-       atproto_display_name TEXT,
-       atproto_avatar TEXT,
-       atproto_description TEXT,
-       post_count INTEGER,
-       follower_count INTEGER,
-       match_score INTEGER NOT NULL,
-       found_at TIMESTAMP DEFAULT NOW(),
-       last_verified TIMESTAMP,
-       is_active BOOLEAN DEFAULT TRUE,
-       follow_status JSONB DEFAULT '{}',
-       last_follow_check TIMESTAMP,
-       UNIQUE(source_account_id, atproto_did)
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS user_match_status (
-       id SERIAL PRIMARY KEY,
-       did TEXT NOT NULL,
-       atproto_match_id INTEGER NOT NULL REFERENCES atproto_matches(id) ON DELETE CASCADE,
-       source_account_id INTEGER NOT NULL REFERENCES source_accounts(id) ON DELETE CASCADE,
-       notified BOOLEAN DEFAULT FALSE,
-       notified_at TIMESTAMP,
-       viewed BOOLEAN DEFAULT FALSE,
-       viewed_at TIMESTAMP,
-       followed BOOLEAN DEFAULT FALSE,
-       followed_at TIMESTAMP,
-       dismissed BOOLEAN DEFAULT FALSE,
-       dismissed_at TIMESTAMP,
-       UNIQUE(did, atproto_match_id)
-     )
-   `;
-
-   await sql`
-     CREATE TABLE IF NOT EXISTS notification_queue (
-       id SERIAL PRIMARY KEY,
-       did TEXT NOT NULL,
-       new_matches_count INTEGER NOT NULL,
-       created_at TIMESTAMP DEFAULT NOW(),
-       sent BOOLEAN DEFAULT FALSE,
-       sent_at TIMESTAMP,
-       retry_count INTEGER DEFAULT 0,
-       last_error TEXT
-     )
-   `;
-
-   // Existing indexes
-   await sql`CREATE INDEX IF NOT EXISTS idx_source_accounts_to_check ON source_accounts(source_platform, match_found, last_checked)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_source_accounts_platform ON source_accounts(source_platform)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_source_follows_did ON user_source_follows(did)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_source_follows_source ON user_source_follows(source_account_id)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_source ON atproto_matches(source_account_id)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_did ON atproto_matches(atproto_did)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_match_status_did_notified ON user_match_status(did, notified, viewed)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_match_status_did_followed ON user_match_status(did, followed)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_notification_queue_pending ON notification_queue(sent, created_at) WHERE sent = false`;
-
-   // ======== Enhanced indexes for common query patterns =========
-
-   // For sorting
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_stats ON atproto_matches(source_account_id, found_at DESC, post_count DESC, follower_count DESC)`;
-
-   // For session lookups (most frequent query)
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_sessions_did ON user_sessions(did)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_sessions_expires ON user_sessions(expires_at)`;
-
-   // For OAuth state/session cleanup
-   await sql`CREATE INDEX IF NOT EXISTS idx_oauth_states_expires ON oauth_states(expires_at)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_oauth_sessions_expires ON oauth_sessions(expires_at)`;
-
-   // For upload queries by user
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_uploads_did_created ON user_uploads(did, created_at DESC)`;
-
-   // For upload details pagination (composite index for ORDER BY + JOIN)
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_source_follows_upload_created ON user_source_follows(upload_id, source_account_id)`;
-
-   // For match status queries
-   await sql`CREATE INDEX IF NOT EXISTS idx_user_match_status_match_id ON user_match_status(atproto_match_id)`;
-
-   // Composite index for the common join pattern in get-upload-details
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_source_active ON atproto_matches(source_account_id, is_active) WHERE is_active = true`;
-
-   // For bulk operations - normalized username lookups
-   await sql`CREATE INDEX IF NOT EXISTS idx_source_accounts_normalized ON source_accounts(normalized_username, source_platform)`;
-
-   // Follow status indexes
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_follow_status ON atproto_matches USING gin(follow_status)`;
-   await sql`CREATE INDEX IF NOT EXISTS idx_atproto_matches_follow_check ON atproto_matches(last_follow_check)`;
-
-   console.log("✅ Database indexes created/verified");
- }
-
- export async function cleanupExpiredSessions() {
-   const sql = getDbClient();
-
-   // Use indexes for efficient cleanup
-   const statesDeleted =
-     await sql`DELETE FROM oauth_states WHERE expires_at < NOW()`;
-   const sessionsDeleted =
-     await sql`DELETE FROM oauth_sessions WHERE expires_at < NOW()`;
-   const userSessionsDeleted =
-     await sql`DELETE FROM user_sessions WHERE expires_at < NOW()`;
-
-   console.log("🧹 Cleanup:", {
-     states: (statesDeleted as any).length,
-     sessions: (sessionsDeleted as any).length,
-     userSessions: (userSessionsDeleted as any).length,
-   });
- }
-
- export { getDbClient as sql };
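With db.ts deleted outright, the lazily created Neon client presumably now lives somewhere under the shared/ layer, which is not shown in this diff. The accessor the repositories would need is small; a sketch based on the removed code (the module path is an assumption):

```ts
// Hypothetical shared/db.ts equivalent, adapted from the deleted getDbClient.
import { neon, NeonQueryFunction } from "@neondatabase/serverless";

let sql: NeonQueryFunction<any, any> | undefined;

export function getDbClient(): NeonQueryFunction<any, any> {
  // Create one Neon client per function instance and reuse it across invocations.
  if (!sql) {
    sql = neon(process.env.NETLIFY_DATABASE_URL!);
  }
  return sql;
}
```

Whether the schema-bootstrapping `initDB` and `cleanupExpiredSessions` routines were moved or dropped is not visible from this diff.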
netlify/functions/get-upload-details.ts  (+86 -176)
···
1
-
import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
2
-
import { userSessions } from "./oauth-stores-db";
3
-
import { getDbClient } from "./db";
4
-
import cookie from "cookie";
1
+
import { AuthenticatedHandler } from "./shared/types";
2
+
import { MatchRepository } from "./shared/repositories";
3
+
import { successResponse } from "./shared/utils";
4
+
import { withAuthErrorHandling } from "./shared/middleware";
5
+
import { ValidationError, NotFoundError } from "./shared/constants/errors";
5
6
6
7
const DEFAULT_PAGE_SIZE = 50;
7
8
const MAX_PAGE_SIZE = 100;
8
9
9
-
export const handler: Handler = async (
10
-
event: HandlerEvent,
11
-
): Promise<HandlerResponse> => {
12
-
try {
13
-
const uploadId = event.queryStringParameters?.uploadId;
14
-
const page = parseInt(event.queryStringParameters?.page || "1");
15
-
const pageSize = Math.min(
16
-
parseInt(
17
-
event.queryStringParameters?.pageSize || String(DEFAULT_PAGE_SIZE),
18
-
),
19
-
MAX_PAGE_SIZE,
20
-
);
10
+
const getUploadDetailsHandler: AuthenticatedHandler = async (context) => {
11
+
const uploadId = context.event.queryStringParameters?.uploadId;
12
+
const page = parseInt(context.event.queryStringParameters?.page || "1");
13
+
const pageSize = Math.min(
14
+
parseInt(
15
+
context.event.queryStringParameters?.pageSize ||
16
+
String(DEFAULT_PAGE_SIZE),
17
+
),
18
+
MAX_PAGE_SIZE,
19
+
);
21
20
22
-
if (!uploadId) {
23
-
return {
24
-
statusCode: 400,
25
-
headers: { "Content-Type": "application/json" },
26
-
body: JSON.stringify({ error: "uploadId is required" }),
27
-
};
28
-
}
21
+
if (!uploadId) {
22
+
throw new ValidationError("uploadId is required");
23
+
}
29
24
30
-
if (page < 1 || pageSize < 1) {
31
-
return {
32
-
statusCode: 400,
33
-
headers: { "Content-Type": "application/json" },
34
-
body: JSON.stringify({ error: "Invalid page or pageSize parameters" }),
35
-
};
36
-
}
25
+
if (page < 1 || pageSize < 1) {
26
+
throw new ValidationError("Invalid page or pageSize parameters");
27
+
}
37
28
38
-
// Get session from cookie
39
-
const cookies = event.headers.cookie
40
-
? cookie.parse(event.headers.cookie)
41
-
: {};
42
-
-    const sessionId = cookies.atlast_session;
+  const matchRepo = new MatchRepository();

-    if (!sessionId) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "No session cookie" }),
-      };
-    }
+  // Fetch paginated results
+  const { results, totalUsers } = await matchRepo.getUploadDetails(
+    uploadId,
+    context.did,
+    page,
+    pageSize,
+  );

-    // Get DID from session
-    const userSession = await userSessions.get(sessionId);
-    if (!userSession) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Invalid or expired session" }),
-      };
-    }
-
-    const sql = getDbClient();
-
-    // Verify upload belongs to user and get total count
-    const uploadCheck = await sql`
-      SELECT upload_id, total_users FROM user_uploads
-      WHERE upload_id = ${uploadId} AND did = ${userSession.did}
-    `;
-
-    if ((uploadCheck as any[]).length === 0) {
-      return {
-        statusCode: 404,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Upload not found" }),
-      };
-    }
+  if (totalUsers === 0) {
+    throw new NotFoundError("Upload not found");
+  }

-    const totalUsers = (uploadCheck as any[])[0].total_users;
-    const totalPages = Math.ceil(totalUsers / pageSize);
-    const offset = (page - 1) * pageSize;
-
-    // Fetch paginated results with optimized query
-    const results = await sql`
-      SELECT
-        sa.source_username,
-        sa.normalized_username,
-        usf.source_date,
-        am.atproto_did,
-        am.atproto_handle,
-        am.atproto_display_name,
-        am.atproto_avatar,
-        am.atproto_description,
-        am.match_score,
-        am.post_count,
-        am.follower_count,
-        am.found_at,
-        am.follow_status,
-        am.last_follow_check,
-        ums.followed,
-        ums.dismissed,
-        -- Calculate if this is a new match (found after upload creation)
-        CASE WHEN am.found_at > uu.created_at THEN 1 ELSE 0 END as is_new_match
-      FROM user_source_follows usf
-      JOIN source_accounts sa ON usf.source_account_id = sa.id
-      JOIN user_uploads uu ON usf.upload_id = uu.upload_id
-      LEFT JOIN atproto_matches am ON sa.id = am.source_account_id AND am.is_active = true
-      LEFT JOIN user_match_status ums ON am.id = ums.atproto_match_id AND ums.did = ${userSession.did}
-      WHERE usf.upload_id = ${uploadId}
-      ORDER BY
-        -- 1. Users with matches first
-        CASE WHEN am.atproto_did IS NOT NULL THEN 0 ELSE 1 END,
-        -- 2. New matches (found after initial upload)
-        is_new_match DESC,
-        -- 3. Highest post count
-        am.post_count DESC NULLS LAST,
-        -- 4. Highest follower count
-        am.follower_count DESC NULLS LAST,
-        -- 5. Username as tiebreaker
-        sa.source_username
-      LIMIT ${pageSize}
-      OFFSET ${offset}
-    `;
+  const totalPages = Math.ceil(totalUsers / pageSize);

-    // Group results by source username
-    const groupedResults = new Map<string, any>();
+  // Group results by source username
+  const groupedResults = new Map<string, any>();

-    (results as any[]).forEach((row: any) => {
-      const username = row.source_username;
+  results.forEach((row: any) => {
+    const username = row.source_username;

-      // Get or create the entry for this username
-      let userResult = groupedResults.get(username);
+    // Get or create the entry for this username
+    let userResult = groupedResults.get(username);

-      if (!userResult) {
-        userResult = {
-          sourceUser: {
-            username: username,
-            date: row.source_date || "",
-          },
-          atprotoMatches: [],
-        };
-        groupedResults.set(username, userResult); // Add to map, this preserves the order
-      }
+    if (!userResult) {
+      userResult = {
+        sourceUser: {
+          username: username,
+          date: row.source_date || "",
+        },
+        atprotoMatches: [],
+      };
+      groupedResults.set(username, userResult);
+    }

-      // Add the match (if it exists) to the array
-      if (row.atproto_did) {
-        userResult.atprotoMatches.push({
-          did: row.atproto_did,
-          handle: row.atproto_handle,
-          displayName: row.atproto_display_name,
-          avatar: row.atproto_avatar,
-          description: row.atproto_description,
-          matchScore: row.match_score,
-          postCount: row.post_count,
-          followerCount: row.follower_count,
-          foundAt: row.found_at,
-          followed: row.followed || false,
-          dismissed: row.dismissed || false,
-          followStatus: row.follow_status || {},
-        });
-      }
-    });
+    // Add the match (if it exists) to the array
+    if (row.atproto_did) {
+      userResult.atprotoMatches.push({
+        did: row.atproto_did,
+        handle: row.atproto_handle,
+        displayName: row.atproto_display_name,
+        avatar: row.atproto_avatar,
+        description: row.atproto_description,
+        matchScore: row.match_score,
+        postCount: row.post_count,
+        followerCount: row.follower_count,
+        foundAt: row.found_at,
+        followed: row.followed || false,
+        dismissed: row.dismissed || false,
+        followStatus: row.follow_status || {},
+      });
+    }
+  });

-    const searchResults = Array.from(groupedResults.values());
+  const searchResults = Array.from(groupedResults.values());

-    return {
-      statusCode: 200,
-      headers: {
-        "Content-Type": "application/json",
-        "Access-Control-Allow-Origin": "*",
-        "Cache-Control": "private, max-age=600", // 10 minute browser cache
+  return successResponse(
+    {
+      results: searchResults,
+      pagination: {
+        page,
+        pageSize,
+        totalPages,
+        totalUsers,
+        hasNextPage: page < totalPages,
+        hasPrevPage: page > 1,
      },
-      body: JSON.stringify({
-        results: searchResults,
-        pagination: {
-          page,
-          pageSize,
-          totalPages,
-          totalUsers,
-          hasNextPage: page < totalPages,
-          hasPrevPage: page > 1,
-        },
-      }),
-    };
-  } catch (error) {
-    console.error("Get upload details error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: "Failed to fetch upload details",
-        details: error instanceof Error ? error.message : "Unknown error",
-      }),
-    };
-  }
+    },
+    200,
+    {
+      "Cache-Control": "private, max-age=600",
+    },
+  );
};
+
+export const handler = withAuthErrorHandling(getUploadDetailsHandler);
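Note: the shared/middleware module that provides AuthenticatedHandler and withAuthErrorHandling is not part of this diff. The following is only a minimal sketch, assuming the { event, sessionId, did } context shape the handlers above consume; the session lookup is a placeholder, not the project's actual SessionService API.

import { HandlerEvent, HandlerResponse } from "@netlify/functions";

export interface AuthenticatedContext {
  event: HandlerEvent;
  sessionId: string;
  did: string;
}

export type AuthenticatedHandler = (
  context: AuthenticatedContext,
) => Promise<HandlerResponse>;

// Small local helper for JSON error responses
const json = (statusCode: number, body: unknown): HandlerResponse => ({
  statusCode,
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(body),
});

export function withAuthErrorHandling(handler: AuthenticatedHandler) {
  return async (event: HandlerEvent): Promise<HandlerResponse> => {
    try {
      // Read the atlast_session cookie (the same cookie the old handlers parsed by hand)
      const match = /(?:^|;\s*)atlast_session=([^;]+)/.exec(event.headers.cookie || "");
      const sessionId = match?.[1];
      if (!sessionId) return json(401, { error: "No session cookie" });

      // Placeholder stand-in for SessionService.getDIDForSession
      const did = await lookupDidForSession(sessionId);
      if (!did) return json(401, { error: "Invalid or expired session" });

      return await handler({ event, sessionId, did });
    } catch (error) {
      console.error("Handler error:", error);
      return json(500, {
        error: error instanceof Error ? error.message : "Unknown error",
      });
    }
  };
}

// Hypothetical: the real project resolves this via its session store
declare function lookupDidForSession(sessionId: string): Promise<string | undefined>;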
+20 -74  netlify/functions/get-uploads.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { userSessions } from "./oauth-stores-db";
-import { getDbClient } from "./db";
-import cookie from "cookie";
+import { AuthenticatedHandler } from "./shared/types";
+import { UploadRepository } from "./shared/repositories";
+import { successResponse } from "./shared/utils";
+import { withAuthErrorHandling } from "./shared/middleware";

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
-  try {
-    // Get session from cookie
-    const cookies = event.headers.cookie
-      ? cookie.parse(event.headers.cookie)
-      : {};
-    const sessionId = cookies.atlast_session;
+const getUploadsHandler: AuthenticatedHandler = async (context) => {
+  const uploadRepo = new UploadRepository();

-    if (!sessionId) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "No session cookie" }),
-      };
-    }
-
-    // Get DID from session
-    const userSession = await userSessions.get(sessionId);
-    if (!userSession) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Invalid or expired session" }),
-      };
-    }
-
-    const sql = getDbClient();
-
-    // Fetch all uploads for this user
-    const uploads = await sql`
-      SELECT
-        upload_id,
-        source_platform,
-        created_at,
-        total_users,
-        matched_users,
-        unmatched_users
-      FROM user_uploads
-      WHERE did = ${userSession.did}
-      ORDER BY created_at DESC
-    `;
+  // Fetch all uploads for this user
+  const uploads = await uploadRepo.getUserUploads(context.did);

-    return {
-      statusCode: 200,
-      headers: {
-        "Content-Type": "application/json",
-        "Access-Control-Allow-Origin": "*",
-      },
-      body: JSON.stringify({
-        uploads: (uploads as any[]).map((upload: any) => ({
-          uploadId: upload.upload_id,
-          sourcePlatform: upload.source_platform,
-          createdAt: upload.created_at,
-          totalUsers: upload.total_users,
-          matchedUsers: upload.matched_users,
-          unmatchedUsers: upload.unmatched_users,
-        })),
-      }),
-    };
-  } catch (error) {
-    console.error("Get uploads error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: "Failed to fetch uploads",
-        details: error instanceof Error ? error.message : "Unknown error",
-      }),
-    };
-  }
+  return successResponse({
+    uploads: uploads.map((upload) => ({
+      uploadId: upload.upload_id,
+      sourcePlatform: upload.source_platform,
+      createdAt: upload.created_at,
+      totalUsers: upload.total_users,
+      matchedUsers: upload.matched_users,
+      unmatchedUsers: upload.unmatched_users,
+    })),
+  });
};
+
+export const handler = withAuthErrorHandling(getUploadsHandler);
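The successResponse helper from shared/utils is also not shown in this diff. Given how it is called above (data, optional status code, optional extra headers) and the { success, data } envelope that unwrapResponse in realApiClient.ts expects, a plausible sketch could look like this; the real implementation may differ.

import { HandlerResponse } from "@netlify/functions";

export function successResponse(
  data: unknown,
  statusCode: number = 200,
  extraHeaders: Record<string, string> = {},
): HandlerResponse {
  return {
    statusCode,
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
      ...extraHeaders,
    },
    // Envelope shape matches what unwrapResponse checks for on the client
    body: JSON.stringify({ success: true, data }),
  };
}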
+10 -22  netlify/functions/init-db.ts
···
-import { Handler } from "@netlify/functions";
-import { initDB } from "./db";
+import { SimpleHandler } from "./shared/types/api.types";
+import { DatabaseService } from "./shared/services/database";
+import { withErrorHandling } from "./shared/middleware";
+import { successResponse } from "./shared/utils";

-export const handler: Handler = async () => {
-  try {
-    await initDB();
-    return {
-      statusCode: 200,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ message: "Database initialized successfully" }),
-    };
-  } catch (error) {
-    console.error("Database initialization error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: "Failed to initialize database",
-        details: error instanceof Error ? error.message : "Unknown error",
-        stack: error instanceof Error ? error.stack : undefined,
-      }),
-    };
-  }
+const initDbHandler: SimpleHandler = async () => {
+  const dbService = new DatabaseService();
+  await dbService.initDatabase();
+  return successResponse({ message: "Database initialized successfully" });
};
+
+export const handler = withErrorHandling(initDbHandler);
+32 -51  netlify/functions/logout.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { SessionManager } from "./session-manager";
-import { getOAuthConfig } from "./oauth-config";
-import cookie from "cookie";
+import { SimpleHandler } from "./shared/types/api.types";
+import { SessionService } from "./shared/services/session";
+import { getOAuthConfig } from "./shared/services/oauth";
+import { extractSessionId } from "./shared/middleware";
+import { withErrorHandling } from "./shared/middleware";

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
+const logoutHandler: SimpleHandler = async (event) => {
  // Only allow POST for logout
  if (event.httpMethod !== "POST") {
-    return {
-      statusCode: 405,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ error: "Method not allowed" }),
-    };
+    throw new Error("Method not allowed");
  }

-  try {
-    console.log("[logout] Starting logout process...");
-    console.log("[logout] Cookies received:", event.headers.cookie);
+  console.log("[logout] Starting logout process...");
+  console.log("[logout] Cookies received:", event.headers.cookie);

-    // Get session from cookie
-    const cookies = event.headers.cookie
-      ? cookie.parse(event.headers.cookie)
-      : {};
-    const sessionId = cookies.atlast_session;
-    console.log("[logout] Session ID from cookie:", sessionId);
+  const sessionId = extractSessionId(event);
+  console.log("[logout] Session ID from cookie:", sessionId);

-    if (sessionId) {
-      // Use SessionManager to properly clean up both user and OAuth sessions
-      await SessionManager.deleteSession(sessionId);
-      console.log("[logout] Successfully deleted session:", sessionId);
-    }
+  if (sessionId) {
+    // Use SessionService to properly clean up both user and OAuth sessions
+    await SessionService.deleteSession(sessionId);
+    console.log("[logout] Successfully deleted session:", sessionId);
+  }

-    // Clear the session cookie with matching flags from when it was set
-    const config = getOAuthConfig();
-    const isDev = config.clientType === "loopback";
+  // Clear the session cookie with matching flags from when it was set
+  const config = getOAuthConfig();
+  const isDev = config.clientType === "loopback";

-    const cookieFlags = isDev
-      ? "HttpOnly; SameSite=Lax; Max-Age=0; Path=/"
-      : "HttpOnly; SameSite=Lax; Max-Age=0; Path=/; Secure";
+  const cookieFlags = isDev
+    ? `HttpOnly; SameSite=Lax; Max-Age=0; Path=/`
+    : `HttpOnly; SameSite=Lax; Max-Age=0; Path=/; Secure`;

-    return {
-      statusCode: 200,
-      headers: {
-        "Content-Type": "application/json",
-        "Set-Cookie": `atlast_session=; ${cookieFlags}`,
-      },
-      body: JSON.stringify({ success: true }),
-    };
-  } catch (error) {
-    console.error("Logout error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: "Failed to logout",
-        details: error instanceof Error ? error.message : "Unknown error",
-      }),
-    };
-  }
+  return {
+    statusCode: 200,
+    headers: {
+      "Content-Type": "application/json",
+      "Set-Cookie": `atlast_session=; ${cookieFlags}`,
+    },
+    body: JSON.stringify({ success: true }),
+  };
};
+
+export const handler = withErrorHandling(logoutHandler);
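extractSessionId replaces the inline cookie parsing removed above. A sketch under that assumption (the real shared/middleware helper may keep using the cookie package rather than manual parsing):

import { HandlerEvent } from "@netlify/functions";

export function extractSessionId(event: HandlerEvent): string | undefined {
  const header = event.headers.cookie;
  if (!header) return undefined;
  // Split "a=1; b=2" pairs and return the atlast_session value, if present
  for (const pair of header.split(";")) {
    const [name, ...rest] = pair.trim().split("=");
    if (name === "atlast_session") return decodeURIComponent(rest.join("="));
  }
  return undefined;
}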
+42 -66  netlify/functions/oauth-callback.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { createOAuthClient } from "./client";
-import { userSessions } from "./oauth-stores-db";
-import { getOAuthConfig } from "./oauth-config";
+import { SimpleHandler } from "./shared/types/api.types";
+import { createOAuthClient, getOAuthConfig } from "./shared/services/oauth";
+import { userSessions } from "./shared/services/session";
+import { redirectResponse } from "./shared/utils";
+import { withErrorHandling } from "./shared/middleware";
+import { CONFIG } from "./shared/constants";
import * as crypto from "crypto";

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
-  const config = getOAuthConfig();
+const oauthCallbackHandler: SimpleHandler = async (event) => {
+  const config = getOAuthConfig(event);
  const isDev = config.clientType === "loopback";

  let currentUrl = isDev
    ? "http://127.0.0.1:8888"
-    : process.env.DEPLOY_URL
-      ? `https://${new URL(process.env.DEPLOY_URL).host}`
-      : process.env.URL ||
-        process.env.DEPLOY_PRIME_URL ||
-        "https://atlast.byarielm.fyi";
+    : config.redirectUri.replace("/.netlify/functions/oauth-callback", "");

-  try {
-    const params = new URLSearchParams(event.rawUrl.split("?")[1] || "");
-    const code = params.get("code");
-    const state = params.get("state");
+  const params = new URLSearchParams(event.rawUrl.split("?")[1] || "");
+  const code = params.get("code");
+  const state = params.get("state");

-    console.log(
-      "[oauth-callback] Processing callback - Mode:",
-      isDev ? "loopback" : "production",
-    );
-    console.log("[oauth-callback] URL:", currentUrl);
+  console.log(
+    "[oauth-callback] Processing callback - Mode:",
+    isDev ? "loopback" : "production",
+  );
+  console.log("[oauth-callback] URL:", currentUrl);

-    if (!code || !state) {
-      return {
-        statusCode: 302,
-        headers: {
-          Location: `${currentUrl}/?error=Missing OAuth parameters`,
-        },
-        body: "",
-      };
-    }
+  if (!code || !state) {
+    return redirectResponse(`${currentUrl}/?error=Missing OAuth parameters`);
+  }

-    // Create OAuth client using shared helper
-    const client = await createOAuthClient();
+  // Create OAuth client using shared helper
+  const client = await createOAuthClient();

-    // Process the OAuth callback
-    const result = await client.callback(params);
+  // Process the OAuth callback
+  const result = await client.callback(params);

-    console.log(
-      "[oauth-callback] Successfully authenticated DID:",
-      result.session.did,
-    );
+  console.log(
+    "[oauth-callback] Successfully authenticated DID:",
+    result.session.did,
+  );

-    // Store session
-    const sessionId = crypto.randomUUID();
-    const did = result.session.did;
-    await userSessions.set(sessionId, { did });
+  // Store session
+  const sessionId = crypto.randomUUID();
+  const did = result.session.did;
+  await userSessions.set(sessionId, { did });

-    console.log("[oauth-callback] Created user session:", sessionId);
+  console.log("[oauth-callback] Created user session:", sessionId);

-    // Cookie flags - no Secure flag for loopback
-    const cookieFlags = isDev
-      ? "HttpOnly; SameSite=Lax; Max-Age=1209600; Path=/"
-      : "HttpOnly; SameSite=Lax; Max-Age=1209600; Path=/; Secure";
+  // Cookie flags - no Secure flag for loopback
+  const cookieFlags = isDev
+    ? `HttpOnly; SameSite=Lax; Max-Age=${CONFIG.COOKIE_MAX_AGE}; Path=/`
+    : `HttpOnly; SameSite=Lax; Max-Age=${CONFIG.COOKIE_MAX_AGE}; Path=/; Secure`;

-    return {
-      statusCode: 302,
-      headers: {
-        Location: `${currentUrl}/?session=${sessionId}`,
-        "Set-Cookie": `atlast_session=${sessionId}; ${cookieFlags}`,
-      },
-      body: "",
-    };
-  } catch (error) {
-    console.error("OAuth callback error:", error);
-    return {
-      statusCode: 302,
-      headers: {
-        Location: `${currentUrl}/?error=OAuth failed: ${error instanceof Error ? error.message : "Unknown error"}`,
-      },
-      body: "",
-    };
-  }
+  return redirectResponse(`${currentUrl}/?session=${sessionId}`, [
+    `atlast_session=${sessionId}; ${cookieFlags}`,
+  ]);
};
+
+export const handler = withErrorHandling(oauthCallbackHandler);
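redirectResponse and CONFIG.COOKIE_MAX_AGE come from shared modules that are not included in this diff. The following is a sketch based on the 302 responses they replace, assuming COOKIE_MAX_AGE keeps the previous literal of 1209600 seconds (14 days); the cookie handling via multiValueHeaders is an assumption about how the helper is built, not confirmed by the diff.

import { HandlerResponse } from "@netlify/functions";

export const CONFIG = {
  COOKIE_MAX_AGE: 14 * 24 * 60 * 60, // 1209600 seconds, matching the old hard-coded value
};

export function redirectResponse(
  location: string,
  setCookies: string[] = [],
): HandlerResponse {
  const response: HandlerResponse = {
    statusCode: 302,
    headers: { Location: location },
    body: "",
  };
  // Emit each cookie as its own Set-Cookie header when any are provided
  if (setCookies.length > 0) {
    response.multiValueHeaders = { "Set-Cookie": setCookies };
  }
  return response;
}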
-61  netlify/functions/oauth-config.ts
···
-export function getOAuthConfig() {
-  // Check if we have a public URL (production or --live mode)
-  const baseUrl =
-    process.env.URL || process.env.DEPLOY_URL || process.env.DEPLOY_PRIME_URL;
-
-  // Development: loopback client for local dev
-  // Check if we're running on localhost (true local dev)
-  const isLocalhost =
-    !baseUrl ||
-    baseUrl.includes("localhost") ||
-    baseUrl.includes("127.0.0.1") ||
-    baseUrl.startsWith("http://localhost") ||
-    baseUrl.startsWith("http://127.0.0.1");
-
-  // Use loopback for localhost, production for everything else
-  const isDev = isLocalhost;
-
-  if (isDev) {
-    const port = process.env.PORT || "8888";
-
-    // Special loopback client_id format with query params
-    const clientId = `http://localhost?${new URLSearchParams([
-      [
-        "redirect_uri",
-        `http://127.0.0.1:${port}/.netlify/functions/oauth-callback`,
-      ],
-      ["scope", "atproto transition:generic"],
-    ])}`;
-
-    console.log("Using loopback OAuth for local development");
-    console.log("Access your app at: http://127.0.0.1:" + port);
-
-    return {
-      clientId: clientId,
-      redirectUri: `http://127.0.0.1:${port}/.netlify/functions/oauth-callback`,
-      jwksUri: undefined,
-      clientType: "loopback" as const,
-    };
-  }
-
-  // Production: discoverable client logic
-  if (!baseUrl) {
-    throw new Error("No public URL available");
-  }
-
-  console.log("Using confidential OAuth client for production");
-  console.log("OAuth Config URLs:", {
-    DEPLOY_PRIME_URL: process.env.DEPLOY_PRIME_URL,
-    URL: process.env.URL,
-    CONTEXT: process.env.CONTEXT,
-    using: baseUrl,
-  });
-
-  return {
-    clientId: `${baseUrl}/oauth-client-metadata.json`, // discoverable client URL
-    redirectUri: `${baseUrl}/.netlify/functions/oauth-callback`,
-    jwksUri: `${baseUrl}/.netlify/functions/jwks`,
-    clientType: "discoverable" as const,
-    usePrivateKey: true,
-  };
-}
+25 -67  netlify/functions/oauth-start.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { createOAuthClient } from "./client";
+import { SimpleHandler } from "./shared/types/api.types";
+import { createOAuthClient } from "./shared/services/oauth";
+import { successResponse } from "./shared/utils";
+import { withErrorHandling } from "./shared/middleware";
+import { ValidationError } from "./shared/constants/errors";

interface OAuthStartRequestBody {
  login_hint?: string;
  origin?: string;
}

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
-  try {
-    let loginHint: string | undefined = undefined;
-
-    if (event.body) {
-      const parsed: OAuthStartRequestBody = JSON.parse(event.body);
-      loginHint = parsed.login_hint;
-    }
-
-    if (!loginHint) {
-      return {
-        statusCode: 400,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({
-          error: "login_hint (handle or DID) is required",
-        }),
-      };
-    }
+const oauthStartHandler: SimpleHandler = async (event) => {
+  let loginHint: string | undefined = undefined;

-    console.log("[oauth-start] Starting OAuth flow for:", loginHint);
+  if (event.body) {
+    const parsed: OAuthStartRequestBody = JSON.parse(event.body);
+    loginHint = parsed.login_hint;
+  }

-    // Create OAuth client using shared helper
-    const client = await createOAuthClient();
+  if (!loginHint) {
+    throw new ValidationError("login_hint (handle or DID) is required");
+  }

-    // Start the authorization flow
-    const authUrl = await client.authorize(loginHint, {
-      scope: "atproto transition:generic",
-    });
-
-    console.log("[oauth-start] Generated auth URL for:", loginHint);
+  console.log("[oauth-start] Starting OAuth flow for:", loginHint);

-    return {
-      statusCode: 200,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ url: authUrl.toString() }),
-    };
-  } catch (error) {
-    console.error("OAuth start error:", error);
+  // Create OAuth client using shared helper
+  const client = await createOAuthClient(event);

-    // Provide user-friendly error messages
-    let userMessage = "Failed to start authentication";
+  // Start the authorization flow
+  const authUrl = await client.authorize(loginHint, {
+    scope: "atproto transition:generic",
+  });

-    if (error instanceof Error) {
-      if (
-        error.message.includes("resolve") ||
-        error.message.includes("not found")
-      ) {
-        userMessage =
-          "Account not found. Please check your handle and try again.";
-      } else if (
-        error.message.includes("network") ||
-        error.message.includes("timeout")
-      ) {
-        userMessage =
-          "Network error. Please check your connection and try again.";
-      } else if (error.message.includes("Invalid identifier")) {
-        userMessage =
-          "Invalid handle format. Please use the format: username.bsky.social";
-      }
-    }
+  console.log("[oauth-start] Generated auth URL for:", loginHint);

-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: userMessage,
-        details: error instanceof Error ? error.message : "Unknown error",
-      }),
-    };
-  }
+  return successResponse({ url: authUrl.toString() });
};
+
+export const handler = withErrorHandling(oauthStartHandler);
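The typed errors imported from shared/constants/errors (ValidationError here, NotFoundError and AuthenticationError in other handlers) are not defined anywhere in this diff. A sketch of the classes and the status codes an error-handling wrapper would plausibly map them to; the class names and ERROR_MESSAGES keys are taken from the imports above, while the bodies and mapping are assumptions.

export class ValidationError extends Error {}     // mapped to 400
export class AuthenticationError extends Error {} // mapped to 401
export class NotFoundError extends Error {}       // mapped to 404

export const ERROR_MESSAGES = {
  NO_SESSION_COOKIE: "No session cookie",
  INVALID_SESSION: "Invalid or expired session",
} as const;

// How a wrapper like withErrorHandling might pick a status code
export function statusCodeFor(error: unknown): number {
  if (error instanceof ValidationError) return 400;
  if (error instanceof AuthenticationError) return 401;
  if (error instanceof NotFoundError) return 404;
  return 500;
}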
-94  netlify/functions/oauth-stores-db.ts
···
-import { getDbClient } from "./db";
-
-interface StateData {
-  dpopKey: any;
-  verifier: string;
-  appState?: string;
-}
-
-interface SessionData {
-  dpopKey: any;
-  tokenSet: any;
-}
-
-// Reuse the same DB client across all store instances
-const sql = getDbClient();
-
-export class PostgresStateStore {
-  async get(key: string): Promise<StateData | undefined> {
-    const result = await sql`
-      SELECT data FROM oauth_states
-      WHERE key = ${key} AND expires_at > NOW()
-    `;
-    return (result as Record<string, any>[])[0]?.data as StateData | undefined;
-  }
-
-  async set(key: string, value: StateData): Promise<void> {
-    const expiresAt = new Date(Date.now() + 10 * 60 * 1000); // 10 minutes
-    await sql`
-      INSERT INTO oauth_states (key, data, expires_at)
-      VALUES (${key}, ${JSON.stringify(value)}, ${expiresAt.toISOString()})
-      ON CONFLICT (key) DO UPDATE SET data = ${JSON.stringify(value)}, expires_at = ${expiresAt.toISOString()}
-    `;
-  }
-
-  async del(key: string): Promise<void> {
-    await sql`DELETE FROM oauth_states WHERE key = ${key}`;
-  }
-}
-
-export class PostgresSessionStore {
-  async get(key: string): Promise<SessionData | undefined> {
-    const result = await sql`
-      SELECT data FROM oauth_sessions
-      WHERE key = ${key} AND expires_at > NOW()
-    `;
-    return (result as Record<string, any>[])[0]?.data as
-      | SessionData
-      | undefined;
-  }
-
-  async set(key: string, value: SessionData): Promise<void> {
-    // Session includes tokens, DPoP keys, etc.
-    const expiresAt = new Date(Date.now() + 14 * 24 * 60 * 60 * 1000); // 14 days
-    await sql`
-      INSERT INTO oauth_sessions (key, data, expires_at)
-      VALUES (${key}, ${JSON.stringify(value)}, ${expiresAt})
-      ON CONFLICT (key) DO UPDATE SET data = ${JSON.stringify(value)}, expires_at = ${expiresAt}
-    `;
-  }
-
-  async del(key: string): Promise<void> {
-    await sql`DELETE FROM oauth_sessions WHERE key = ${key}`;
-  }
-}
-
-export class PostgresUserSessionStore {
-  async get(sessionId: string): Promise<{ did: string } | undefined> {
-    const result = await sql`
-      SELECT did FROM user_sessions
-      WHERE session_id = ${sessionId} AND expires_at > NOW()
-    `;
-    const row = (result as Record<string, any>[])[0];
-    return row ? { did: row.did } : undefined;
-  }
-
-  async set(sessionId: string, data: { did: string }): Promise<void> {
-    const expiresAt = new Date(Date.now() + 14 * 24 * 60 * 60 * 1000); // 14 days
-    await sql`
-      INSERT INTO user_sessions (session_id, did, expires_at)
-      VALUES (${sessionId}, ${data.did}, ${expiresAt})
-      ON CONFLICT (session_id) DO UPDATE SET
-        did = ${data.did},
-        expires_at = ${expiresAt}
-    `;
-  }
-
-  async del(sessionId: string): Promise<void> {
-    await sql`DELETE FROM user_sessions WHERE session_id = ${sessionId}`;
-  }
-}
-
-export const stateStore = new PostgresStateStore();
-export const sessionStore = new PostgresSessionStore();
-export const userSessions = new PostgresUserSessionStore();
+155 -225  netlify/functions/save-results.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { userSessions } from "./oauth-stores-db";
-import cookie from "cookie";
+import { AuthenticatedHandler } from "./shared/types";
import {
-  createUpload,
-  bulkCreateSourceAccounts,
-  bulkLinkUserToSourceAccounts,
-  bulkStoreAtprotoMatches,
-  bulkMarkSourceAccountsMatched,
-  bulkCreateUserMatchStatus,
-} from "./db-helpers";
-import { getDbClient } from "./db";
+  UploadRepository,
+  SourceAccountRepository,
+  MatchRepository,
+} from "./shared/repositories";
+import { successResponse } from "./shared/utils";
+import { withAuthErrorHandling } from "./shared/middleware";
+import { ValidationError } from "./shared/constants/errors";

interface SearchResult {
  sourceUser: {
···
  saveData?: boolean;
}

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
-  if (event.httpMethod !== "POST") {
-    return {
-      statusCode: 405,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ error: "Method not allowed" }),
-    };
-  }
-
-  try {
-    // Get session from cookie
-    const cookies = event.headers.cookie
-      ? cookie.parse(event.headers.cookie)
-      : {};
-    const sessionId = cookies.atlast_session;
-
-    if (!sessionId) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "No session cookie" }),
-      };
-    }
-
-    // Get DID from session
-    const userSession = await userSessions.get(sessionId);
-    if (!userSession) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Invalid or expired session" }),
-      };
-    }
-
-    // Parse request body
-    const body: SaveResultsRequest = JSON.parse(event.body || "{}");
-    const { uploadId, sourcePlatform, results, saveData } = body;
-
-    if (!uploadId || !sourcePlatform || !Array.isArray(results)) {
-      return {
-        statusCode: 400,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({
-          error: "uploadId, sourcePlatform, and results are required",
-        }),
-      };
-    }
-
-    // Server-side validation for saveData flag, controlled by frontend
-    if (saveData === false) {
-      console.log(
-        `User ${userSession.did} has data storage disabled - skipping save`,
-      );
-      return {
-        statusCode: 200,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({
-          success: true,
-          message: "Data storage disabled - results not saved",
-          uploadId,
-          totalUsers: results.length,
-          matchedUsers: results.filter((r) => r.atprotoMatches.length > 0)
-            .length,
-          unmatchedUsers: results.filter((r) => r.atprotoMatches.length === 0)
-            .length,
-        }),
-      };
-    }
-
-    const sql = getDbClient();
-    let matchedCount = 0;
-
-    // Check for recent uploads from this user
-    const recentUpload = await sql`
-      SELECT upload_id FROM user_uploads
-      WHERE did = ${userSession.did}
-        AND created_at > NOW() - INTERVAL '5 seconds'
-      ORDER BY created_at DESC
-      LIMIT 1
-    `;
-
-    if ((recentUpload as any[]).length > 0) {
-      console.log(
-        `User ${userSession.did} already saved within 5 seconds, skipping duplicate`,
-      );
-      return {
-        statusCode: 200,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ success: true, message: "Recently saved" }),
-      };
-    }
+const saveResultsHandler: AuthenticatedHandler = async (context) => {
+  // Parse request body
+  const body: SaveResultsRequest = JSON.parse(context.event.body || "{}");
+  const { uploadId, sourcePlatform, results, saveData } = body;

-    // Create upload record FIRST
-    await createUpload(
-      uploadId,
-      userSession.did,
-      sourcePlatform,
-      results.length,
-      0,
+  if (!uploadId || !sourcePlatform || !Array.isArray(results)) {
+    throw new ValidationError(
+      "uploadId, sourcePlatform, and results are required",
    );
+  }

-    // BULK OPERATION 1: Create all source accounts at once
-    const allUsernames = results.map((r) => r.sourceUser.username);
-    const sourceAccountIdMap = await bulkCreateSourceAccounts(
-      sourcePlatform,
-      allUsernames,
+  // Server-side validation for saveData flag, controlled by frontend
+  if (saveData === false) {
+    console.log(
+      `User ${context.did} has data storage disabled - skipping save`,
    );
+    return successResponse({
+      success: true,
+      message: "Data storage disabled - results not saved",
+      uploadId,
+      totalUsers: results.length,
+      matchedUsers: results.filter((r) => r.atprotoMatches.length > 0).length,
+      unmatchedUsers: results.filter((r) => r.atprotoMatches.length === 0)
+        .length,
+    });
+  }

-    // BULK OPERATION 2: Link all users to source accounts
-    const links = results
-      .map((result) => {
-        const normalized = result.sourceUser.username
-          .toLowerCase()
-          .replace(/[._-]/g, "");
-        const sourceAccountId = sourceAccountIdMap.get(normalized);
-        return {
-          sourceAccountId: sourceAccountId!,
-          sourceDate: result.sourceUser.date,
-        };
-      })
-      .filter((link) => link.sourceAccountId !== undefined);
+  const uploadRepo = new UploadRepository();
+  const sourceAccountRepo = new SourceAccountRepository();
+  const matchRepo = new MatchRepository();
+  let matchedCount = 0;

-    await bulkLinkUserToSourceAccounts(uploadId, userSession.did, links);
+  // Check for recent uploads from this user
+  const hasRecent = await uploadRepo.hasRecentUpload(context.did);
+  if (hasRecent) {
+    console.log(
+      `User ${context.did} already saved within 5 seconds, skipping duplicate`,
+    );
+    return successResponse({
+      success: true,
+      message: "Recently saved",
+    });
+  }

-    // BULK OPERATION 3: Store all atproto matches at once
-    const allMatches: Array<{
-      sourceAccountId: number;
-      atprotoDid: string;
-      atprotoHandle: string;
-      atprotoDisplayName?: string;
-      atprotoAvatar?: string;
-      atprotoDescription?: string;
-      matchScore: number;
-      postCount: number;
-      followerCount: number;
-    }> = [];
+  // Create upload record FIRST
+  await uploadRepo.createUpload(
+    uploadId,
+    context.did,
+    sourcePlatform,
+    results.length,
+    0,
+  );

-    const matchedSourceAccountIds: number[] = [];
+  // BULK OPERATION 1: Create all source accounts at once
+  const allUsernames = results.map((r) => r.sourceUser.username);
+  const sourceAccountIdMap = await sourceAccountRepo.bulkCreate(
+    sourcePlatform,
+    allUsernames,
+  );

-    for (const result of results) {
+  // BULK OPERATION 2: Link all users to source accounts
+  const links = results
+    .map((result) => {
      const normalized = result.sourceUser.username
        .toLowerCase()
        .replace(/[._-]/g, "");
      const sourceAccountId = sourceAccountIdMap.get(normalized);
+      return {
+        sourceAccountId: sourceAccountId!,
+        sourceDate: result.sourceUser.date,
+      };
+    })
+    .filter((link) => link.sourceAccountId !== undefined);

-      if (
-        sourceAccountId &&
-        result.atprotoMatches &&
-        result.atprotoMatches.length > 0
-      ) {
-        matchedCount++;
-        matchedSourceAccountIds.push(sourceAccountId);
+  await sourceAccountRepo.linkUserToAccounts(uploadId, context.did, links);

-        for (const match of result.atprotoMatches) {
-          allMatches.push({
-            sourceAccountId,
-            atprotoDid: match.did,
-            atprotoHandle: match.handle,
-            atprotoDisplayName: match.displayName,
-            atprotoAvatar: match.avatar,
-            atprotoDescription: (match as any).description,
-            matchScore: match.matchScore,
-            postCount: match.postCount || 0,
-            followerCount: match.followerCount || 0,
-          });
-        }
-      }
-    }
-  }
+  // BULK OPERATION 3: Store all atproto matches at once
+  const allMatches: Array<{
+    sourceAccountId: number;
+    atprotoDid: string;
+    atprotoHandle: string;
+    atprotoDisplayName?: string;
+    atprotoAvatar?: string;
+    atprotoDescription?: string;
+    matchScore: number;
+    postCount: number;
+    followerCount: number;
+  }> = [];

-    // Store all matches in one operation
-    let matchIdMap = new Map<string, number>();
-    if (allMatches.length > 0) {
-      matchIdMap = await bulkStoreAtprotoMatches(allMatches);
-    }
+  const matchedSourceAccountIds: number[] = [];

-    // BULK OPERATION 4: Mark all matched source accounts
-    if (matchedSourceAccountIds.length > 0) {
-      await bulkMarkSourceAccountsMatched(matchedSourceAccountIds);
-    }
+  for (const result of results) {
+    const normalized = result.sourceUser.username
+      .toLowerCase()
+      .replace(/[._-]/g, "");
+    const sourceAccountId = sourceAccountIdMap.get(normalized);

-    // BULK OPERATION 5: Create all user match statuses
-    const statuses: Array<{
-      did: string;
-      atprotoMatchId: number;
-      sourceAccountId: number;
-      viewed: boolean;
-    }> = [];
+    if (
+      sourceAccountId &&
+      result.atprotoMatches &&
+      result.atprotoMatches.length > 0
+    ) {
+      matchedCount++;
+      matchedSourceAccountIds.push(sourceAccountId);

-    for (const match of allMatches) {
-      const key = `${match.sourceAccountId}:${match.atprotoDid}`;
-      const matchId = matchIdMap.get(key);
-      if (matchId) {
-        statuses.push({
-          did: userSession.did,
-          atprotoMatchId: matchId,
-          sourceAccountId: match.sourceAccountId,
-          viewed: true,
+      for (const match of result.atprotoMatches) {
+        allMatches.push({
+          sourceAccountId,
+          atprotoDid: match.did,
+          atprotoHandle: match.handle,
+          atprotoDisplayName: match.displayName,
+          atprotoAvatar: match.avatar,
+          atprotoDescription: (match as any).description,
+          matchScore: match.matchScore,
+          postCount: match.postCount || 0,
+          followerCount: match.followerCount || 0,
        });
      }
    }
+  }

-    if (statuses.length > 0) {
-      await bulkCreateUserMatchStatus(statuses);
+  // Store all matches in one operation
+  let matchIdMap = new Map<string, number>();
+  if (allMatches.length > 0) {
+    matchIdMap = await matchRepo.bulkStoreMatches(allMatches);
+  }
+
+  // BULK OPERATION 4: Mark all matched source accounts
+  if (matchedSourceAccountIds.length > 0) {
+    await sourceAccountRepo.markAsMatched(matchedSourceAccountIds);
+  }
+
+  // BULK OPERATION 5: Create all user match statuses
+  const statuses: Array<{
+    did: string;
+    atprotoMatchId: number;
+    sourceAccountId: number;
+    viewed: boolean;
+  }> = [];
+
+  for (const match of allMatches) {
+    const key = `${match.sourceAccountId}:${match.atprotoDid}`;
+    const matchId = matchIdMap.get(key);
+    if (matchId) {
+      statuses.push({
+        did: context.did,
+        atprotoMatchId: matchId,
+        sourceAccountId: match.sourceAccountId,
+        viewed: true,
+      });
    }
+  }
+
+  if (statuses.length > 0) {
+    await matchRepo.upsertUserMatchStatus(statuses);
+  }

-    // Update upload record with final counts
-    await sql`
-      UPDATE user_uploads
-      SET matched_users = ${matchedCount},
-          unmatched_users = ${results.length - matchedCount}
-      WHERE upload_id = ${uploadId}
-    `;
+  // Update upload record with final counts
+  await uploadRepo.updateMatchCounts(
+    uploadId,
+    matchedCount,
+    results.length - matchedCount,
+  );

-    return {
-      statusCode: 200,
-      headers: {
-        "Content-Type": "application/json",
-        "Access-Control-Allow-Origin": "*",
-      },
-      body: JSON.stringify({
-        success: true,
-        uploadId,
-        totalUsers: results.length,
-        matchedUsers: matchedCount,
-        unmatchedUsers: results.length - matchedCount,
-      }),
-    };
-  } catch (error) {
-    console.error("Save results error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        error: "Failed to save results",
-        details: error instanceof Error ? error.message : "Unknown error",
-      }),
-    };
-  }
+  return successResponse({
+    success: true,
+    uploadId,
+    totalUsers: results.length,
+    matchedUsers: matchedCount,
+    unmatchedUsers: results.length - matchedCount,
+  });
};
+
+export const handler = withAuthErrorHandling(saveResultsHandler);
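UploadRepository.hasRecentUpload replaces the inline duplicate-save query removed above. A sketch of how the repository method could wrap that same SQL; the tagged-template client type and the constructor injection are assumptions, since the repository base class is not shown in this diff.

type Sql = (strings: TemplateStringsArray, ...values: unknown[]) => Promise<unknown[]>;

export class UploadRepositorySketch {
  constructor(private sql: Sql) {}

  /** True if this DID already created an upload within the last 5 seconds. */
  async hasRecentUpload(did: string): Promise<boolean> {
    const rows = await this.sql`
      SELECT upload_id FROM user_uploads
      WHERE did = ${did}
        AND created_at > NOW() - INTERVAL '5 seconds'
      ORDER BY created_at DESC
      LIMIT 1
    `;
    return rows.length > 0;
  }
}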
+60 -113  netlify/functions/session.ts
···
-import { Handler, HandlerEvent, HandlerResponse } from "@netlify/functions";
-import { SessionManager } from "./session-manager";
-import cookie from "cookie";
+import { SimpleHandler } from "./shared/types/api.types";
+import { SessionService } from "./shared/services/session";
+import { extractSessionId } from "./shared/middleware";
+import { successResponse } from "./shared/utils";
+import { withErrorHandling } from "./shared/middleware";
+import { AuthenticationError, ERROR_MESSAGES } from "./shared/constants/errors";

// In-memory cache for profile
const profileCache = new Map<string, { data: any; timestamp: number }>();
const PROFILE_CACHE_TTL = 5 * 60 * 1000; // 5 minutes

-export const handler: Handler = async (
-  event: HandlerEvent,
-): Promise<HandlerResponse> => {
-  try {
-    const cookies = event.headers.cookie
-      ? cookie.parse(event.headers.cookie)
-      : {};
-    const sessionId =
-      event.queryStringParameters?.session || cookies.atlast_session;
+const sessionHandler: SimpleHandler = async (event) => {
+  const sessionId =
+    event.queryStringParameters?.session || extractSessionId(event);

-    if (!sessionId) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "No session" }),
-      };
-    }
+  if (!sessionId) {
+    throw new AuthenticationError(ERROR_MESSAGES.NO_SESSION_COOKIE);
+  }

-    // Verify session exists
-    const isValid = await SessionManager.verifySession(sessionId);
-    if (!isValid) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Invalid or expired session" }),
-      };
-    }
+  // Verify session exists
+  const isValid = await SessionService.verifySession(sessionId);
+  if (!isValid) {
+    throw new AuthenticationError(ERROR_MESSAGES.INVALID_SESSION);
+  }

-    // Get DID from session
-    const did = await SessionManager.getDIDForSession(sessionId);
-    if (!did) {
-      return {
-        statusCode: 401,
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ error: "Invalid session" }),
-      };
-    }
+  // Get DID from session
+  const did = await SessionService.getDIDForSession(sessionId);
+  if (!did) {
+    throw new AuthenticationError(ERROR_MESSAGES.INVALID_SESSION);
+  }

-    const now = Date.now();
+  const now = Date.now();

-    // Check profile cache
-    const cached = profileCache.get(did);
-    if (cached && now - cached.timestamp < PROFILE_CACHE_TTL) {
-      console.log("Returning cached profile for", did);
+  // Check profile cache
+  const cached = profileCache.get(did);
+  if (cached && now - cached.timestamp < PROFILE_CACHE_TTL) {
+    console.log("Returning cached profile for", did);
+    return successResponse(cached.data, 200, {
+      "Cache-Control": "private, max-age=300",
+      "X-Cache-Status": "HIT",
+    });
+  }

-      return {
-        statusCode: 200,
-        headers: {
-          "Content-Type": "application/json",
-          "Access-Control-Allow-Origin": "*",
-          "Cache-Control": "private, max-age=300", // Browser can cache for 5 minutes
-          "X-Cache-Status": "HIT",
-        },
-        body: JSON.stringify(cached.data),
-      };
-    }
+  // Cache miss - fetch full profile
+  const { agent } = await SessionService.getAgentForSession(sessionId);

-    // Cache miss - fetch full profile
-    try {
-      // Get authenticated agent using SessionManager
-      const { agent } = await SessionManager.getAgentForSession(sessionId);
+  // Get profile - throw error if this fails
+  const profile = await agent.getProfile({ actor: did });

-      // Get profile
-      const profile = await agent.getProfile({ actor: did });
+  const profileData = {
+    did: did,
+    handle: profile.data.handle,
+    displayName: profile.data.displayName,
+    avatar: profile.data.avatar,
+    description: profile.data.description,
+  };

-      const profileData = {
-        did: did,
-        handle: profile.data.handle,
-        displayName: profile.data.displayName,
-        avatar: profile.data.avatar,
-        description: profile.data.description,
-      };
+  // Cache the profile data
+  profileCache.set(did, {
+    data: profileData,
+    timestamp: now,
+  });

-      // Cache the profile data
-      profileCache.set(did, {
-        data: profileData,
-        timestamp: now,
-      });
-
-      // Clean up old profile cache entries
-      if (profileCache.size > 100) {
-        for (const [cachedDid, entry] of profileCache.entries()) {
-          if (now - entry.timestamp > PROFILE_CACHE_TTL) {
-            profileCache.delete(cachedDid);
-          }
-        }
+  // Clean up old profile cache entries
+  if (profileCache.size > 100) {
+    for (const [cachedDid, entry] of profileCache.entries()) {
+      if (now - entry.timestamp > PROFILE_CACHE_TTL) {
+        profileCache.delete(cachedDid);
      }
-
-      return {
-        statusCode: 200,
-        headers: {
-          "Content-Type": "application/json",
-          "Access-Control-Allow-Origin": "*",
-          "Cache-Control": "private, max-age=300",
-          "X-Cache-Status": "MISS",
-        },
-        body: JSON.stringify(profileData),
-      };
-    } catch (error) {
-      console.error("Profile fetch error:", error);
-
-      // If profile fetch fails, return basic session info
-      return {
-        statusCode: 200,
-        headers: {
-          "Content-Type": "application/json",
-          "Access-Control-Allow-Origin": "*",
-          "X-Cache-Status": "ERROR",
-        },
-        body: JSON.stringify({
-          did: did,
-          // Profile data unavailable
-        }),
-      };
    }
-  } catch (error) {
-    console.error("Session error:", error);
-    return {
-      statusCode: 500,
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ error: "Internal server error" }),
-    };
  }
+
+  return successResponse(profileData, 200, {
+    "Cache-Control": "private, max-age=300",
+    "X-Cache-Status": "MISS",
+  });
};
+
+export const handler = withErrorHandling(sessionHandler);
public/favicon-png.ico → public/favicon.ico
+30 -8  src/lib/apiClient/realApiClient.ts
···

const cache = new ResponseCache();

+/**
+ * Unwrap the standardized API response format
+ * New format: { success: true, data: {...} }
+ * Old format: direct data
+ */
+function unwrapResponse<T>(response: any): T {
+  if (response.success !== undefined && response.data !== undefined) {
+    return response.data as T;
+  }
+  return response as T;
+}
+

export const apiClient = {
  // OAuth and Authentication
  async startOAuth(handle: string): Promise<{ url: string }> {
···
      throw new Error(errorData.error || "Failed to start OAuth flow");
    }

-    return res.json();
+    const response = await res.json();
+    return unwrapResponse<{ url: string }>(response);
  },

  async getSession(): Promise<{
···
      throw new Error("No valid session");
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<AtprotoSession>(response);

    // Cache the session data for 5 minutes
    cache.set(cacheKey, data, 5 * 60 * 1000);
···
      throw new Error("Failed to fetch uploads");
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<any>(response);

    // Cache uploads list for 2 minutes
    cache.set(cacheKey, data, 2 * 60 * 1000);
···
      throw new Error("Failed to fetch upload details");
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<any>(response);

    // Cache upload details page for 10 minutes
    cache.set(cacheKey, data, 10 * 60 * 1000);
···
      throw new Error("Failed to check follow status");
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<{ followStatus: Record<string, boolean> }>(
+      response,
+    );

    // Cache for 2 minutes
    cache.set(cacheKey, data.followStatus, 2 * 60 * 1000);
···
      throw new Error(`Batch search failed: ${res.status}`);
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<{ results: BatchSearchResult[] }>(response);

    // Cache search results for 10 minutes
    cache.set(cacheKey, data, 10 * 60 * 1000);
···
      throw new Error("Batch follow failed");
    }

-    const data = await res.json();
+    const response = await res.json();
+    const data = unwrapResponse<any>(response);

    // Invalidate caches after following
    cache.invalidate("uploads");
···
    });

    if (res.ok) {
-      const data = await res.json();
+      const response = await res.json();
+      const data = unwrapResponse<SaveResultsResponse>(response);
      console.log(`Successfully saved ${data.matchedUsers} matches`);

      // Invalidate caches after saving
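For reference, unwrapResponse handles both response shapes, so the client keeps working against functions that have not yet been migrated to the new envelope. An illustrative usage, assuming unwrapResponse were exported from realApiClient.ts for testing (above it is module-local) and using made-up payloads:

import { unwrapResponse } from "./realApiClient"; // hypothetical export for testing

const wrapped = { success: true, data: { url: "https://pds.example/authorize" } };
const legacy = { url: "https://pds.example/authorize" };

console.log(unwrapResponse<{ url: string }>(wrapped).url); // "https://pds.example/authorize"
console.log(unwrapResponse<{ url: string }>(legacy).url);  // "https://pds.example/authorize"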