ATlast — you'll never need to find your favorites on another platform again. Find your favs in the ATmosphere.

add rate limiting to batch endpoints

Optimization #12:
- created rateLimit.middleware with in-memory rate limiting
- batch-search-actors: 5 requests/min (conservative)
- batch-follow-users: 8 requests/hr (conservative)
- limits calculated from AT Protocol rate limits with a ~50% buffer (rough math in the sketch after this list)
- DRY implementation with applyRateLimit helper function
- prevents users from exhausting AT Protocol API limits
- leaves buffer for likes, replies, posts
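
For context, here is the arithmetic behind the 8 requests/hr figure, using the numbers the diff comments below cite (a 5,000 points/hr write budget and 300 points per 100-user batch). This is a back-of-the-envelope sketch, not project code:

```typescript
// Rough budget math for batch-follow-users (sketch only, not part of the codebase).
// Figures taken from the diff comments: 5,000 points/hr, 300 points per 100-follow batch.
const HOURLY_POINT_BUDGET = 5_000;
const POINTS_PER_FOLLOW = 3;      // 300 points / 100 follows per request
const FOLLOWS_PER_REQUEST = 100;  // max batch size per request
const REQUESTS_PER_HOUR = 8;      // the limit chosen in this commit

const pointsUsed = REQUESTS_PER_HOUR * FOLLOWS_PER_REQUEST * POINTS_PER_FOLLOW; // 2,400
const shareOfBudget = pointsUsed / HOURLY_POINT_BUDGET;                         // 0.48

console.log(`${pointsUsed} points ≈ ${(shareOfBudget * 100).toFixed(0)}% of the hourly budget`);
// → 2400 points ≈ 48% of the hourly budget, leaving ~52% for likes, replies, posts
```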

Note: rate-limit state is in-memory per function instance (it resets on cold starts and isn't shared across instances). Upgrade to Upstash Redis for production-grade shared state.
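
A minimal sketch of what that upgrade could look like, assuming the @upstash/ratelimit and @upstash/redis packages with Upstash credentials in environment variables; none of this is in the commit, and applySharedRateLimit is a hypothetical drop-in, not the project's applyRateLimit:

```typescript
import { Ratelimit } from "@upstash/ratelimit";
import { Redis } from "@upstash/redis";
import type { HandlerEvent } from "@netlify/functions";

// Shared, Redis-backed limiter: survives cold starts and is shared across instances.
const searchLimiter = new Ratelimit({
  redis: Redis.fromEnv(), // reads UPSTASH_REDIS_REST_URL / UPSTASH_REDIS_REST_TOKEN
  limiter: Ratelimit.slidingWindow(5, "1 m"), // same budget as batch-search-actors
  prefix: "ratelimit:batch-search",
});

// Hypothetical replacement for applyRateLimit(): keys on the caller's IP
// like the current middleware does.
export async function applySharedRateLimit(event: HandlerEvent): Promise<void> {
  const key = event.headers["x-forwarded-for"] || "unknown";
  const { success, reset } = await searchLimiter.limit(key);
  if (!success) {
    const seconds = Math.ceil((reset - Date.now()) / 1000);
    throw new Error(`Rate limit exceeded. Try again in ${seconds} seconds.`);
  }
}
```

Keying on the authenticated DID rather than the caller's IP might match the per-account AT Protocol budget more closely; the sketch keeps the IP key to mirror the current middleware.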

by arielm.fyi · 587a9b03 · 35061ae4 · verified
Changed files (+152)

netlify/functions/batch-follow-users.ts (+13)
```diff
  import { MatchRepository } from "./repositories";
  import { successResponse, validateArrayInput, ValidationSchemas } from "./utils";
  import { withAuthErrorHandling } from "./core/middleware";
+ import {
+   createRateLimiter,
+   applyRateLimit,
+ } from "./core/middleware/rateLimit.middleware";
+
+ // Rate limit: 8 requests per hour
+ // Each request can follow up to 100 users (300 points)
+ // Leaves ~50% of 5,000 points/hr for other operations
+ const checkRateLimit = createRateLimiter({
+   maxRequests: 8,
+   windowMs: 60 * 60 * 1000, // 1 hour
+ });

  const batchFollowHandler: AuthenticatedHandler = async (context) => {
+   applyRateLimit(checkRateLimit, context.event, "minutes");
    const body = JSON.parse(context.event.body || "{}");
    const dids = validateArrayInput<string>(
      context.event.body,
```
netlify/functions/batch-search-actors.ts (+12)
```diff
  import { SessionService } from "./services/SessionService";
  import { successResponse, validateArrayInput, ValidationSchemas } from "./utils";
  import { withAuthErrorHandling } from "./core/middleware";
+ import {
+   createRateLimiter,
+   applyRateLimit,
+ } from "./core/middleware/rateLimit.middleware";
  import { normalize } from "./utils/string.utils";
  import { FollowService } from "./services/FollowService";

+ // Rate limit: 5 requests per minute
+ // Leaves ~50% buffer for other AT Protocol operations
+ const checkRateLimit = createRateLimiter({
+   maxRequests: 5,
+   windowMs: 60 * 1000, // 1 minute
+ });
+
  const batchSearchHandler: AuthenticatedHandler = async (context) => {
+   applyRateLimit(checkRateLimit, context.event, "seconds");
    const body = JSON.parse(context.event.body || "{}");
    const usernames = validateArrayInput<string>(
      context.event.body,
```
netlify/functions/core/middleware/rateLimit.middleware.ts (+127, new file)

```typescript
import { HandlerEvent } from "@netlify/functions";

interface RateLimitConfig {
  maxRequests: number;
  windowMs: number;
  identifier?: (event: HandlerEvent) => string;
}

interface RateLimitRecord {
  count: number;
  resetAt: number;
}

// In-memory store per function instance
// Note: Resets on cold starts, but provides basic protection
const rateLimitStore = new Map<string, RateLimitRecord>();

// Cleanup old entries every 5 minutes to prevent memory leaks
setInterval(() => {
  const now = Date.now();
  for (const [key, record] of rateLimitStore.entries()) {
    if (now > record.resetAt) {
      rateLimitStore.delete(key);
    }
  }
}, 5 * 60 * 1000);

/**
 * Rate limiting middleware
 *
 * Limitations:
 * - Per function instance (not shared across instances)
 * - Resets on cold starts
 * - In-memory only
 *
 * For production use with shared state, consider @upstash/ratelimit
 */
export function createRateLimiter(config: RateLimitConfig) {
  const {
    maxRequests,
    windowMs,
    identifier = (event) => event.headers["x-forwarded-for"] || "unknown",
  } = config;

  return function checkRateLimit(event: HandlerEvent): {
    allowed: boolean;
    limit: number;
    remaining: number;
    resetAt: number;
  } {
    const key = identifier(event);
    const now = Date.now();
    const record = rateLimitStore.get(key);

    // No record or window expired - allow and create new record
    if (!record || now > record.resetAt) {
      const resetAt = now + windowMs;
      rateLimitStore.set(key, { count: 1, resetAt });
      return {
        allowed: true,
        limit: maxRequests,
        remaining: maxRequests - 1,
        resetAt,
      };
    }

    // Within window - check if under limit
    if (record.count < maxRequests) {
      record.count++;
      return {
        allowed: true,
        limit: maxRequests,
        remaining: maxRequests - record.count,
        resetAt: record.resetAt,
      };
    }

    // Rate limit exceeded
    return {
      allowed: false,
      limit: maxRequests,
      remaining: 0,
      resetAt: record.resetAt,
    };
  };
}

/**
 * Rate limit error response
 */
export class RateLimitError extends Error {
  constructor(
    message: string,
    public limit: number,
    public resetAt: number,
  ) {
    super(message);
    this.name = "RateLimitError";
  }
}

/**
 * Apply rate limiting to a request
 * Throws RateLimitError if limit exceeded
 */
export function applyRateLimit(
  checkRateLimit: ReturnType<typeof createRateLimiter>,
  event: HandlerEvent,
  timeUnit: "seconds" | "minutes" = "seconds",
): void {
  const rateLimit = checkRateLimit(event);

  if (!rateLimit.allowed) {
    const timeRemaining =
      timeUnit === "minutes"
        ? Math.ceil((rateLimit.resetAt - Date.now()) / 60000)
        : Math.ceil((rateLimit.resetAt - Date.now()) / 1000);

    const unitLabel = timeUnit === "minutes" ? "minutes" : "seconds";

    throw new RateLimitError(
      `Rate limit exceeded. Try again in ${timeRemaining} ${unitLabel}.`,
      rateLimit.limit,
      rateLimit.resetAt,
    );
  }
}
```
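
applyRateLimit throws rather than returning a response, so whatever wraps these handlers has to translate RateLimitError into an HTTP 429. That mapping isn't part of this diff; the sketch below shows one way it could look. The wrapper name and response shape are assumptions, not the project's actual withAuthErrorHandling:

```typescript
import type { HandlerEvent, HandlerResponse } from "@netlify/functions";
import { RateLimitError } from "./rateLimit.middleware";

// Hypothetical wrapper: catches RateLimitError and returns a 429 with standard headers.
// The real project routes errors through withAuthErrorHandling, which isn't shown in this commit.
export function withRateLimitHandling(
  handler: (event: HandlerEvent) => Promise<HandlerResponse>,
) {
  return async (event: HandlerEvent): Promise<HandlerResponse> => {
    try {
      return await handler(event);
    } catch (err) {
      if (err instanceof RateLimitError) {
        return {
          statusCode: 429,
          headers: {
            "Content-Type": "application/json",
            "X-RateLimit-Limit": String(err.limit),
            "Retry-After": String(Math.ceil((err.resetAt - Date.now()) / 1000)),
          },
          body: JSON.stringify({ error: err.message }),
        };
      }
      throw err; // let other errors fall through to the existing error handling
    }
  };
}
```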