···54 # after a restart, preventing it from reprocessing old events or skipping new ones.
55 volumes:
56 - ./cursor.txt:/app/cursor.txt
57
58 environment:
59 - NODE_ENV=production
···54 # after a restart, preventing it from reprocessing old events or skipping new ones.
55 volumes:
56 - ./cursor.txt:/app/cursor.txt
57+ - ./.session:/app/.session
58
59 environment:
60 - NODE_ENV=production
···1import { pRateLimit } from "p-ratelimit";
0023-// TypeScript
000000000000045-// create a rate limiter that allows up to 280 API calls per 30-second window,
6-// with max concurrency of 48
78-export const limit = pRateLimit({
9- interval: 30000, // 30,000 ms == 30 seconds
10- rate: 280, // 280 API calls per interval
11- concurrency: 48, // no more than 48 running at once
12- maxDelay: 0, // 0 = no cap on how long a queued call may wait before running
13});
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
···1import { pRateLimit } from "p-ratelimit";
2+import { logger } from "./logger.js";
3+import { Counter, Gauge, Histogram } from "prom-client";
45+interface RateLimitState {
6+ limit: number;
7+ remaining: number;
8+ reset: number; // Unix timestamp in seconds
9+ policy?: string;
10+}
11+12+// Conservative defaults based on previous static configuration
13+// Will be replaced with dynamic values from ATP response headers
14+let rateLimitState: RateLimitState = {
15+ limit: 280,
16+ remaining: 280,
17+ reset: Math.floor(Date.now() / 1000) + 30,
18+};
1920+const SAFETY_BUFFER = 5; // Keep this many requests in reserve (reduced from 20)
21+const CONCURRENCY = 24; // Reduced from 48 to prevent rapid depletion
2223+// Metrics
24+const rateLimitWaitsTotal = new Counter({
25+ name: "rate_limit_waits_total",
26+ help: "Total number of times rate limit wait was triggered",
027});
28+29+const rateLimitWaitDuration = new Histogram({
30+ name: "rate_limit_wait_duration_seconds",
31+ help: "Duration of rate limit waits in seconds",
32+ buckets: [0.1, 0.5, 1, 5, 10, 30, 60],
33+});
34+35+const rateLimitRemaining = new Gauge({
36+ name: "rate_limit_remaining",
37+ help: "Current remaining rate limit",
38+});
39+40+const rateLimitTotal = new Gauge({
41+ name: "rate_limit_total",
42+ help: "Total rate limit from headers",
43+});
44+45+const concurrentRequestsGauge = new Gauge({
46+ name: "concurrent_requests",
47+ help: "Current number of concurrent requests",
48+});
49+50+// Use p-ratelimit purely for concurrency management
51+const concurrencyLimiter = pRateLimit({
52+ interval: 1000,
53+ rate: 10000, // Very high rate, we manage rate limiting separately
54+ concurrency: CONCURRENCY,
55+ maxDelay: 0,
56+});
57+58+export function getRateLimitState(): RateLimitState {
59+ return { ...rateLimitState };
60+}
61+62+export function updateRateLimitState(state: Partial<RateLimitState>): void {
63+ rateLimitState = { ...rateLimitState, ...state };
64+65+ // Update Prometheus metrics
66+ if (state.remaining !== undefined) {
67+ rateLimitRemaining.set(state.remaining);
68+ }
69+ if (state.limit !== undefined) {
70+ rateLimitTotal.set(state.limit);
71+ }
72+73+ logger.debug(
74+ {
75+ limit: rateLimitState.limit,
76+ remaining: rateLimitState.remaining,
77+ resetIn: rateLimitState.reset - Math.floor(Date.now() / 1000),
78+ },
79+ "Rate limit state updated"
80+ );
81+}
82+83+async function awaitRateLimit(): Promise<void> {
84+ const state = getRateLimitState();
85+ const now = Math.floor(Date.now() / 1000);
86+87+ // Only wait if we're critically low
88+ if (state.remaining <= SAFETY_BUFFER) {
89+ rateLimitWaitsTotal.inc();
90+91+ const delaySeconds = Math.max(0, state.reset - now);
92+ const delayMs = delaySeconds * 1000;
93+94+ if (delayMs > 0) {
95+ logger.warn(
96+ `Rate limit critical (${state.remaining}/${state.limit} remaining). Waiting ${delaySeconds}s until reset...`
97+ );
98+99+ const waitStart = Date.now();
100+ await new Promise((resolve) => setTimeout(resolve, delayMs));
101+ const waitDuration = (Date.now() - waitStart) / 1000;
102+ rateLimitWaitDuration.observe(waitDuration);
103+104+ // Don't manually reset state - let the next API response update it
105+ logger.info("Rate limit wait complete, resuming requests");
106+ }
107+ }
108+}
109+110+export async function limit<T>(fn: () => Promise<T>): Promise<T> {
111+ return concurrencyLimiter(async () => {
112+ concurrentRequestsGauge.inc();
113+ try {
114+ await awaitRateLimit();
115+ return await fn();
116+ } finally {
117+ concurrentRequestsGauge.dec();
118+ }
119+ });
120+}
···1-import { describe, expect, it } from "vitest";
2-import { limit } from "../limits.js";
34describe("Rate Limiter", () => {
5- it("should limit the rate of calls", async () => {
6- const calls = [];
7- for (let i = 0; i < 10; i++) {
8- calls.push(limit(() => Promise.resolve(Date.now())));
9- }
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001011- const start = Date.now();
12- const results = await Promise.all(calls);
13- const end = Date.now();
1415- // With a concurrency of 4, 10 calls should take at least 2 intervals.
16- // However, the interval is 30 seconds, so this test would be very slow.
17- // Instead, we'll just check that the calls were successful and returned a timestamp.
18- expect(results.length).toBe(10);
19- for (const result of results) {
20- expect(typeof result).toBe("number");
21- }
22- // A better test would be to mock the timer and advance it, but that's more complex.
23- // For now, we'll just check that the time taken is greater than 0.
24- expect(end - start).toBeGreaterThanOrEqual(0);
25- }, 40000); // Increase timeout for this test
000000000000000000000000000000000000000000000000026});