music on atproto
plyr.fm
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.11"
# dependencies = ["asyncpg", "boto3", "pydantic", "pydantic-settings", "typer"]
# ///
"""export platform costs to R2 for public dashboard

usage:
    uv run scripts/costs/export_costs.py            # export to R2 (prod)
    uv run scripts/costs/export_costs.py --dry-run  # print JSON, don't upload
    uv run scripts/costs/export_costs.py --env stg  # use staging db

AudD billing model:
    - $5/month base (indie plan)
    - 6000 free requests/month (1000 base + 5000 bonus)
    - $5 per 1000 requests after free tier
    - 1 request = 12 seconds of audio
    - so a 5-minute track = ceil(300/12) = 25 requests
"""

import asyncio
import json
import re
from datetime import UTC, datetime, timedelta
from typing import Any

import typer
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

# billing constants
AUDD_BILLING_DAY = 24
AUDD_SECONDS_PER_REQUEST = 12
AUDD_FREE_REQUESTS = 6000  # 1000 base + 5000 bonus on indie plan
AUDD_COST_PER_1000 = 5.00  # $5 per 1000 requests
AUDD_BASE_COST = 5.00  # $5/month base
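# worked example (hypothetical volume): 7,500 requests in a billing period
#   -> $5.00 base + max(0, 7500 - 6000) * $5 / 1000 = $5.00 + $7.50 = $12.50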

# fixed monthly costs (updated 2025-12-26)
# fly.io: manually updated from cost explorer (TODO: use fly billing API)
# neon: fixed $5/month
# cloudflare: mostly free tier
# redis: self-hosted on fly (included in fly_io costs)
FIXED_COSTS = {
    "fly_io": {
        "breakdown": {
            "relay-api": 5.80,  # prod backend
            "relay-api-staging": 5.60,
            "plyr-moderation": 0.24,
            "plyr-transcoder": 0.02,
        },
        "note": "compute (2x shared-cpu VMs + moderation + transcoder)",
    },
    "neon": {
        "total": 5.00,
        "note": "postgres serverless (fixed)",
    },
    "cloudflare": {
        "r2": 0.16,
        "pages": 0.00,
        "domain": 1.00,
        "total": 1.16,
        "note": "r2 egress is free, pages free tier",
    },
}
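# illustrative: the fixed costs above total 11.66 (fly) + 5.00 (neon) + 1.16 (cloudflare)
# = $17.82/month before AudD usage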


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=(".env", "backend/.env"), extra="ignore")

    neon_database_url: str | None = None
    neon_database_url_prd: str | None = None
    neon_database_url_stg: str | None = None
    neon_database_url_dev: str | None = None

    # r2 stats bucket (dedicated, shared across environments)
    aws_access_key_id: str = ""
    aws_secret_access_key: str = ""
    r2_endpoint_url: str = ""
    r2_stats_bucket: str = Field(
        default="plyr-stats", validation_alias="R2_STATS_BUCKET"
    )
    r2_stats_public_url: str = Field(
        default="https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev",
        validation_alias="R2_STATS_PUBLIC_URL",
    )

    def get_db_url(self, env: str) -> str:
        """get database url for environment, converting to asyncpg format"""
        url = getattr(self, f"neon_database_url_{env}", None) or self.neon_database_url
        if not url:
            raise ValueError(f"no database url for {env}")
        return re.sub(r"postgresql\+\w+://", "postgresql://", url)


settings = Settings()
app = typer.Typer(add_completion=False)


def get_billing_period_start() -> datetime:
    """get the start of current billing period (24th of month)"""
    now = datetime.now()
    if now.day >= AUDD_BILLING_DAY:
        return datetime(now.year, now.month, AUDD_BILLING_DAY)
    else:
        first_of_month = datetime(now.year, now.month, 1)
        prev_month = first_of_month - timedelta(days=1)
        return datetime(prev_month.year, prev_month.month, AUDD_BILLING_DAY)


async def get_audd_stats(db_url: str) -> dict[str, Any]:
    """fetch audd scan stats from postgres.

    calculates AudD API requests from track duration:
    - each 12 seconds of audio = 1 API request
    - derived by joining copyright_scans with tracks table
    """
    import asyncpg

    billing_start = get_billing_period_start()
    # 30 days of history for the daily chart (independent of billing cycle)
    history_start = datetime.now() - timedelta(days=30)

    conn = await asyncpg.connect(db_url)
    try:
        # get totals: scans, flagged, and derived API requests from duration
        # uses billing period for accurate cost calculation
        row = await conn.fetchrow(
            """
            SELECT
                COUNT(*) as total_scans,
                COUNT(CASE WHEN cs.is_flagged THEN 1 END) as flagged,
                COALESCE(SUM(CEIL((t.extra->>'duration')::float / $2)), 0)::bigint as total_requests,
                COALESCE(SUM((t.extra->>'duration')::int), 0)::bigint as total_seconds
            FROM copyright_scans cs
            JOIN tracks t ON t.id = cs.track_id
            WHERE cs.scanned_at >= $1
            """,
            billing_start,
            AUDD_SECONDS_PER_REQUEST,
        )
        total_scans = row["total_scans"]
        flagged = row["flagged"]
        total_requests = row["total_requests"]
        total_seconds = row["total_seconds"]

        # daily breakdown for chart - 30 days of history for flexible views
        daily = await conn.fetch(
            """
            SELECT
                DATE(cs.scanned_at) as date,
                COUNT(*) as scans,
                COUNT(CASE WHEN cs.is_flagged THEN 1 END) as flagged,
                COALESCE(SUM(CEIL((t.extra->>'duration')::float / $2)), 0)::bigint as requests
            FROM copyright_scans cs
            JOIN tracks t ON t.id = cs.track_id
            WHERE cs.scanned_at >= $1
            GROUP BY DATE(cs.scanned_at)
            ORDER BY date
            """,
            history_start,
            AUDD_SECONDS_PER_REQUEST,
        )

        # calculate costs
        billable_requests = max(0, total_requests - AUDD_FREE_REQUESTS)
        overage_cost = round(billable_requests * AUDD_COST_PER_1000 / 1000, 2)
        total_cost = AUDD_BASE_COST + overage_cost

        return {
            "billing_period_start": billing_start.isoformat(),
            "total_scans": total_scans,
            "total_requests": total_requests,
            "total_audio_seconds": total_seconds,
            "flagged": flagged,
            "flag_rate": round(flagged / total_scans * 100, 1) if total_scans else 0,
            "free_requests": AUDD_FREE_REQUESTS,
            "remaining_free": max(0, AUDD_FREE_REQUESTS - total_requests),
            "billable_requests": billable_requests,
            "base_cost": AUDD_BASE_COST,
            "overage_cost": overage_cost,
            "estimated_cost": total_cost,
            "daily": [
                {
                    "date": r["date"].isoformat(),
                    "scans": r["scans"],
                    "flagged": r["flagged"],
                    "requests": r["requests"],
                }
                for r in daily
            ],
        }
    finally:
        await conn.close()


def build_cost_data(audd_stats: dict[str, Any]) -> dict[str, Any]:
    """assemble full cost dashboard data"""
    # calculate plyr-specific fly costs
    plyr_fly = sum(FIXED_COSTS["fly_io"]["breakdown"].values())

    monthly_total = (
        plyr_fly
        + FIXED_COSTS["neon"]["total"]
        + FIXED_COSTS["cloudflare"]["total"]
        + audd_stats["estimated_cost"]
    )

    return {
        "generated_at": datetime.now(UTC).isoformat().replace("+00:00", "Z"),
        "monthly_estimate": round(monthly_total, 2),
        "costs": {
            "fly_io": {
                "amount": round(plyr_fly, 2),
                "breakdown": FIXED_COSTS["fly_io"]["breakdown"],
                "note": FIXED_COSTS["fly_io"]["note"],
            },
            "neon": {
                "amount": FIXED_COSTS["neon"]["total"],
                "note": FIXED_COSTS["neon"]["note"],
            },
            "cloudflare": {
                "amount": FIXED_COSTS["cloudflare"]["total"],
                "breakdown": {
                    "r2_storage": FIXED_COSTS["cloudflare"]["r2"],
                    "pages": FIXED_COSTS["cloudflare"]["pages"],
                    "domain": FIXED_COSTS["cloudflare"]["domain"],
                },
                "note": FIXED_COSTS["cloudflare"]["note"],
            },
            "audd": {
                "amount": audd_stats["estimated_cost"],
                "base_cost": audd_stats["base_cost"],
                "overage_cost": audd_stats["overage_cost"],
                "scans_this_period": audd_stats["total_scans"],
                "requests_this_period": audd_stats["total_requests"],
                "audio_seconds": audd_stats["total_audio_seconds"],
                "free_requests": audd_stats["free_requests"],
                "remaining_free": audd_stats["remaining_free"],
                "billable_requests": audd_stats["billable_requests"],
                "flag_rate": audd_stats["flag_rate"],
                "daily": audd_stats["daily"],
                "note": f"copyright detection ($5 base + ${AUDD_COST_PER_1000}/1k requests over {AUDD_FREE_REQUESTS})",
            },
        },
        "support": {
            "url": "https://atprotofans.com/u/did:plc:xbtmt2zjwlrfegqvch7fboei",
            "message": "help cover moderation costs",
        },
    }


async def upload_to_r2(data: dict[str, Any]) -> str:
    """upload json to dedicated stats bucket"""
    import boto3

    bucket = settings.r2_stats_bucket
    key = "costs.json"
    body = json.dumps(data, indent=2).encode()

    s3 = boto3.client(
        "s3",
        endpoint_url=settings.r2_endpoint_url,
        aws_access_key_id=settings.aws_access_key_id,
        aws_secret_access_key=settings.aws_secret_access_key,
    )
    s3.put_object(
        Bucket=bucket,
        Key=key,
        Body=body,
        ContentType="application/json",
        CacheControl="public, max-age=3600",
    )
    return f"{settings.r2_stats_public_url}/{key}"


@app.command()
def main(
    dry_run: bool = typer.Option(
        False, "--dry-run", "-n", help="print json, don't upload"
    ),
    env: str = typer.Option("prd", "--env", "-e", help="environment: prd, stg, dev"),
) -> None:
    """export platform costs to R2 for public dashboard"""

    async def run():
        db_url = settings.get_db_url(env)
        audd_stats = await get_audd_stats(db_url)
        data = build_cost_data(audd_stats)

        if dry_run:
            print(json.dumps(data, indent=2))
            return

        url = await upload_to_r2(data)
        print(f"uploaded to {url}")

    asyncio.run(run())


if __name__ == "__main__":
    app()
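
a minimal sketch of how a dashboard page (or any other consumer) could read the exported file, assuming the default public bucket URL from the script above; stdlib only, not part of the script itself:

# fetch_costs.py - illustrative consumer of the exported costs.json
import json
import urllib.request

COSTS_URL = "https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev/costs.json"

with urllib.request.urlopen(COSTS_URL) as resp:
    costs = json.load(resp)

print(costs["generated_at"], f'${costs["monthly_estimate"]}/month estimated')
for name, entry in costs["costs"].items():
    print(f'{name}: ${entry["amount"]}')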