backend/src/backend/config.py (+6 -8)

···
         validation_alias="R2_PUBLIC_IMAGE_BUCKET_URL",
         description="R2 public bucket URL for image files",
     )
-
-    @computed_field
-    @property
-    def costs_json_url(self) -> str:
-        """URL for the public costs dashboard JSON."""
-        if self.r2_public_bucket_url:
-            return f"{self.r2_public_bucket_url.rstrip('/')}/stats/costs.json"
-        return ""
+    # dedicated stats bucket - shared across all environments
+    costs_json_url: str = Field(
+        default="https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev/costs.json",
+        validation_alias="COSTS_JSON_URL",
+        description="URL for public costs dashboard JSON",
+    )

     @computed_field
     @property
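The new costs_json_url field keeps a hard-coded public default but remains overridable through the COSTS_JSON_URL environment variable named by its validation_alias. A minimal standalone sketch of that resolution order, assuming the backend settings class is a pydantic-settings BaseSettings (the class itself is not shown in this diff, and StatsSettings below is an illustrative name only):

import os

from pydantic import Field
from pydantic_settings import BaseSettings


class StatsSettings(BaseSettings):
    # same shape as the field added above; the class name is illustrative
    costs_json_url: str = Field(
        default="https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev/costs.json",
        validation_alias="COSTS_JSON_URL",
    )


# with no environment variable set, the shared-bucket default applies
assert StatsSettings().costs_json_url.endswith("/costs.json")

# the env var matched via validation_alias overrides the default
os.environ["COSTS_JSON_URL"] = "https://example.invalid/costs.json"
assert StatsSettings().costs_json_url == "https://example.invalid/costs.json"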
scripts/costs/export_costs.py (+24 -44)

 #!/usr/bin/env python3
 # /// script
 # requires-python = ">=3.11"
-# dependencies = ["asyncpg", "boto3", "pydantic-settings", "typer"]
+# dependencies = ["asyncpg", "boto3", "pydantic", "pydantic-settings", "typer"]
 # ///
 """export platform costs to R2 for public dashboard
···
 from typing import Any

 import typer
+from pydantic import Field
 from pydantic_settings import BaseSettings, SettingsConfigDict

 # billing constants
···
     neon_database_url_stg: str | None = None
     neon_database_url_dev: str | None = None

-    # r2 for upload
+    # r2 stats bucket (dedicated, shared across environments)
     aws_access_key_id: str = ""
     aws_secret_access_key: str = ""
     r2_endpoint_url: str = ""
-    r2_bucket: str = ""
-    r2_public_bucket_url: str = ""
+    r2_stats_bucket: str = Field(
+        default="plyr-stats", validation_alias="R2_STATS_BUCKET"
+    )
+    r2_stats_public_url: str = Field(
+        default="https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev",
+        validation_alias="R2_STATS_PUBLIC_URL",
+    )

     def get_db_url(self, env: str) -> str:
         """get database url for environment, converting to asyncpg format"""
···


 async def upload_to_r2(data: dict[str, Any]) -> str:
-    """upload json to r2 public bucket"""
+    """upload json to dedicated stats bucket"""
+    import boto3

-    # s3-compatible signing for r2
-    bucket = settings.r2_bucket
-    key = "stats/costs.json"
+    bucket = settings.r2_stats_bucket
+    key = "costs.json"
     body = json.dumps(data, indent=2).encode()

-    # use httpx with basic auth approach via presigned-like headers
-    # actually simpler: use boto3-like signing or just httpx with aws4auth
-    # for simplicity, let's use the s3 client approach
-
-    try:
-        import aioboto3
-    except ImportError:
-        # fallback to sync boto3
-        import boto3
-
-        s3 = boto3.client(
-            "s3",
-            endpoint_url=settings.r2_endpoint_url,
-            aws_access_key_id=settings.aws_access_key_id,
-            aws_secret_access_key=settings.aws_secret_access_key,
-        )
-        s3.put_object(
-            Bucket=bucket,
-            Key=key,
-            Body=body,
-            ContentType="application/json",
-            CacheControl="public, max-age=3600", # 1 hour cache
-        )
-        return f"{settings.r2_public_bucket_url}/{key}"
-
-    session = aioboto3.Session()
-    async with session.client(
+    s3 = boto3.client(
         "s3",
         endpoint_url=settings.r2_endpoint_url,
         aws_access_key_id=settings.aws_access_key_id,
         aws_secret_access_key=settings.aws_secret_access_key,
-    ) as s3:
-        await s3.put_object(
-            Bucket=bucket,
-            Key=key,
-            Body=body,
-            ContentType="application/json",
-            CacheControl="public, max-age=3600",
-        )
-        return f"{settings.r2_public_bucket_url}/{key}"
+    )
+    s3.put_object(
+        Bucket=bucket,
+        Key=key,
+        Body=body,
+        ContentType="application/json",
+        CacheControl="public, max-age=3600",
+    )
+    return f"{settings.r2_stats_public_url}/{key}"


 @app.command()
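The rewritten upload path drops the aioboto3/boto3 fallback in favor of a single synchronous boto3 client, which is simpler for a one-shot export script even though put_object briefly blocks inside the async function, and it now returns the public URL on the dedicated stats bucket. A minimal consumer-side sketch of reading the published file, assuming only the default public URL from this diff and the standard library (the payload shape written by export_costs.py is not specified here):

import json
import urllib.request

# default public URL from the diff; the backend can override it via COSTS_JSON_URL
COSTS_JSON_URL = "https://pub-68f2c7379f204d81bdf65152b0ff0207.r2.dev/costs.json"

with urllib.request.urlopen(COSTS_JSON_URL, timeout=10) as resp:
    costs = json.load(resp)

# the object is uploaded with CacheControl "public, max-age=3600",
# so readers may see data up to an hour old
print(json.dumps(costs, indent=2)[:500])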