+1
modules/default.nix
+1
modules/default.nix
+319
modules/nixos/services/pds.nix
+319
modules/nixos/services/pds.nix
···
1
+
{
  config,
  lib,
  pkgs,
  ...
}:
let
  cfg = config.services.pds-backup;

  # Reuse the user/group of the bluesky-pds unit so restored files end up with
  # the ownership the service expects; fall back to "pds" when the unit does
  # not set them.
  # NOTE(review): assumes the upstream bluesky-pds module exposes
  # serviceConfig.User/Group — confirm against that module.
  pdsUser = config.systemd.services.bluesky-pds.serviceConfig.User or "pds";
  pdsGroup = config.systemd.services.bluesky-pds.serviceConfig.Group or "pds";
12
+
13
+
restoreScript = pkgs.writeShellApplication {
14
+
name = "pds-restore";
15
+
runtimeInputs = with pkgs; [
16
+
awscli2
17
+
gnutar
18
+
coreutils
19
+
];
20
+
excludeShellChecks = [ "SC1091" ];
21
+
text = ''
22
+
echo "Starting PDS restore..."
23
+
24
+
if [ -f "${cfg.s3CredentialsFile}" ]; then
25
+
set -a; source "${cfg.s3CredentialsFile}"; set +a
26
+
else
27
+
echo "Error: Credentials file not found at ${cfg.s3CredentialsFile}"
28
+
exit 1
29
+
fi
30
+
31
+
backups=$(aws s3 ls "s3://$S3_BUCKET/backups/")
32
+
if [ -z "$backups" ]; then
33
+
echo "Error: No backups found in S3"
34
+
exit 1
35
+
fi
36
+
37
+
LATEST=$(echo "$backups" | sort | tail -1 | awk '{print $4}')
38
+
echo "Latest backup: $LATEST"
39
+
40
+
local_file="/tmp/$LATEST"
41
+
echo "Downloading backup..."
42
+
if ! aws s3 cp "s3://$S3_BUCKET/backups/$LATEST" "$local_file"; then
43
+
echo "Error: Failed to download backup"
44
+
exit 1
45
+
fi
46
+
47
+
echo "Stopping PDS service..."
48
+
systemctl stop bluesky-pds
49
+
50
+
echo "Clearing existing data..."
51
+
rm -rf ${cfg.pdsDataDir}/*
52
+
53
+
echo "Extracting backup..."
54
+
tar -xzf "$local_file" -C ${cfg.pdsDataDir}
55
+
rm -f "$local_file"
56
+
57
+
echo "Setting ownership..."
58
+
chown -R ${pdsUser}:${pdsGroup} ${cfg.pdsDataDir}
59
+
60
+
echo "Starting PDS service..."
61
+
systemctl start bluesky-pds
62
+
63
+
echo "Restore completed successfully."
64
+
'';
65
+
};
66
+
67
+
backupScript = pkgs.writeShellApplication {
68
+
name = "pds-backup-script";
69
+
runtimeInputs = with pkgs; [
70
+
awscli2
71
+
gnutar
72
+
gzip
73
+
coreutils
74
+
];
75
+
bashOptions = [ "errexit" ];
76
+
text = ''
77
+
log() {
78
+
echo "$(date): $1" | tee -a "$LOG_FILE"
79
+
}
80
+
81
+
fail() {
82
+
log "ERROR: $1"
83
+
systemctl restart bluesky-pds 2>/dev/null || log "WARNING: Failed to restart PDS service"
84
+
exit 1
85
+
}
86
+
87
+
cleanup_old_logs() {
88
+
find "$LOG_DIR" -name "*.log" -mtime +90 -delete
89
+
if [ "$(find "$LOG_FILE" -mtime +30 2>/dev/null)" ]; then
90
+
mv "$LOG_FILE" "$LOG_FILE.old" && touch "$LOG_FILE"
91
+
fi
92
+
if [ "$(wc -l < "$LOG_FILE" 2>/dev/null)" -gt 1000 ]; then
93
+
mv "$LOG_FILE" "$LOG_FILE.old" && touch "$LOG_FILE"
94
+
fi
95
+
}
96
+
97
+
mkdir -p "$LOG_DIR"
98
+
DATE_LABEL=$(date +"%Y%m%d-%H%M")
99
+
LOG_FILE="$LOG_DIR/$DATE_LABEL.log"
100
+
ARCHIVE_FILE="/tmp/pds-backup-$DATE_LABEL.tar.gz"
101
+
ARCHIVE_NAME="$DATE_LABEL.tar.gz"
102
+
103
+
log "Starting backup..."
104
+
105
+
if ! systemctl list-units --full -all | grep -Fq "bluesky-pds.service"; then
106
+
fail "PDS service not found"
107
+
fi
108
+
109
+
log "Stopping PDS service..."
110
+
if ! systemctl stop bluesky-pds 2>/dev/null; then
111
+
log "Failed to stop PDS service"
112
+
fi
113
+
114
+
if [ ! -d "$PDS_DATA_DIR" ]; then
115
+
fail "Source directory $PDS_DATA_DIR does not exist"
116
+
fi
117
+
118
+
log "Creating archive..."
119
+
if ! tar -czf "$ARCHIVE_FILE" -C "$PDS_DATA_DIR" . 2>> "$LOG_FILE"; then
120
+
fail "Failed to create archive"
121
+
fi
122
+
123
+
log "Uploading to S3..."
124
+
attempt=1
125
+
while [ "$attempt" -le "$MAX_RETRIES" ]; do
126
+
if aws s3 cp "$ARCHIVE_FILE" "s3://$S3_BUCKET/backups/$ARCHIVE_NAME" 2>> "$LOG_FILE"; then
127
+
log "Upload successful"
128
+
break
129
+
else
130
+
if [ "$attempt" -lt "$MAX_RETRIES" ]; then
131
+
log "Upload failed, retrying in $RETRY_INTERVAL seconds..."
132
+
sleep "$RETRY_INTERVAL"
133
+
else
134
+
fail "Upload failed after $MAX_RETRIES attempts"
135
+
fi
136
+
fi
137
+
((attempt++))
138
+
done
139
+
140
+
rm -f "$ARCHIVE_FILE"
141
+
142
+
log "Starting PDS service..."
143
+
if ! systemctl start bluesky-pds 2>/dev/null; then
144
+
fail "Failed to start PDS service"
145
+
fi
146
+
147
+
log "Cleaning up old logs..."
148
+
cleanup_old_logs
149
+
150
+
log "Backup completed successfully"
151
+
'';
152
+
};
153
+
154
+
  # Litestream configuration for continuous SQLite replication to S3.
  # The ''${VAR} forms are Nix-escaped so a literal $VAR lands in the YAML;
  # litestream expands them from its own environment (populated by the
  # s3CredentialsFile via the unit's EnvironmentFile).
  # NOTE(review): dir/pattern/recursive replication is a feature of the
  # pinned development build of litestream — confirm against the overlay.
  litestreamConfigFile = pkgs.writeText "litestream-pds-config.yml" ''
    dbs:
      - dir: ${cfg.pdsDataDir}
        pattern: "*.sqlite"
        recursive: true
        watch: true
        replica:
          type: s3
          path: ${cfg.s3Prefix}
          endpoint: ''${AWS_ENDPOINT_URL}
          bucket: ''${S3_BUCKET}
          access-key-id: ''${AWS_ACCESS_KEY_ID}
          secret-access-key: ''${AWS_SECRET_ACCESS_KEY}
  '';
168
+
169
+
litestreamRestore = pkgs.writeShellApplication {
170
+
name = "pds-litestream-restore";
171
+
runtimeInputs = with pkgs; [
172
+
awscli2
173
+
litestream
174
+
gnugrep
175
+
coreutils
176
+
];
177
+
excludeShellChecks = [ "SC1091" ];
178
+
text = ''
179
+
set -e
180
+
181
+
if [ -f "${cfg.s3CredentialsFile}" ]; then
182
+
set -a; source "${cfg.s3CredentialsFile}"; set +a
183
+
else
184
+
echo "Error: Credentials file not found at ${cfg.s3CredentialsFile}"
185
+
exit 1
186
+
fi
187
+
188
+
systemctl stop bluesky-pds
189
+
190
+
S3_PREFIX="${cfg.s3Prefix}/"
191
+
S3_URI="s3://$S3_BUCKET/$S3_PREFIX"
192
+
MAP=$(aws s3 ls "$S3_URI" --recursive --endpoint-url "$AWS_ENDPOINT_URL" | grep -oE "$S3_PREFIX.+\.sqlite/" | sort -u)
193
+
194
+
if [ -z "$MAP" ]; then
195
+
echo "No databases found in S3."
196
+
exit 2
197
+
fi
198
+
199
+
for S3_DB_PATH in $MAP; do
200
+
REL_PATH=''${S3_DB_PATH#"$S3_PREFIX"}
201
+
REL_PATH=''${REL_PATH%/}
202
+
S3_DB_REPLICA_URL="s3://$S3_BUCKET/$S3_DB_PATH?endpoint=$AWS_ENDPOINT_URL"
203
+
S3_DB_REPLICA_URL=''${S3_DB_REPLICA_URL%/}
204
+
205
+
litestream restore -if-db-not-exists -if-replica-exists -o "${cfg.pdsDataDir}/$REL_PATH" "$S3_DB_REPLICA_URL"
206
+
207
+
chown ${pdsUser}:${pdsGroup} "${cfg.pdsDataDir}/$REL_PATH"
208
+
done
209
+
210
+
systemctl start bluesky-pds
211
+
'';
212
+
};
213
+
in
{
  options.services.pds-backup = {
    enable = lib.mkEnableOption "PDS backup with Litestream and S3 archive";
    pdsDataDir = lib.mkOption {
      type = lib.types.path;
      default = "/var/lib/pds";
      description = "PDS data directory.";
    };
    pdsSecretsFile = lib.mkOption {
      type = lib.types.path;
      description = "Path to PDS secrets file (dotenv format).";
    };
    s3Prefix = lib.mkOption {
      # No leading or trailing slash: the scripts insert their own separators.
      type = lib.types.strMatching "[^/].*[^/]";
      default = "pds";
      description = "S3 directory subpath.";
    };
    s3CredentialsFile = lib.mkOption {
      type = lib.types.path;
      description = "Path to S3 credentials file (containing AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, S3_BUCKET, etc).";
    };
    pdsSettings = lib.mkOption {
      type = lib.types.attrs;
      default = { };
      description = "Additional settings to pass to bluesky-pds.";
    };
    backupLogDir = lib.mkOption {
      type = lib.types.path;
      default = "/var/log/pds-backup";
      description = "Directory for backup logs.";
    };
    maxRetries = lib.mkOption {
      type = lib.types.int;
      default = 3;
      description = "Maximum number of retry attempts for S3 upload.";
    };
    retryInterval = lib.mkOption {
      type = lib.types.int;
      default = 60;
      description = "Seconds to wait between retry attempts.";
    };
  };

  config = lib.mkIf cfg.enable {
    services.bluesky-pds = {
      enable = true;
      settings = lib.mkMerge [
        # Litestream must own WAL checkpointing, so PDS's automatic
        # checkpoints are disabled.
        { PDS_SQLITE_DISABLE_WAL_AUTO_CHECKPOINT = "true"; }
        cfg.pdsSettings
      ];
      environmentFiles = [ cfg.pdsSecretsFile ];
    };

    # Continuous replication sidecar for the PDS SQLite databases.
    systemd.services.litestream-pds = {
      description = "Litestream backup for PDS databases";
      after = [
        "network.target"
        "bluesky-pds.service"
      ];
      # Requires= propagates a STOP of bluesky-pds to this unit, but not a
      # START.  Without the extra wantedBy, the backup job's stop/start
      # cycle (a clean stop, so Restart=on-failure never fires) would leave
      # replication dead until the next boot.  Being wanted by
      # bluesky-pds.service means starting PDS pulls litestream back up.
      requires = [ "bluesky-pds.service" ];
      wantedBy = [
        "multi-user.target"
        "bluesky-pds.service"
      ];

      serviceConfig = {
        ExecStart = "${pkgs.litestream}/bin/litestream replicate -config ${litestreamConfigFile}";
        EnvironmentFile = cfg.s3CredentialsFile;
        User = pdsUser;
        Group = pdsGroup;
        Restart = "on-failure";
        RestartSec = "5s";
        # Hardening: litestream only needs the data dir and network access.
        NoNewPrivileges = true;
        ProtectSystem = "full";
        RestrictRealtime = true;
      };
    };

    # Daily cold full backup (see backupScript above), driven by the timer.
    systemd.services.pds-backup = {
      description = "Backup PDS data to S3";
      serviceConfig = {
        ExecStart = "${backupScript}/bin/pds-backup-script";
        Environment = [
          "PDS_DATA_DIR=${cfg.pdsDataDir}"
          "LOG_DIR=${cfg.backupLogDir}"
          "MAX_RETRIES=${toString cfg.maxRetries}"
          "RETRY_INTERVAL=${toString cfg.retryInterval}"
        ];
        EnvironmentFile = [ cfg.s3CredentialsFile ];
        # Root: the script stops/starts units and reads the whole data dir.
        User = "root";
        Type = "oneshot";
      };
    };

    systemd.timers.pds-backup = {
      wantedBy = [ "timers.target" ];
      timerConfig = {
        OnCalendar = "daily";
        # Catch up on runs missed while the machine was off.
        Persistent = true;
      };
    };

    # Expose the manual restore tools (and litestream itself) system-wide.
    environment.systemPackages = [
      restoreScript
      litestreamRestore
      pkgs.litestream
    ];
  };
}
+16
modules/overlays/default.nix
+16
modules/overlays/default.nix
···
14
14
};
15
15
vendorHash = "sha256-UZNYHx5y+kRp3AJq6s4Wy+k789GDG7FBTSzCTorVjgg=";
16
16
});
17
+
18
+
  # bluesky-pds build fails with the default nodejs because of sqlite3's
  # node-gyp step; pin nodejs 22 instead.
  bluesky-pds = prev.bluesky-pds.override { nodejs = prev.nodejs_22; };

  # Track litestream development HEAD; needed for features discussed in
  # https://github.com/benbjohnson/litestream/issues/912
  litestream = prev.litestream.overrideAttrs (old: {
    version = "devel";
    src = prev.fetchFromGitHub {
      owner = "benbjohnson";
      repo = "litestream";
      rev = "92fc139923d2b13909ba8b0e5df8b63d45a91648";
      sha256 = "sha256-UDyI4pcd8fUdVzvuLBFKifVORYto0yvtMc1pEUY2OaU=";
    };
    vendorHash = "sha256-MFKyECRWvhHwV0NZuuUQ0OYHpyTjRg0vKHuDNzaZJ7c=";
    # Drop nixpkgs patches — presumably they no longer apply to this rev;
    # TODO(review): confirm when bumping the pin.
    patches = [ ];
  });
17
33
}
+1
-2
systems/reg/default.nix
+1
-2
systems/reg/default.nix
-112
systems/reg/pds-backup.nix
-112
systems/reg/pds-backup.nix
···
1
-
{ config, pkgs, ... }:
2
-
let
3
-
restoreScript = pkgs.writeShellScriptBin "pds-restore" ''
4
-
set -e
5
-
6
-
echo "Starting PDS restore..."
7
-
8
-
set -a
9
-
source ${config.sops.secrets.s3.path}
10
-
set +a
11
-
12
-
LATEST=$(${pkgs.awscli2}/bin/aws s3 ls s3://$S3_BUCKET/backups/ | sort | tail -1 | awk '{print $4}')
13
-
[ -z "$LATEST" ] && echo "No backups found." && exit 1
14
-
15
-
echo "Latest backup: $LATEST"
16
-
${pkgs.awscli2}/bin/aws s3 cp s3://$S3_BUCKET/backups/$LATEST /tmp/$LATEST
17
-
18
-
systemctl stop bluesky-pds
19
-
rm -rf /var/lib/pds/*
20
-
21
-
${pkgs.gnutar}/bin/tar -xzf /tmp/$LATEST -C /var/lib/pds
22
-
chown -R pds:pds /var/lib/pds
23
-
24
-
systemctl start bluesky-pds
25
-
26
-
echo "Restore completed."
27
-
'';
28
-
backupScript = pkgs.writeShellScript "pds-backup-script" ''
29
-
SOURCE_DIR="$PDS_DATA_DIR"
30
-
S3_BUCKET="$S3_BUCKET"
31
-
LOG_DIR="/var/log/pds-backup"
32
-
DATE_LABEL=$(date +"%Y%m%d-%H%M")
33
-
LOG_FILE="$LOG_DIR/$DATE_LABEL.log"
34
-
ARCHIVE_FILE="/tmp/pds-backup-$DATE_LABEL.tar.gz"
35
-
MAX_RETRIES=3
36
-
RETRY_INTERVAL=60
37
-
38
-
fail() {
39
-
echo "$(date): ERROR: $1" | tee -a "$LOG_FILE"
40
-
systemctl restart "$PDS_SERVICE" 2>/dev/null || echo "$(date): WARNING: Failed to restart PDS service after failure." >> "$LOG_FILE"
41
-
exit 1
42
-
}
43
-
44
-
mkdir -p "$LOG_DIR"
45
-
46
-
systemctl list-units --full -all | grep -Fq "$PDS_SERVICE.service" || fail "PDS service not found."
47
-
48
-
systemctl stop "$PDS_SERVICE" 2>/dev/null && echo "$(date): Stopped PDS service." >> "$LOG_FILE" || echo "$(date): Failed to stop PDS service." >> "$LOG_FILE"
49
-
50
-
[ -d "$SOURCE_DIR" ] || fail "Source directory $SOURCE_DIR does not exist."
51
-
"$TAR_CMD" -czf "$ARCHIVE_FILE" -C "$SOURCE_DIR" . 2>> "$LOG_FILE" || fail "Failed to create archive."
52
-
53
-
attempt=1
54
-
while [ $attempt -le $MAX_RETRIES ]; do
55
-
if "$AWS_CMD" s3 cp "$ARCHIVE_FILE" "s3://$S3_BUCKET/backups/$DATE_LABEL.tar.gz" 2>> "$LOG_FILE"; then
56
-
echo "$(date): Upload successful." >> "$LOG_FILE"
57
-
break
58
-
else
59
-
[ $attempt -lt $MAX_RETRIES ] && sleep $RETRY_INTERVAL || fail "Upload failed after retries."
60
-
fi
61
-
((attempt++))
62
-
done
63
-
64
-
rm -f "$ARCHIVE_FILE"
65
-
66
-
systemctl start "$PDS_SERVICE" 2>/dev/null || fail "Failed to start PDS service."
67
-
68
-
find "$LOG_DIR" -name "*.log" -mtime +90 -delete
69
-
[ $(find "$LOG_FILE" -mtime +30) ] && mv "$LOG_FILE" "$LOG_FILE.old" && touch "$LOG_FILE"
70
-
[ $(wc -l < "$LOG_FILE") -gt 1000 ] && mv "$LOG_FILE" "$LOG_FILE.old" && touch "$LOG_FILE"
71
-
72
-
echo "$(date): Backup completed." >> "$LOG_FILE"
73
-
'';
74
-
in
75
-
{
76
-
sops.secrets.s3 = {
77
-
format = "dotenv";
78
-
sopsFile = ../../secrets/pds-backup-s3.env;
79
-
};
80
-
81
-
environment.systemPackages = [ restoreScript ];
82
-
83
-
systemd.services.pds-backup = {
84
-
description = "Backup PDS data to S3";
85
-
path = [
86
-
pkgs.awscli2
87
-
pkgs.coreutils
88
-
pkgs.gnutar
89
-
pkgs.gzip
90
-
];
91
-
serviceConfig = {
92
-
ExecStart = "${backupScript}";
93
-
Environment = [
94
-
"PDS_DATA_DIR=${config.services.bluesky-pds.settings.PDS_DATA_DIRECTORY}"
95
-
"PDS_SERVICE=bluesky-pds"
96
-
"TAR_CMD=${pkgs.gnutar}/bin/tar"
97
-
"AWS_CMD=${pkgs.awscli2}/bin/aws"
98
-
];
99
-
EnvironmentFile = [ config.sops.secrets.s3.path ];
100
-
User = "root";
101
-
Type = "oneshot";
102
-
};
103
-
};
104
-
105
-
systemd.timers.pds-backup = {
106
-
wantedBy = [ "timers.target" ];
107
-
timerConfig = {
108
-
OnCalendar = "daily";
109
-
Persistent = true;
110
-
};
111
-
};
112
-
}
+15
-5
systems/reg/pds.nix
+15
-5
systems/reg/pds.nix
···
1
-
{ config, ... }:
1
+
{ config, self, ... }:
2
2
{
3
-
imports = [ ../../modules/nixos/services/acme-nginx.nix ];
3
+
imports = [
4
+
self.nixosModules.pds-backup
5
+
../../modules/nixos/services/acme-nginx.nix
6
+
];
7
+
4
8
sops = {
5
9
secrets.pds = {
6
10
format = "dotenv";
···
11
15
format = "dotenv";
12
16
sopsFile = ../../secrets/cloudflare-api.env;
13
17
};
18
+
secrets.pds-s3 = {
19
+
format = "dotenv";
20
+
sopsFile = ../../secrets/pds-backup-s3.env;
21
+
};
14
22
};
15
23
16
-
services.bluesky-pds = {
24
+
services.pds-backup = {
17
25
enable = true;
18
-
settings = {
26
+
pdsSecretsFile = config.sops.secrets.pds.path;
27
+
s3CredentialsFile = config.sops.secrets.pds-s3.path;
28
+
29
+
pdsSettings = {
19
30
PDS_HOSTNAME = "0xf.fr";
20
31
};
21
-
environmentFiles = [ config.sops.secrets.pds.path ];
22
32
};
23
33
24
34
services.acme-nginx = {