···131132- [transfer-sh](https://github.com/dutchcoders/transfer.sh), a tool that supports easy and fast file sharing from the command-line. Available as [services.transfer-sh](#opt-services.transfer-sh.enable).
13300134- [MollySocket](https://github.com/mollyim/mollysocket) which allows getting Signal notifications via UnifiedPush.
135136- [Suwayomi Server](https://github.com/Suwayomi/Suwayomi-Server), a free and open source manga reader server that runs extensions built for [Tachiyomi](https://tachiyomi.org). Available as [services.suwayomi-server](#opt-services.suwayomi-server.enable).
137138- [ping_exporter](https://github.com/czerwonk/ping_exporter), a Prometheus exporter for ICMP echo requests. Available as [services.prometheus.exporters.ping](#opt-services.prometheus.exporters.ping.enable).
00139140- [TigerBeetle](https://tigerbeetle.com/), a distributed financial accounting database designed for mission critical safety and performance. Available as [services.tigerbeetle](#opt-services.tigerbeetle.enable).
141···282- `mkosi` was updated to v20. Parts of the user interface have changed. Consult the
283 release notes of [v19](https://github.com/systemd/mkosi/releases/tag/v19) and
284 [v20](https://github.com/systemd/mkosi/releases/tag/v20) for a list of changes.
00285286- The `services.vikunja` systemd service now uses `vikunja` as dynamic user instead of `vikunja-api`. Database users might need to be changed.
287
···131132- [transfer-sh](https://github.com/dutchcoders/transfer.sh), a tool that supports easy and fast file sharing from the command-line. Available as [services.transfer-sh](#opt-services.transfer-sh.enable).
133134+- [FCast Receiver](https://fcast.org), an open-source alternative to Chromecast and AirPlay. Available as [programs.fcast-receiver](#opt-programs.fcast-receiver.enable).
135+136- [MollySocket](https://github.com/mollyim/mollysocket) which allows getting Signal notifications via UnifiedPush.
137138- [Suwayomi Server](https://github.com/Suwayomi/Suwayomi-Server), a free and open source manga reader server that runs extensions built for [Tachiyomi](https://tachiyomi.org). Available as [services.suwayomi-server](#opt-services.suwayomi-server.enable).
139140- [ping_exporter](https://github.com/czerwonk/ping_exporter), a Prometheus exporter for ICMP echo requests. Available as [services.prometheus.exporters.ping](#opt-services.prometheus.exporters.ping.enable).
141+142+- [Prometheus DNSSEC Exporter](https://github.com/chrj/prometheus-dnssec-exporter), check for validity and expiration in DNSSEC signatures and expose metrics for Prometheus. Available as [services.prometheus.exporters.dnssec](#opt-services.prometheus.exporters.dnssec.enable).
143144- [TigerBeetle](https://tigerbeetle.com/), a distributed financial accounting database designed for mission critical safety and performance. Available as [services.tigerbeetle](#opt-services.tigerbeetle.enable).
145···286- `mkosi` was updated to v20. Parts of the user interface have changed. Consult the
287 release notes of [v19](https://github.com/systemd/mkosi/releases/tag/v19) and
288 [v20](https://github.com/systemd/mkosi/releases/tag/v20) for a list of changes.
289+290+- `gonic` has been updated to v0.16.4. Config now requires `playlists-path` to be set. See the rest of the [v0.16.0 release notes](https://github.com/sentriz/gonic/releases/tag/v0.16.0) for more details.
291292- The `services.vikunja` systemd service now uses `vikunja` as dynamic user instead of `vikunja-api`. Database users might need to be changed.
293
+2-2
nixos/modules/config/shells-environment.nix
···42 strings. The latter is concatenated, interspersed with colon
43 characters.
44 '';
45- type = with types; attrsOf (oneOf [ (listOf str) str path ]);
46- apply = mapAttrs (n: v: if isList v then concatStringsSep ":" v else "${v}");
47 };
4849 environment.profiles = mkOption {
···42 strings. The latter is concatenated, interspersed with colon
43 characters.
44 '';
45+ type = with types; attrsOf (oneOf [ (listOf (oneOf [ float int str ])) float int str path ]);
46+ apply = mapAttrs (n: v: if isList v then concatMapStringsSep ":" toString v else toString v);
47 };
4849 environment.profiles = mkOption {
···227 '';
228 };
229000000000000000000000000000000000000000000000000230 # Access to WHOIS server is required to properly test this exporter, so
231 # just perform basic sanity check that the exporter is running and returns
232 # a failure.
···227 '';
228 };
# End-to-end test for the Prometheus DNSSEC exporter: knot serves a
# locally DNSSEC-signed example.com zone, the exporter validates its
# signatures, and the test scrapes the exporter's metrics endpoint.
dnssec = {
  exporterConfig = {
    enable = true;
    configuration = {
      # Check validity/expiry of the signature covering the zone's SOA record.
      records = [
        {
          zone = "example.com";
          record = "@";
          type = "SOA";
        }
      ];
    };
    # Resolve through the local knot instance configured below.
    resolvers = [ "127.0.0.1:53" ];
  };
  metricProvider = {
    services.knot = {
      enable = true;
      settingsFile = pkgs.writeText "knot.conf" ''
        server:
            listen: 127.0.0.1@53
        template:
          - id: default
            storage: ${pkgs.buildEnv {
              name = "zones";
              paths = [(pkgs.writeTextDir "example.com.zone" ''
                @ SOA ns1.example.com. noc.example.com. 2024032401 86400 7200 3600000 172800
                @ NS ns1
                ns1 A 192.168.0.1
              '')];
            }}
            zonefile-load: difference
            # Never write the (re-)signed zone back to the read-only store path.
            zonefile-sync: -1
        zone:
          - domain: example.com
            file: example.com.zone
            dnssec-signing: on
      '';
    };
  };
  exporterTest = ''
    wait_for_unit("knot.service")
    wait_for_open_port(53)
    wait_for_unit("prometheus-dnssec-exporter.service")
    wait_for_open_port(9204)
    succeed("curl -sSf http://localhost:9204/metrics | grep 'example.com'")
  '';
};
277+278 # Access to WHOIS server is required to properly test this exporter, so
279 # just perform basic sanity check that the exporter is running and returns
280 # a failure.
···27}:
2829let
30- version = "1.17.1";
3132 # build stimuli file for PGO build and the script to generate it
33 # independently of the foot's build, so we can cache the result
···99 owner = "dnkl";
100 repo = "foot";
101 rev = version;
102- hash = "sha256-B6RhzsOPwczPLJRx3gBFZZvklwx9IwqplRG2vsAPIlg=";
103 };
104105 separateDebugInfo = true;
···27}:
2829let
30+ version = "1.17.2";
3132 # build stimuli file for PGO build and the script to generate it
33 # independently of the foot's build, so we can cache the result
···99 owner = "dnkl";
100 repo = "foot";
101 rev = version;
102+ hash = "sha256-p+qaWHBrUn6YpNyAmQf6XoQyO3degHP5oMN53/9gIr4=";
103 };
104105 separateDebugInfo = true;
···1-#!/usr/bin/env nix-shell
2-#! nix-shell -i "python3 -I" -p python3
3-4-from contextlib import contextmanager
5-from pathlib import Path
6-from typing import Iterable, Optional
7-from urllib import request
8-9-import hashlib, json
def getMetadata(apiKey: str, family: str = "Noto Emoji"):
    '''Fetch the Google Fonts metadata for a given family.

    An API key can be obtained by anyone with a Google account (🚮) from
    `https://developers.google.com/fonts/docs/developer_api#APIKey`

    Returns the decoded JSON response of the webfonts v1 API.
    '''
    from urllib.parse import urlencode

    query = urlencode({'key': apiKey, 'family': family})
    url = "https://www.googleapis.com/webfonts/v1/webfonts?" + query
    with request.urlopen(url) as response:
        return json.load(response)
def getUrls(metadata) -> Iterable[str]:
    '''Fetch all files' URLs from Google Fonts' metadata.

    The metadata must obey the API v1 schema, and can be obtained from:
    https://www.googleapis.com/webfonts/v1/webfonts?key=${GOOGLE_FONTS_TOKEN}&family=${FAMILY}

    Yields every file URL of every item, lazily.
    '''
    for item in metadata['items']:
        yield from item['files'].values()
def hashUrl(url: str, *, hash: str = 'sha256'):
    '''Compute the hash of the data from HTTP GETing a given `url`.

    The `hash` must be an algorithm name `hashlib.new` accepts.
    Returns the (finalized) hashlib object, not a digest.
    '''
    with request.urlopen(url) as response:
        payload = response.read()
    return hashlib.new(hash, payload)
def sriEncode(h) -> str:
    '''Encode a hash in the SRI format.

    Takes a `hashlib` object, and produces a string that
    nixpkgs' `fetchurl` accepts as `hash` parameter.
    '''
    from base64 import b64encode
    digest_b64 = b64encode(h.digest()).decode()
    return f"{h.name}-{digest_b64}"
def validateSRI(sri: Optional[str]) -> Optional[str]:
    '''Decode an SRI hash, return `None` if invalid.

    This is not a full SRI hash parser, hash options aren't supported.

    A valid SRI is `<algorithm>-<base64 digest>` where the algorithm is
    known to `hashlib` and the decoded digest has the expected size.
    '''
    from base64 import b64decode

    if sri is None:
        return None

    try:
        hashName, b64 = sri.split('-', 1)
        h = hashlib.new(hashName)
        digest = b64decode(b64, validate=True)
        # Explicit check instead of `assert`: asserts are stripped under
        # `python -O`, which would silently accept truncated digests.
        if len(digest) != h.digest_size:
            return None
    except ValueError:
        # Covers: no '-' separator (unpacking), unknown algorithm
        # (hashlib.new), and invalid base64 (binascii.Error is a
        # ValueError subclass). The original bare `except:` also swallowed
        # SystemExit/KeyboardInterrupt, which was too broad.
        return None
    return sri
def hashUrls(
    urls: Iterable[str],
    knownHashes: Optional[dict[str, str]] = None,
) -> dict[str, str]:
    '''Generate a `dict` mapping URLs to SRI-encoded hashes.

    The `knownHashes` optional parameter can be used to avoid
    re-downloading files whose URL have not changed: a URL whose cached
    hash is a valid SRI string is kept as-is, anything else is fetched
    and hashed via `hashUrl`.
    '''
    # `None` sentinel instead of a mutable `{}` default argument
    # (the dict was only read, but the pattern is a known footgun).
    if knownHashes is None:
        knownHashes = {}
    return {
        url: validateSRI(knownHashes.get(url)) or sriEncode(hashUrl(url))
        for url in urls
    }
@contextmanager
def atomicFileUpdate(target: Path):
    '''Atomically replace the contents of a file.

    Yields an open file to write into; upon exiting the context,
    the file is closed and (atomically) replaces the `target`.

    Guarantees that the `target` was either successfully overwritten
    with new content and no exception was raised, or the temporary
    file was cleaned up.
    '''
    from tempfile import mkstemp

    # Create the scratch file next to the target so the final
    # rename stays on the same filesystem (required for atomicity).
    fd, tmpName = mkstemp(dir=target.parent, prefix=target.name)
    tmpPath = Path(tmpName)

    try:
        with open(fd, 'w') as handle:
            yield handle
        # Writer closed without raising: promote the scratch file.
        tmpPath.replace(target)
    except Exception:
        # Roll back: drop the scratch file, leave `target` untouched.
        tmpPath.unlink(missing_ok=True)
        raise
118-119-120-if __name__ == "__main__":
121- from os import environ
122- from urllib.error import HTTPError
123-124- environVar = 'GOOGLE_FONTS_TOKEN'
125- currentDir = Path(__file__).parent
126- metadataPath = currentDir / 'noto-emoji.json'
127-128- try:
129- apiToken = environ[environVar]
130- metadata = getMetadata(apiToken)
131-132- except (KeyError, HTTPError) as exn:
133- # No API key in the environment, or the query was rejected.
134- match exn:
135- case KeyError if exn.args[0] == environVar:
136- print(f"No '{environVar}' in the environment, "
137- "skipping metadata update")
138-139- case HTTPError if exn.getcode() == 403:
140- print("Got HTTP 403 (Forbidden)")
141- if apiToken != '':
142- print("Your Google API key appears to be valid "
143- "but does not grant access to the fonts API.")
144- print("Aborting!")
145- raise SystemExit(1)
146-147- case HTTPError if exn.getcode() == 400:
148- # Printing the supposed token should be fine, as this is
149- # what the API returns on invalid tokens.
150- print(f"Got HTTP 400 (Bad Request), is this really an API token: '{apiToken}' ?")
151- case _:
152- # Unknown error, let's bubble it up
153- raise
154-155- # In that case just use the existing metadata
156- with metadataPath.open() as metadataFile:
157- metadata = json.load(metadataFile)
158-159- lastModified = metadata["items"][0]["lastModified"];
160- print(f"Using metadata from file, last modified {lastModified}")
161-162- else:
163- # If metadata was successfully fetched, validate and persist it
164- lastModified = metadata["items"][0]["lastModified"];
165- print(f"Fetched current metadata, last modified {lastModified}")
166- with atomicFileUpdate(metadataPath) as metadataFile:
167- json.dump(metadata, metadataFile, indent = 2)
168- metadataFile.write("\n") # Pacify nixpkgs' dumb editor config check
169-170- hashPath = currentDir / 'noto-emoji.hashes.json'
171- try:
172- with hashPath.open() as hashFile:
173- hashes = json.load(hashFile)
174- except FileNotFoundError:
175- hashes = {}
176-177- with atomicFileUpdate(hashPath) as hashFile:
178- json.dump(
179- hashUrls(getUrls(metadata), knownHashes = hashes),
180- hashFile,
181- indent = 2,
182- )
183- hashFile.write("\n") # Pacify nixpkgs' dumb editor config check