nixpkgs mirror (for testing)
github.com/NixOS/nixpkgs
nix
1#! /usr/bin/env nix-shell
2#! nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ packaging rich ])" -p pyright ruff isort nixfmt
3#
4# This script downloads Home Assistant's source tarball.
5# Inside the homeassistant/components directory, each integration has an associated manifest.json,
6# specifying required packages and other integrations it depends on:
7#
8# {
9# "requirements": [ "package==1.2.3" ],
10# "dependencies": [ "component" ]
11# }
12#
13# By parsing the files, a dictionary mapping integrations to requirements and dependencies is created.
14# For all of these requirements and the dependencies' requirements,
15# nixpkgs' python3Packages are searched for appropriate names.
16# Then, a Nix attribute set mapping integration name to dependencies is created.
17
18import json
19import os
20import pathlib
21import re
22import subprocess
23import sys
24import tarfile
25import tempfile
26from functools import reduce
27from io import BytesIO
28from typing import Any, Dict, List, Optional, Set
29from urllib.request import urlopen
30
31from packaging import version as Version
32from packaging.version import InvalidVersion
33from rich.console import Console
34from rich.table import Table
35
# Manifest key prefix for Home Assistant integrations (e.g. "homeassistant.components.mqtt")
# NOTE(review): appears unused within this script — confirm before removing.
COMPONENT_PREFIX = "homeassistant.components"
# Attribute path of the Python package set that requirements are resolved against
PKG_SET = "home-assistant.python.pkgs"

# If some requirements are matched by multiple or no Python packages, the
# following can be used to choose the correct one.
# Identity entries (e.g. "av" -> "av") pin a name that would otherwise match
# several derivations in the package set.
PKG_PREFERENCES = {
    "av": "av",
    "fiblary3": "fiblary3-fork",  # https://github.com/home-assistant/core/issues/66466
    "HAP-python": "hap-python",
    "ha-av": "av",
    "numpy": "numpy",
    "ollama-hass": "ollama",
    "paho-mqtt": "paho-mqtt",
    "sentry-sdk": "sentry-sdk",
    "slackclient": "slack-sdk",
    "SQLAlchemy": "sqlalchemy",
    "tensorflow": "tensorflow",
    "yt-dlp": "yt-dlp",
}

# Some dependencies are loaded dynamically at runtime, and are not
# mentioned in the manifest files. These are merged into the parsed
# manifests by parse_components().
EXTRA_COMPONENT_DEPS = {
    "conversation": [
        "intent"
    ],
    "default_config": [
        "backup",
    ],
}

# Sometimes we have unstable versions for libraries that are not
# well-maintained. This allows us to mark our weird version as newer
# than a certain wanted version (keys are attribute names in PKG_SET).
OUR_VERSION_IS_NEWER_THAN = {
    "blinkstick": "1.2.0",
    "gps3": "0.33.3",
    "proxmoxer": "2.2.0",
    "pybluez": "0.22",
    "pyps4-2ndscreen": "1.3.1",
}
77
78
79
def run_sync(cmd: List[str]) -> None:
    """Echo *cmd* to stdout and run it, aborting the whole script on failure."""
    print("$ " + " ".join(cmd))
    if subprocess.run(cmd).returncode != 0:
        sys.exit(1)
86
87
def get_version() -> str:
    """Read the packaged Home Assistant version from the default.nix next to this script.

    Raises RuntimeError when no hassVersion assignment is found.
    """
    default_nix = os.path.dirname(sys.argv[0]) + "/default.nix"
    with open(default_nix) as f:
        contents = f.read()
    # A version consists of digits, dots, and possibly a "b" (for beta)
    match = re.search('hassVersion = "([\\d\\.b]+)";', contents)
    if match is None:
        raise RuntimeError("hassVersion not in default.nix")
    return match.group(1)
94
95
def parse_components(version: str = "master"):
    """Download the Home Assistant source tarball for *version* and return
    (components, components_with_tests).

    components maps each enabled integration domain to its parsed manifest
    (with EXTRA_COMPONENT_DEPS merged in); components_with_tests lists the
    directory names found under tests/components in the source tree.
    """
    components = {}
    components_with_tests = []
    with tempfile.TemporaryDirectory() as tmp:
        with urlopen(
            f"https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz"
        ) as response:
            # filter="data" rejects unsafe tar members (path traversal, devices, ...)
            tarfile.open(fileobj=BytesIO(response.read())).extractall(tmp, filter="data")
        # Use part of a script from the Home Assistant codebase
        core_path = os.path.join(tmp, f"core-{version}")

        for entry in os.scandir(os.path.join(core_path, "tests/components")):
            if entry.is_dir():
                components_with_tests.append(entry.name)

        # Make the extracted tree importable so hassfest's own manifest
        # parser can be reused; the import must come after this append.
        sys.path.append(core_path)
        from script.hassfest.model import Config, Integration  # type: ignore
        config = Config(
            root=pathlib.Path(core_path),
            specific_integrations=None,
            action="generate",
            requirements=False,
        )
        integrations = Integration.load_dir(config.core_integrations_path, config)
        for domain in sorted(integrations):
            integration = integrations[domain]
            # Patch in dependencies that are only loaded dynamically at runtime
            if extra_deps := EXTRA_COMPONENT_DEPS.get(integration.domain):
                integration.dependencies.extend(extra_deps)
            if not integration.disabled:
                components[domain] = integration.manifest

    return components, components_with_tests
128
129
def get_reqs(components: Dict[str, Dict[str, Any]], component: str, processed: Set[str]) -> Set[str]:
    """Recursively collect the requirements of *component* and of everything it
    depends on (both "dependencies" and "after_dependencies").

    *processed* records already-visited components so dependency cycles
    terminate; callers pass a fresh set().
    """
    manifest = components[component]
    requirements = set(manifest.get("requirements", []))
    # Concatenate into a NEW list: the previous implementation called
    # .extend() on the manifest's own "dependencies" list, mutating shared
    # manifest data and re-appending after_dependencies on every call.
    deps = manifest.get("dependencies", []) + manifest.get("after_dependencies", [])
    processed.add(component)
    for dependency in deps:
        if dependency not in processed:
            requirements.update(get_reqs(components, dependency, processed))
    return requirements
140
141
def repository_root() -> str:
    """Absolute path of the nixpkgs checkout, four directories above this script."""
    script = sys.argv[0]
    return os.path.abspath(os.path.join(script, "..", "..", "..", ".."))
144
145
# For a package attribute and an extra, check if the package exposes it via passthru.optional-dependencies
def has_extra(package: str, extra: str):
    """Return True when nix-instantiate can evaluate
    ``<package>.optional-dependencies.<extra>`` in the repository."""
    result = subprocess.run(
        [
            "nix-instantiate",
            repository_root(),
            "-A",
            f"{package}.optional-dependencies.{extra}",
        ],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return result.returncode == 0
164
165
def dump_packages() -> Dict[str, Dict[str, str]]:
    """Query nix-env for a JSON dump of Nixpkgs' python3Packages (PKG_SET),
    with aliases disabled, and return it as a dict keyed by attribute path."""
    cmd = [
        "nix-env",
        "-f",
        repository_root(),
        "-qa",
        "-A",
        PKG_SET,
        "--arg", "config", "{ allowAliases = false; }",
        "--json",
    ]
    return json.loads(subprocess.check_output(cmd))
181
182
def name_to_attr_path(req: str, packages: Dict[str, Dict[str, str]]) -> Optional[str]:
    """Resolve a requirement name to its attribute path inside PKG_SET.

    Returns None when nothing matches; asserts when more than one
    derivation matches the same requirement.
    """
    # Hard-coded preferences win over any name matching
    if (preferred := PKG_PREFERENCES.get(req)) is not None:
        return f"{PKG_SET}.{preferred}"
    candidates = [req]
    # E.g. python-mpd2 is actually called python3.6-mpd2
    # instead of python-3.6-python-mpd2 inside Nixpkgs
    if req.startswith(("python-", "python_")):
        candidates.append(req[len("python-"):])
    matches = []
    for candidate in candidates:
        # treat "-" and "_" equally
        fuzzy = re.sub("[-_]", "[-_]", candidate)
        # python(major).(minor)-(pname)-(version or unstable-date)
        # we need the version qualifier, or we'll have multiple matches
        # (e.g. pyserial and pyserial-asyncio when looking for pyserial)
        pattern = re.compile(f"^python\\d+\\.\\d+-{fuzzy}-(?:\\d|unstable-.*)", re.I)
        matches.extend(
            attr_path
            for attr_path, package in packages.items()
            if pattern.match(package["name"])
        )
    # Let's hope there's only one derivation with a matching name
    assert len(matches) <= 1, f"{req} matches more than one derivation: {matches}"
    return matches[0] if matches else None
208
209
def get_pkg_version(attr_path: str, packages: Dict[str, Dict[str, str]]) -> Optional[str]:
    """Version string of the package at *attr_path*, or None when absent."""
    if entry := packages.get(attr_path):
        return entry["version"]
    return None
215
216
def main() -> None:
    """Regenerate component-packages.nix for the pinned Home Assistant version.

    Resolves every component's transitive Python requirements to nixpkgs
    attribute paths, writes the resulting Nix attribute set next to this
    script, runs nixfmt on it, then prints coverage statistics and a table
    of packages that are older than what Home Assistant wants.
    """
    packages = dump_packages()
    version = get_version()
    print("Generating component-packages.nix for version {}".format(version))
    components, components_with_tests = parse_components(version=version)
    build_inputs = {}
    outdated = {}
    for component in sorted(components.keys()):
        attr_paths = []
        extra_attrs = []
        missing_reqs = []
        reqs = sorted(get_reqs(components, component, set()))
        for req in reqs:
            # Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0
            # Therefore, if there's a "#" in the line, only take the part after it
            req = req[req.find("#") + 1 :]
            name, required_version = req.split("==", maxsplit=1)
            # Strip conditions off version constraints e.g. "1.0; python<3.11"
            required_version = required_version.split(";").pop(0)
            # Split package name and extra requires, e.g. "pkg[extra1,extra2]"
            extras = []
            if name.endswith("]"):
                extras = name[name.find("[") + 1 : name.find("]")].split(",")
                name = name[: name.find("[")]
            attr_path = name_to_attr_path(name, packages)
            if attr_path:
                if our_version := get_pkg_version(attr_path, packages):
                    attr_name = attr_path.split(".")[-1]
                    attr_outdated = False
                    try:
                        # Probe whether our version is PEP 440 parseable at all
                        Version.parse(our_version)
                    except InvalidVersion:
                        print(
                            f"Attribute {attr_name} has invalid version specifier {our_version}",
                            file=sys.stderr,
                        )
                        # allow specifying that our unstable version is newer than some version
                        if newer_than_version := OUR_VERSION_IS_NEWER_THAN.get(attr_name):
                            attr_outdated = Version.parse(newer_than_version) < Version.parse(required_version)
                        else:
                            attr_outdated = True
                    else:
                        attr_outdated = Version.parse(our_version) < Version.parse(required_version)
                    finally:
                        # Record the mismatch even when the comparison raised
                        if attr_outdated:
                            outdated[attr_name] = {
                                'wanted': required_version,
                                'current': our_version,
                            }
            if attr_path is not None:
                # Add attribute path without "python3Packages." prefix
                pname = attr_path[len(PKG_SET + "."):]
                attr_paths.append(pname)
                for extra in extras:
                    # Check if package advertises extra requirements
                    extra_attr = f"{pname}.optional-dependencies.{extra}"
                    if has_extra(attr_path, extra):
                        extra_attrs.append(extra_attr)
                    else:
                        missing_reqs.append(extra_attr)
            else:
                missing_reqs.append(name)
        # The original attached this via `for ... else`; the loop contains no
        # `break`, so the else-branch ran unconditionally — plain code is clearer.
        build_inputs[component] = (attr_paths, extra_attrs, missing_reqs)

    outpath = os.path.dirname(sys.argv[0]) + "/component-packages.nix"
    with open(outpath, "w") as f:
        f.write("# Generated by update-component-packages.py\n")
        f.write("# Do not edit!\n\n")
        f.write("{\n")
        f.write(f'  version = "{version}";\n')
        f.write("  components = {\n")
        for component, deps in build_inputs.items():
            available, extras, missing = deps
            f.write(f'    "{component}" = ps: with ps; [')
            if available:
                f.write("\n      " + "\n      ".join(sorted(available)))
            f.write("\n    ]")
            if extras:
                f.write("\n    ++ " + "\n    ++ ".join(sorted(extras)))
            f.write(";")
            if len(missing) > 0:
                f.write(f"  # missing inputs: {' '.join(sorted(missing))}")
            f.write("\n")
        f.write("  };\n")
        f.write("  # components listed in tests/components for which all dependencies are packaged\n")
        f.write("  supportedComponentsWithTests = [\n")
        for component, deps in build_inputs.items():
            available, extras, missing = deps
            if len(missing) == 0 and component in components_with_tests:
                f.write(f'    "{component}"' + "\n")
        f.write("  ];\n")
        f.write("}\n")

    run_sync(["nixfmt", outpath])

    # A component is supported when none of its requirements are missing;
    # a plain sum() replaces the previous reduce()-with-lambda.
    supported_components = sum(1 for c in components if build_inputs[c][2] == [])
    total_components = len(components)
    print(f"{supported_components} / {total_components} components supported, "
          f"i.e. {supported_components / total_components:.2%}")

    if outdated:
        table = Table(title="Outdated dependencies")
        table.add_column("Package")
        table.add_column("Current")
        table.add_column("Wanted")
        # Loop variable renamed: it previously shadowed the `version` string above
        for package, versions in sorted(outdated.items()):
            table.add_row(package, versions['current'], versions['wanted'])

        console = Console()
        console.print(table)
328
329
if __name__ == "__main__":
    # Lint and type-check this script itself before regenerating anything
    for lint_cmd in (
        ["pyright", __file__],
        ["ruff", "check", "--ignore=E501", __file__],
        ["isort", __file__],
    ):
        run_sync(lint_cmd)
    main()