1#! /usr/bin/env nix-shell
2#! nix-shell -i python -p python3.pkgs.joblib python3.pkgs.click python3.pkgs.click-log nix nix-prefetch-git nix-universal-prefetch prefetch-yarn-deps prefetch-npm-deps
3"""
4electron updater
5
6A script for updating both binary and source hashes.
7
8It supports the following modes:
9
10| Mode | Description |
11|------------- | ----------------------------------------------- |
12| `update` | for updating a specific Electron release |
13| `update-all` | for updating all electron releases at once |
14| `eval` | just print the necessary sources to fetch |
15
16The `eval` and `update` commands accept an optional `--version` flag
17to restrict the mechanism only to a given major release.
18
19The `update` and `update-all` commands accept an optional `--commit`
20flag to automatically commit the changes for you.
21
22The `update` and `update-all` commands accept optional `--bin-only`
and `--source-only` flags to restrict the update to binary or source
24releases.
25"""
26import base64
27import csv
28import json
29import logging
30import os
31import random
32import re
33import subprocess
34import sys
35import tempfile
36import traceback
37import urllib.request
38
39from abc import ABC
40from codecs import iterdecode
41from datetime import datetime
42from typing import Iterable, Optional, Tuple
43from urllib.request import urlopen
44
45import click
46import click_log
47
48from joblib import Parallel, delayed, Memory
49
# Vendor a pinned checkout of Chromium's depot_tools into a temporary
# directory so that its gclient modules can be imported below.
depot_tools_checkout = tempfile.TemporaryDirectory()
subprocess.check_call(
    [
        "nix-prefetch-git",
        "--builder",
        "--quiet",
        "--url",
        "https://chromium.googlesource.com/chromium/tools/depot_tools",
        "--out",
        depot_tools_checkout.name,
        "--rev",
        "7a69b031d58081d51c9e8e89557b343bba8518b1",
    ]
)
# Make the checkout importable (gclient_eval / gclient_utils below).
sys.path.append(depot_tools_checkout.name)
65
66import gclient_eval
67import gclient_utils
68
69
# Relative path to the electron-source info.json
SOURCE_INFO_JSON = "info.json"

# Relative path to the electron-bin info.json
BINARY_INFO_JSON = "binary/info.json"

# Number of spaces used for each indentation level
JSON_INDENT = 4

# Resolve the relative info.json paths against this script's directory.
os.chdir(os.path.dirname(__file__))

# On-disk joblib cache (directory "cache" next to this script) used to
# memoize the expensive prefetch/network helpers below.
memory: Memory = Memory("cache", verbose=0)

logger = logging.getLogger(__name__)
click_log.basic_config(logger)
85
86
class Repo:
    """Base class for a source dependency of electron.

    Subclasses set `fetcher` (the name of the nix fetcher function) and
    `args` (that fetcher's arguments) and implement `get_file`.
    """

    fetcher: str
    args: dict

    def __init__(self) -> None:
        # Transitive dependencies, keyed by checkout path.
        self.deps: dict = {}
        # Placeholder fake hash; replaced by `prefetch`.
        self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="

    def get_deps(self, repo_vars: dict, path: str) -> None:
        """Parse this repo's DEPS file and populate `self.deps` recursively.

        Args:
            repo_vars: gclient variables used to evaluate dep conditions.
            path: Checkout path of this repo; prefixes relative dep paths.
        """
        print(
            "evaluating " + json.dumps(self, default=vars, sort_keys=True),
            file=sys.stderr,
        )

        deps_file = self.get_file("DEPS")
        evaluated = gclient_eval.Parse(deps_file, filename="DEPS")

        # Variables declared in DEPS, overridden by caller-supplied ones.
        repo_vars = dict(evaluated["vars"]) | repo_vars

        prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

        deps: dict = {}
        for dep_name, dep in evaluated["deps"].items():
            if "condition" in dep and not gclient_eval.EvaluateCondition(
                dep["condition"], repo_vars
            ):
                continue
            # Build the dependency repo once (previously repo_from_dep was
            # called twice per dep: once for the filter, once for the value).
            dep_repo = repo_from_dep(dep)
            if dep_repo is not None:
                deps[prefix + dep_name] = dep_repo
        self.deps = deps

        for key in evaluated.get("recursedeps", []):
            dep_path = prefix + key
            # squirrel.mac is skipped deliberately.
            if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
                self.deps[dep_path].get_deps(repo_vars, dep_path)

    def prefetch(self) -> None:
        """Compute and store this repo's fixed-output hash."""
        self.hash = get_repo_hash(self.fetcher, self.args)

    def prefetch_all(self) -> list:
        """Return joblib `delayed` tasks prefetching this repo and all deps.

        (The annotation was previously `int`, but a list of tasks has
        always been returned.)
        """
        tasks = [delayed(self.prefetch)()]
        for dep in self.deps.values():
            tasks += dep.prefetch_all()
        return tasks

    def flatten_repr(self) -> dict:
        """Return a flat dict describing how to fetch this single repo."""
        return {"fetcher": self.fetcher, "hash": self.hash, **self.args}

    def flatten(self, path: str) -> dict:
        """Return a mapping of checkout path -> fetcher info for this repo and all deps."""
        out = {path: self.flatten_repr()}
        for dep_path, dep in self.deps.items():
            out |= dep.flatten(dep_path)
        return out

    def get_file(self, filepath: str) -> str:
        """Fetch a single file from the repo at its pinned revision."""
        raise NotImplementedError
144
145
class GitRepo(Repo):
    """A plain git dependency, prefetched with `fetchgit`."""

    def __init__(self, url: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchgit"
        self.args = {"url": url, "rev": rev}
154
155
class GitHubRepo(Repo):
    """A dependency hosted on GitHub, prefetched with `fetchFromGitHub`."""

    def __init__(self, owner: str, repo: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchFromGitHub"
        self.args = {"owner": owner, "repo": repo, "rev": rev}

    def get_file(self, filepath: str) -> str:
        """Download a single file at the pinned revision via raw.githubusercontent.com."""
        owner = self.args["owner"]
        repo = self.args["repo"]
        rev = self.args["rev"]
        url = f"https://raw.githubusercontent.com/{owner}/{repo}/{rev}/{filepath}"
        return urlopen(url).read().decode("utf-8")
174
175
class GitilesRepo(Repo):
    """A dependency hosted on a Gitiles server, prefetched with `fetchFromGitiles`."""

    def __init__(self, url: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchFromGitiles"
        self.args = {
            "url": url,
            "rev": rev,
        }

        # For the huge chromium checkout, prune large test-data trees
        # right after fetching to keep the store path manageable.
        if url == "https://chromium.googlesource.com/chromium/src.git":
            pruned_dirs = [
                "third_party/blink/web_tests",
                "third_party/hunspell/tests",
                "content/test/data",
                "courgette/testdata",
                "extensions/test/data",
                "media/test/data",
            ]
            self.args["postFetch"] = "".join(
                f"rm -r $out/{directory}; " for directory in pruned_dirs
            )

    def get_file(self, filepath: str) -> str:
        """Download a single file at the pinned revision (Gitiles serves base64 text)."""
        url = f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT"
        encoded = urlopen(url).read()
        return base64.b64decode(encoded).decode("utf-8")
201
202
class ElectronBinRepo(GitHubRepo):
    """The electron/electron GitHub repo, used to update the prebuilt (-bin) packages."""

    def __init__(self, owner: str, repo: str, rev: str) -> None:
        super().__init__(owner, repo, rev)
        # Maps nix system identifiers to the platform suffix used in
        # electron's release artifact file names.
        self.systems = {
            "i686-linux": "linux-ia32",
            "x86_64-linux": "linux-x64",
            "armv7l-linux": "linux-armv7l",
            "aarch64-linux": "linux-arm64",
            "x86_64-darwin": "darwin-x64",
            "aarch64-darwin": "darwin-arm64",
        }

    def get_shasums256(self, version: str) -> list:
        """Returns the contents of SHASUMS256.txt"""
        url = (
            f"https://github.com/electron/electron/releases/download/v{version}/SHASUMS256.txt"
        )
        try:
            called_process: subprocess.CompletedProcess = subprocess.run(
                ["nix-prefetch-url", "--print-path", url],
                capture_output=True,
                check=True,
                text=True,
            )
        except subprocess.CalledProcessError as err:
            print(err.stderr)
            sys.exit(1)

        # --print-path emits the hash on the first line, the store path on the second.
        hash_file_path = called_process.stdout.split("\n")[1]
        with open(hash_file_path, "r") as f:
            return f.read().split("\n")

    def get_headers(self, version: str) -> str:
        """Returns the hash of the release headers tarball"""
        url = (
            f"https://artifacts.electronjs.org/headers/dist/v{version}/node-v{version}-headers.tar.gz"
        )
        try:
            called_process: subprocess.CompletedProcess = subprocess.run(
                ["nix-prefetch-url", url],
                capture_output=True,
                check=True,
                text=True,
            )
        except subprocess.CalledProcessError as err:
            print(err.stderr)
            sys.exit(1)
        return called_process.stdout.split("\n")[0]

    def get_hashes(self, major_version: str) -> dict:
        """Returns a dictionary of hashes for a given major version"""
        release, _ = get_latest_version(major_version)
        version: str = release["version"]

        out = {
            major_version: {
                "hashes": {},
                "version": version,
            }
        }
        hashes_out = out[major_version]["hashes"]

        shasum_lines: list = self.get_shasums256(version)

        for nix_system, electron_system in self.systems.items():
            # SHASUMS256.txt lines look like "<hash> *<artifact>.zip".
            suffix = f"*electron-v{version}-{electron_system}.zip"
            matching = [line for line in shasum_lines if line.endswith(suffix)]
            if matching:
                hashes_out[nix_system] = matching[0].split(" ")[0]
        hashes_out["headers"] = self.get_headers(version)

        return out
277
278
# Releases that have reached end-of-life no longer receive any updates
# and it is rather pointless trying to update those.
#
# https://endoflife.date/electron
@memory.cache
def supported_version_range() -> range:
    """Returns a range of electron releases that have not reached end-of-life yet"""
    releases_json = json.loads(
        urlopen("https://endoflife.date/api/electron.json").read()
    )
    # Each entry's "eol" field is either the JSON boolean false (still
    # supported) or an ISO "YYYY-MM-DD" date string marking its EOL date.
    supported_releases = [
        int(release["cycle"])
        for release in releases_json
        if release["eol"] is False
        or datetime.strptime(release["eol"], "%Y-%m-%d") > datetime.today()
    ]

    return range(
        min(supported_releases),  # incl.
        # We have also packaged the beta release in nixpkgs,
        # but it is not tracked by endoflife.date
        max(supported_releases) + 2,  # excl.
    )
303
304
@memory.cache
def get_repo_hash(fetcher: str, args: dict) -> str:
    """Prefetch a source with nix-universal-prefetch and return its hash.

    Args:
        fetcher: The nix fetcher function name, e.g. "fetchFromGitHub".
        args: The fetcher's arguments, passed as --name value pairs.
    """
    cmd = ["nix-universal-prefetch", fetcher]
    for name, value in args.items():
        cmd += [f"--{name}", value]

    print(" ".join(cmd), file=sys.stderr)
    return subprocess.check_output(cmd).decode("utf-8").strip()
315
316
@memory.cache
def _get_yarn_hash(yarn_lock: str) -> str:
    """Compute the fixed-output hash for the given yarn.lock contents.

    Args:
        yarn_lock: The contents (not the path!) of a yarn.lock file.
           (The parameter was previously misleadingly named `path`.)
    """
    print("prefetch-yarn-deps", file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_file = tmp_dir + "/yarn.lock"
        with open(lock_file, "w") as f:
            f.write(yarn_lock)
        return (
            subprocess.check_output(["prefetch-yarn-deps", lock_file])
            .decode("utf-8")
            .strip()
        )
328
329
def get_yarn_hash(repo: Repo, yarn_lock_path: str = "yarn.lock") -> str:
    """Fetch `yarn_lock_path` from `repo` and return the prefetched yarn deps hash."""
    return _get_yarn_hash(repo.get_file(yarn_lock_path))
332
333
@memory.cache
def _get_npm_hash(package_lock: str) -> str:
    """Compute the fixed-output hash for the given package-lock.json contents.

    Args:
        package_lock: The contents (not a filename!) of a package-lock.json
            file. (The parameter was previously misleadingly named `filename`.)
    """
    print("prefetch-npm-deps", file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_file = tmp_dir + "/package-lock.json"
        with open(lock_file, "w") as f:
            f.write(package_lock)
        return (
            subprocess.check_output(["prefetch-npm-deps", lock_file])
            .decode("utf-8")
            .strip()
        )
347
348
def get_npm_hash(repo: Repo, package_lock_path: str = "package-lock.json") -> str:
    """Fetch `package_lock_path` from `repo` and return the prefetched npm deps hash."""
    return _get_npm_hash(repo.get_file(package_lock_path))
351
352
def repo_from_dep(dep: dict) -> Optional[Repo]:
    """Build a Repo for a single gclient DEPS entry, or None for non-git deps.

    Args:
        dep: One entry of a DEPS file's "deps" dict.

    Returns: A GitHubRepo, GitilesRepo or GitRepo depending on the host,
        or None if the entry is not a git dependency (e.g. a cipd package).
    """
    if "url" not in dep:
        # Not a git dependency; skip
        return None

    url, rev = gclient_utils.SplitUrlRevision(dep["url"])

    github_match = re.search(r"https://github.com/(.+)/(.+?)(\.git)?$", url)
    if github_match:
        return GitHubRepo(github_match.group(1), github_match.group(2), rev)

    # Dots escaped: the previous pattern ("https://.+.googlesource.com")
    # let "." match any character, so unrelated hosts could slip through.
    if re.match(r"https://.+\.googlesource\.com", url):
        return GitilesRepo(url, rev)

    return GitRepo(url, rev)
368
369
def get_gn_source(repo: Repo) -> dict:
    """Extract the pinned gn revision from `repo`'s DEPS file and prefetch it.

    Returns: A dict with a single "gn" entry holding version, url, rev and hash.
    """
    deps_content = repo.get_file("DEPS")
    gn_commit = re.search(
        r"'gn_version': 'git_revision:([0-9a-f]{40})'", deps_content
    ).group(1)
    prefetch_output: bytes = subprocess.check_output(
        [
            "nix-prefetch-git",
            "--quiet",
            "https://gn.googlesource.com/gn",
            "--rev",
            gn_commit,
        ]
    )
    gn: dict = json.loads(prefetch_output)
    gn_info = {
        "version": datetime.fromisoformat(gn["date"]).date().isoformat(),
        "url": gn["url"],
        "rev": gn["rev"],
        "hash": gn["hash"],
    }
    return {"gn": gn_info}
391
392
def get_latest_version(major_version: str) -> Tuple[dict, str]:
    """Returns the latest release for a given major version.

    Returns: A tuple (release, rev) where `release` is the release metadata
        dict from releases.electronjs.org and `rev` is the matching git tag,
        e.g. "v27.0.0". (The annotation previously claimed `Tuple[str, str]`,
        but the first element has always been a dict.)
    """
    electron_releases: dict = json.loads(
        urlopen("https://releases.electronjs.org/releases.json").read()
    )
    major_version_releases = filter(
        lambda item: item["version"].startswith(f"{major_version}."), electron_releases
    )
    # The most recently published release of this major line.
    m = max(major_version_releases, key=lambda item: item["date"])

    rev = f"v{m['version']}"
    return (m, rev)
405
406
def get_electron_bin_info(major_version: str) -> Tuple[str, dict, ElectronBinRepo]:
    """Returns (major_version, release metadata, repo) for the latest binary release.

    (The second tuple element is the release metadata dict, not a string as
    the annotation previously claimed.)
    """
    m, rev = get_latest_version(major_version)

    electron_repo: ElectronBinRepo = ElectronBinRepo("electron", "electron", rev)
    return (major_version, m, electron_repo)
412
413
def get_electron_info(major_version: str) -> Tuple[str, dict, GitHubRepo]:
    """Returns (major_version, release metadata, repo) for the latest source release.

    The returned repo has its dependency tree resolved for a Linux-only
    checkout. (The second tuple element is the release metadata dict, not a
    string as the annotation previously claimed.)
    """
    m, rev = get_latest_version(major_version)

    electron_repo: GitHubRepo = GitHubRepo("electron", "electron", rev)
    # Evaluate DEPS conditions as a Linux checkout only.
    electron_repo.get_deps(
        {
            f"checkout_{platform}": platform == "linux"
            for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
        },
        "src/electron",
    )

    return (major_version, m, electron_repo)
427
428
def get_update(repo: Tuple[str, dict, Repo]) -> Tuple[str, dict]:
    """Prefetch all sources of a release in parallel and build its info.json entry.

    Args:
        repo: A (major_version, release metadata, repo) tuple as returned
            by get_electron_info.

    Returns: A (major_version, entry) tuple ready to merge into info.json.
    """
    (major_version, m, electron_repo) = repo

    tasks = electron_repo.prefetch_all()

    # Named closures instead of reassigned lambdas; each returns a
    # (key, hash) pair that ends up in the info.json entry.
    def yarn_task():
        return ("electron_yarn_hash", get_yarn_hash(electron_repo))

    def npm_task():
        return (
            "chromium_npm_hash",
            get_npm_hash(
                electron_repo.deps["src"], "third_party/node/package-lock.json"
            ),
        )

    tasks.append(delayed(yarn_task)())
    tasks.append(delayed(npm_task)())
    # NOTE(review): shuffling presumably spreads the expensive fetches
    # across workers — confirm before relying on it.
    random.shuffle(tasks)

    # Plain prefetch tasks return None; only the (key, value) pairs from
    # the yarn/npm tasks contribute to task_results.
    task_results = {
        result[0]: result[1]
        for result in Parallel(n_jobs=3, require="sharedmem", return_as="generator")(
            tasks
        )
        if result is not None
    }

    tree = electron_repo.flatten("src/electron")

    return (
        f"{major_version}",
        {
            "deps": tree,
            **{key: m[key] for key in ["version", "modules", "chrome", "node"]},
            "chromium": {
                "version": m["chrome"],
                "deps": get_gn_source(electron_repo.deps["src"]),
            },
            **task_results,
        },
    )
466
467
def load_info_json(path: str) -> dict:
    """Load the contents of a JSON file

    Args:
        path: The path to the JSON file

    Returns: An empty dict if the file cannot be read or parsed,
        otherwise the contents of the JSON file.
    """
    try:
        with open(path, "r") as f:
            return json.load(f)
    # Narrowed from a bare `except:` — a missing or malformed file simply
    # means "no previous info"; anything else should surface.
    except (OSError, json.JSONDecodeError):
        return {}
481
482
def save_info_json(path: str, content: dict) -> None:
    """Saves the given info to a JSON file

    Args:
        path: The path where the info should be saved
        content: The content to be saved as JSON.
    """
    serialized = json.dumps(content, indent=JSON_INDENT, default=vars, sort_keys=True)
    with open(path, "w") as f:
        f.write(serialized + "\n")
493
494
def update_bin(major_version: str, commit: bool) -> None:
    """Update a given electron-bin release

    Args:
        major_version: The major version number, e.g. '27'
        commit: Whether the updater should commit the result
    """
    package_name = f"electron_{major_version}-bin"
    print(f"Updating {package_name}")

    (_major_version, _version, repo) = get_electron_bin_info(major_version)

    old_info = load_info_json(BINARY_INFO_JSON)
    new_info = repo.get_hashes(major_version)

    save_info_json(BINARY_INFO_JSON, old_info | new_info)

    old_version = None
    if major_version in old_info:
        old_version = old_info[major_version]["version"]
    new_version = new_info[major_version]["version"]

    if old_version == new_version:
        print(f"{package_name} is up-to-date")
    elif commit:
        commit_result(package_name, old_version, new_version, BINARY_INFO_JSON)
523
524
def update_source(major_version: str, commit: bool) -> None:
    """Update a given electron-source release

    Args:
        major_version: The major version number, e.g. '27'
        commit: Whether the updater should commit the result
    """
    package_name = f"electron-source.electron_{major_version}"
    print(f"Updating {package_name}")

    old_info = load_info_json(SOURCE_INFO_JSON)
    key = str(major_version)
    old_version = old_info[key]["version"] if key in old_info else None

    source_info = get_electron_info(major_version)
    new_key, new_entry = get_update(source_info)

    save_info_json(SOURCE_INFO_JSON, old_info | {new_key: new_entry})

    new_version = new_entry["version"]
    if old_version == new_version:
        print(f"{package_name} is up-to-date")
    elif commit:
        commit_result(package_name, old_version, new_version, SOURCE_INFO_JSON)
553
554
def non_eol_releases(releases: Iterable[int]) -> Iterable[int]:
    """Returns a tuple of the given releases that have not reached end-of-life yet."""
    supported = supported_version_range()
    return tuple(release for release in releases if release in supported)
558
559
def update_all_source(commit: bool) -> None:
    """Update all electron-source releases at once

    Args:
        commit: Whether to commit the result
    """
    old_info = load_info_json(SOURCE_INFO_JSON)

    filtered_releases = non_eol_releases(tuple(int(x) for x in old_info.keys()))

    # This might take some time
    repos = Parallel(n_jobs=2, require="sharedmem")(
        delayed(get_electron_info)(major_version) for major_version in filtered_releases
    )
    new_info = {
        major_version: entry
        for major_version, entry in Parallel(n_jobs=2, require="sharedmem")(
            delayed(get_update)(repo) for repo in repos
        )
    }

    if commit:
        for major_version in filtered_releases:
            # Since the sources have been fetched at this point already,
            # fetching them again will be much faster.
            update_source(str(major_version), commit)
    else:
        # new_info is already keyed by major version, so merge it wholesale.
        # (Previously this did `{new_info[0]: new_info[1]}`, which indexed
        # the dict with keys 0/1 and raised KeyError.)
        out = old_info | new_info
        save_info_json(SOURCE_INFO_JSON, out)
589
590
def parse_cve_numbers(tag_name: str) -> Iterable[str]:
    """Returns mentioned CVE numbers from a given release tag

    Args:
        tag_name: The git tag of the release, e.g. "v27.0.0"

    Returns: A sorted list of CVE identifiers found in the release notes,
        or an empty list if the notes could not be fetched.
    """
    cve_pattern = r"CVE-\d{4}-\d+"
    url = f"https://api.github.com/repos/electron/electron/releases/tags/{tag_name}"
    headers = {
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    request = urllib.request.Request(url=url, headers=headers)
    release_note = ""
    try:
        with urlopen(request) as response:
            release_note = json.loads(response.read().decode("utf-8"))["body"]
    # Narrowed from a bare `except:`; this is deliberately best-effort —
    # a missing release note only means we cannot list CVE fixes.
    except Exception:
        print(
            f"WARN: Fetching release note for {tag_name} from GitHub failed!",
            file=sys.stderr,
        )

    return sorted(re.findall(cve_pattern, release_note))
611
612
def commit_result(
    package_name: str, old_version: Optional[str], new_version: str, path: str
) -> None:
    """Creates a git commit with a short description of the change

    Args:
        package_name: The package name, e.g. `electron-source.electron-{major_version}`
            or `electron_{major_version}-bin`

        old_version: Version number before the update.
            Can be left empty when initializing a new release.

        new_version: Version number after the update.

        path: Path to the lockfile to be committed
    """
    assert (
        isinstance(package_name, str) and len(package_name) > 0
    ), "Argument `package_name` cannot be empty"
    assert (
        isinstance(new_version, str) and len(new_version) > 0
    ), "Argument `new_version` cannot be empty"

    # Nothing changed, nothing to commit.
    if old_version == new_version:
        return

    cve_fixes_text = "\n".join(
        f"- Fixes {cve}" for cve in parse_cve_numbers(f"v{new_version}")
    )

    # An absent old version means this is the first commit for the release.
    if old_version is not None:
        subject = f"{package_name}: {old_version} -> {new_version}"
        diff = (
            f"- Diff: https://github.com/electron/electron/compare/"
            f"refs/tags/v{old_version}...v{new_version}\n"
        )
    else:
        subject = f"{package_name}: init at {new_version}"
        diff = ""

    commit_message = f"""{subject}

- Changelog: https://github.com/electron/electron/releases/tag/v{new_version}
{diff}{cve_fixes_text}
"""
    subprocess.run(
        [
            "git",
            "add",
            path,
        ]
    )
    subprocess.run(
        [
            "git",
            "commit",
            "-m",
            commit_message,
        ]
    )
666
667
@click.group()
def cli() -> None:
    """A script for updating electron-bin and electron-source hashes"""
672
673
@cli.command(
    "eval", help="Print the necessary sources to fetch for a given major release"
)
@click.option("--version", help="The major version, e.g. '23'")
def eval_cmd(version):
    """Print the full dependency tree of a release as JSON.

    Renamed from `eval` to avoid shadowing the builtin; the CLI command
    name ("eval") is unchanged. Also drops the unused `electron_repo`
    binding from the chained assignment.
    """
    (_, _, repo) = get_electron_info(version)
    tree = repo.flatten("src/electron")
    print(json.dumps(tree, indent=JSON_INDENT, default=vars, sort_keys=True))
682
683
@cli.command("update", help="Update a single major release")
@click.option("-v", "--version", help="The major version, e.g. '23'")
@click.option(
    "-b",
    "--bin-only",
    is_flag=True,
    default=False,
    help="Only update electron-bin packages",
)
@click.option(
    "-s",
    "--source-only",
    is_flag=True,
    default=False,
    help="Only update electron-source packages",
)
@click.option("-c", "--commit", is_flag=True, default=False, help="Commit the result")
def update(version: str, bin_only: bool, source_only: bool, commit: bool) -> None:
    """Update the binary and/or source package of a single major release."""
    assert isinstance(version, str) and len(version) > 0, "version must be non-empty"

    # The two restriction flags are mutually exclusive.
    if bin_only and source_only:
        print(
            "Error: Omit --bin-only and --source-only if you want to update both source and binary packages.",
            file=sys.stderr,
        )
        sys.exit(1)

    if not source_only:
        update_bin(version, commit)
    if not bin_only:
        update_source(version, commit)
720
721
@cli.command("update-all", help="Update all releases at once")
@click.option(
    "-b",
    "--bin-only",
    is_flag=True,
    default=False,
    help="Only update electron-bin packages",
)
@click.option(
    "-s",
    "--source-only",
    is_flag=True,
    default=False,
    help="Only update electron-source packages",
)
@click.option("-c", "--commit", is_flag=True, default=False, help="Commit the result")
def update_all(bin_only: bool, source_only: bool, commit: bool) -> None:
    """Update every supported release, binary and/or source."""
    # Filter out releases that have reached end-of-life
    supported = supported_version_range()
    filtered_bin_info = {
        major: entry
        for major, entry in load_info_json(BINARY_INFO_JSON).items()
        if int(major) in supported
    }

    # The two restriction flags are mutually exclusive.
    if bin_only and source_only:
        print(
            "Error: omit --bin-only and --source-only if you want to update both source and binary packages.",
            file=sys.stderr,
        )
        sys.exit(1)

    if not source_only:
        for major_version in filtered_bin_info:
            update_bin(major_version, commit)
    if not bin_only:
        update_all_source(commit)
766
767
if __name__ == "__main__":
    # Dispatch to the click command group defined above.
    cli()