1#! /usr/bin/env nix-shell
2#! nix-shell -i python -p python3.pkgs.joblib python3.pkgs.click python3.pkgs.click-log nix nix-prefetch-git nix-universal-prefetch prefetch-yarn-deps prefetch-npm-deps
3
4import logging
5import click_log
6import click
7import random
8import traceback
9import csv
10import base64
11import os
12import re
13import tempfile
14import subprocess
15import json
16import sys
17from joblib import Parallel, delayed, Memory
18from codecs import iterdecode
19from datetime import datetime
20from urllib.request import urlopen
21
# Import-time setup: always operate relative to this script's directory so the
# joblib "cache" directory and info.json land next to it.
os.chdir(os.path.dirname(__file__))

# depot_tools provides the gclient DEPS parser used below.  Fetch a pinned
# revision into a temporary checkout and make it importable.
depot_tools_checkout = tempfile.TemporaryDirectory()
subprocess.check_call([
    "nix-prefetch-git",
    "--builder", "--quiet",
    "--url", "https://chromium.googlesource.com/chromium/tools/depot_tools",
    "--out", depot_tools_checkout.name,
    "--rev", "7a69b031d58081d51c9e8e89557b343bba8518b1"])
sys.path.append(depot_tools_checkout.name)

# These imports resolve against the depot_tools checkout appended to sys.path
# above, so they must stay below that line.
import gclient_eval
import gclient_utils

# Disk-backed memoization: prefetch results are cached in ./cache across runs.
memory = Memory("cache", verbose=0)
37
@memory.cache
def get_repo_hash(fetcher, args):
    """Run nix-universal-prefetch for *fetcher* with *args* and return the hash.

    Results are memoized on disk via joblib, so each (fetcher, args) pair is
    only ever prefetched once across runs.
    """
    cmd = ['nix-universal-prefetch', fetcher]
    for name, value in args.items():
        cmd += [f'--{name}', value]

    print(" ".join(cmd), file=sys.stderr)
    return subprocess.check_output(cmd).decode('utf-8').strip()
48
@memory.cache
def _get_yarn_hash(file):
    """Return the prefetch-yarn-deps hash for the given yarn.lock contents.

    Takes the lockfile *contents* (not a path) so the joblib cache key is the
    file text itself.  Writes it to a temp dir because the tool wants a path.
    """
    # Was an f-string with no placeholders; plain literal is equivalent.
    print('prefetch-yarn-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_path = os.path.join(tmp_dir, 'yarn.lock')
        with open(lock_path, 'w') as f:
            f.write(file)
        return subprocess.check_output(['prefetch-yarn-deps', lock_path]).decode('utf-8').strip()
def get_yarn_hash(repo, yarn_lock_path = 'yarn.lock'):
    """Fetch *yarn_lock_path* from *repo* and hash its yarn dependencies."""
    lock_contents = repo.get_file(yarn_lock_path)
    return _get_yarn_hash(lock_contents)
58
@memory.cache
def _get_npm_hash(file):
    """Return the prefetch-npm-deps hash for the given package-lock.json contents.

    Takes the lockfile *contents* (not a path) so the joblib cache key is the
    file text itself.  Writes it to a temp dir because the tool wants a path.
    """
    # Was an f-string with no placeholders; plain literal is equivalent.
    print('prefetch-npm-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_path = os.path.join(tmp_dir, 'package-lock.json')
        with open(lock_path, 'w') as f:
            f.write(file)
        return subprocess.check_output(['prefetch-npm-deps', lock_path]).decode('utf-8').strip()
def get_npm_hash(repo, package_lock_path = 'package-lock.json'):
    """Fetch *package_lock_path* from *repo* and hash its npm dependencies."""
    lock_contents = repo.get_file(package_lock_path)
    return _get_npm_hash(lock_contents)
68
class Repo:
    """A node in the gclient dependency tree.

    Subclasses set ``self.fetcher`` (nix fetcher function name) and
    ``self.args`` (its arguments) and implement ``get_file(filepath)``.
    """

    def __init__(self):
        self.deps = {}
        # Placeholder fake hash; replaced by prefetch().
        self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="

    def get_deps(self, repo_vars, path):
        """Parse this repo's DEPS file and populate self.deps, recursing into
        any "recursedeps" entries.

        repo_vars: gclient variables overriding the DEPS-declared ones.
        path: this repo's location in the tree; prefixes relative dep paths.
        """
        print("evaluating " + json.dumps(self, default = vars), file=sys.stderr)

        deps_file = self.get_file("DEPS")
        evaluated = gclient_eval.Parse(deps_file, filename='DEPS')

        # Caller-supplied vars win over the DEPS-declared defaults.
        repo_vars = dict(evaluated["vars"]) | repo_vars

        prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

        # Build the dep map with a plain loop so repo_from_dep runs once per
        # dep (the previous comprehension called it twice: once for the
        # filter, once for the value).
        self.deps = {}
        for dep_name, dep in evaluated["deps"].items():
            if "condition" in dep and not gclient_eval.EvaluateCondition(dep["condition"], repo_vars):
                continue
            dep_repo = repo_from_dep(dep)
            if dep_repo is not None:
                self.deps[prefix + dep_name] = dep_repo

        for key in evaluated.get("recursedeps", []):
            dep_path = prefix + key
            # squirrel.mac is skipped here — presumably mac-only and unneeded;
            # the original gives no reason (TODO confirm).
            if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
                self.deps[dep_path].get_deps(repo_vars, dep_path)

    def prefetch(self):
        """Compute and store this repo's real hash (slow; cached on disk)."""
        self.hash = get_repo_hash(self.fetcher, self.args)

    def prefetch_all(self):
        """Return joblib 'delayed' tasks prefetching this repo and all deps."""
        tasks = [delayed(self.prefetch)()]
        for dep in self.deps.values():
            tasks += dep.prefetch_all()
        return tasks

    def flatten_repr(self):
        """Flat dict describing how to fetch this single repo."""
        return {
            "fetcher": self.fetcher,
            "hash": self.hash,
            **self.args
        }

    def flatten(self, path):
        """Flatten the whole tree into {checkout path: fetcher description}."""
        out = {
            path: self.flatten_repr()
        }
        for dep_path, dep in self.deps.items():
            out |= dep.flatten(dep_path)
        return out
115
class GitRepo(Repo):
    """Dependency fetched with the generic fetchgit fetcher."""

    def __init__(self, url, rev):
        super().__init__()
        self.fetcher = 'fetchgit'
        self.args = {"url": url, "rev": rev}
124
class GitHubRepo(Repo):
    """Dependency hosted on GitHub; files read via raw.githubusercontent.com."""

    def __init__(self, owner, repo, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitHub'
        self.args = {"owner": owner, "repo": repo, "rev": rev}

    def get_file(self, filepath):
        """Download a single file at this repo's pinned revision."""
        raw_url = (
            f"https://raw.githubusercontent.com/{self.args['owner']}"
            f"/{self.args['repo']}/{self.args['rev']}/{filepath}"
        )
        return urlopen(raw_url).read().decode('utf-8')
137
class GitilesRepo(Repo):
    """Dependency hosted on a googlesource.com Gitiles server."""

    def __init__(self, url, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitiles'
        self.args = {
            "url": url,
            "rev": rev,
        }

        # The main chromium tree ships enormous test fixtures; delete them
        # right after fetching to shrink the store path.
        if url == "https://chromium.googlesource.com/chromium/src.git":
            pruned_dirs = [
                "third_party/blink/web_tests",
                "third_party/hunspell/tests",
                "content/test/data",
                "courgette/testdata",
                "extensions/test/data",
                "media/test/data",
            ]
            self.args['postFetch'] = "".join(f"rm -r $out/{d}; " for d in pruned_dirs)

    def get_file(self, filepath):
        """Download a single file; Gitiles serves it base64-encoded via ?format=TEXT."""
        response = urlopen(f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT")
        return base64.b64decode(response.read()).decode('utf-8')
159
def repo_from_dep(dep):
    """Build a Repo subclass instance for a gclient dep entry.

    Returns None for entries without a "url" (e.g. cipd packages), which the
    caller filters out.
    """
    if "url" not in dep:
        # Not a git dependency; skip.
        return None

    url, rev = gclient_utils.SplitUrlRevision(dep["url"])

    # Dots escaped so e.g. "githubXcom" cannot match.
    github_match = re.search(r'https://github\.com/(.+)/(.+?)(\.git)?$', url)
    if github_match:
        return GitHubRepo(github_match.group(1), github_match.group(2), rev)

    # Dots escaped so only real *.googlesource.com hosts match.
    if re.match(r'https://.+\.googlesource\.com', url):
        return GitilesRepo(url, rev)

    return GitRepo(url, rev)
175
def get_gn_source(repo):
    """Find the pinned gn revision in *repo*'s DEPS file and prefetch it.

    Returns a {"gn": {...}} fragment with version (gn uses its commit date),
    url, rev and hash.
    """
    deps_text = repo.get_file("DEPS")
    gn_commit = re.search(
        r"'gn_version': 'git_revision:([0-9a-f]{40})'", deps_text
    ).group(1)
    prefetched = json.loads(subprocess.check_output([
        "nix-prefetch-git",
        "--quiet",
        "https://gn.googlesource.com/gn",
        "--rev", gn_commit
    ]))
    return {
        "gn": {
            "version": datetime.fromisoformat(prefetched["date"]).date().isoformat(),
            "url": prefetched["url"],
            "rev": prefetched["rev"],
            "hash": prefetched["hash"]
        }
    }
194
def get_electron_info(major_version):
    """Resolve the newest release of an Electron major version.

    Returns (major_version, release_metadata_dict, dependency_tree_repo).
    """
    releases = json.loads(urlopen("https://releases.electronjs.org/releases.json").read())
    candidates = [r for r in releases if r["version"].startswith(f"{major_version}.")]
    latest = max(candidates, key=lambda release: release["date"])

    tag = f"v{latest['version']}"

    electron_repo = GitHubRepo("electron", "electron", tag)
    electron_repo.recurse = True

    # Evaluate DEPS as a linux checkout: every checkout_* var is False
    # except checkout_linux.
    checkout_vars = {
        f"checkout_{platform}": platform == "linux"
        for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
    }
    electron_repo.get_deps(checkout_vars, "src/electron")

    return (major_version, latest, electron_repo)
211
# click-log wires verbosity options into the module logger.
logger = logging.getLogger(__name__)
click_log.basic_config(logger)

# Root click group; the eval/update/update-all commands attach to it below.
@click.group()
def cli():
    pass
218
@cli.command("eval")
@click.option("--version", help="The major version, e.g. '23'")
def eval_cmd(version):
    """Print the flattened dependency tree for one major version as JSON.

    Renamed from ``eval`` to avoid shadowing the builtin; the CLI command
    name ("eval") is unchanged.
    """
    # get_electron_info returns (major_version, release_metadata, repo); we
    # must flatten the repo, not the whole tuple (the previous code called
    # .flatten() on the tuple, which would raise AttributeError).
    (_, _, repo) = get_electron_info(version)
    tree = repo.flatten("src/electron")
    print(json.dumps(tree, indent=4, default = vars))
225
def get_update(repo):
    """Prefetch everything for one (major_version, metadata, repo) triple.

    Returns (major_version_string, info_dict) ready to merge into info.json.
    """
    (major_version, release, electron_repo) = repo

    # One joblib task per repo in the tree, plus one each for the yarn and
    # npm lockfile hashes.  Repo-prefetch tasks return None (they mutate the
    # repo in place); the lockfile tasks return (key, value) pairs for the
    # top-level info dict.
    tasks = electron_repo.prefetch_all()

    def yarn_task():
        return ("electron_yarn_hash", get_yarn_hash(electron_repo))

    def npm_task():
        return ("chromium_npm_hash",
                get_npm_hash(electron_repo.deps["src"], "third_party/node/package-lock.json"))

    tasks.append(delayed(yarn_task)())
    tasks.append(delayed(npm_task)())
    # Randomize task order — presumably to spread the heavy fetches across
    # workers; the original gives no reason.
    random.shuffle(tasks)

    results = Parallel(n_jobs=3, require='sharedmem', return_as="generator")(tasks)
    task_results = {n[0]: n[1] for n in results if n is not None}

    tree = electron_repo.flatten("src/electron")

    return (f"{major_version}", {
        "deps": tree,
        **{key: release[key] for key in ["version", "modules", "chrome", "node"]},
        "chromium": {
            "version": release['chrome'],
            "deps": get_gn_source(electron_repo.deps["src"])
        },
        **task_results
    })
253
@cli.command("update")
@click.option("--version", help="The major version, e.g. '23'")
def update(version):
    """Refresh one major version's entry in info.json, keeping the others."""
    try:
        with open('info.json', 'r') as f:
            old_info = json.loads(f.read())
    except (OSError, json.JSONDecodeError):
        # Missing or unparsable info.json: start from an empty dict.
        # (Previously a bare except:, which also swallowed KeyboardInterrupt.)
        old_info = {}
    repo = get_electron_info(version)
    (version_key, info) = get_update(repo)
    out = old_info | {version_key: info}
    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default = vars))
        f.write('\n')
268
@cli.command("update-all")
def update_all():
    """Regenerate info.json from scratch for majors 28 down to 25."""
    majors = range(28, 24, -1)
    infos = Parallel(n_jobs=2, require='sharedmem')(
        delayed(get_electron_info)(major) for major in majors)
    updates = Parallel(n_jobs=2, require='sharedmem')(
        delayed(get_update)(info) for info in infos)
    out = {version_key: data for version_key, data in updates}

    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default = vars))
        f.write('\n')
277
# Dispatch to the click command group when run as a script.
if __name__ == "__main__":
    cli()