tangled
alpha
login
or
join now
pyrox.dev
/
nixpkgs
0
fork
atom
lol
0
fork
atom
overview
issues
pulls
pipelines
gclient2nix: init
Yureka
11 months ago
082d26cd
499b7565
+274
2 changed files
expand all
collapse all
unified
split
pkgs
by-name
gc
gclient2nix
gclient2nix.py
package.nix
+232
pkgs/by-name/gc/gclient2nix/gclient2nix.py
···
1
1
+
#!/usr/bin/env python3
2
2
+
import base64
3
3
+
import json
4
4
+
import os
5
5
+
import subprocess
6
6
+
import re
7
7
+
import random
8
8
+
import sys
9
9
+
import tempfile
10
10
+
import logging
11
11
+
import click
12
12
+
import click_log
13
13
+
from typing import Optional
14
14
+
from urllib.request import urlopen
15
15
+
from joblib import Parallel, delayed, Memory
16
16
+
from platformdirs import user_cache_dir
17
17
+
18
18
+
# depot_tools is not packaged on PyPI; @depot_tools_checkout@ is replaced at
# build time (substituteAll in package.nix) with a pinned checkout path.
sys.path.append("@depot_tools_checkout@")
import gclient_eval
import gclient_utils


logger = logging.getLogger(__name__)
click_log.basic_config(logger)

# Replaced at build time with the nixpkgs tree providing gclient2nix.fetchers.
nixpkgs_path = "@nixpkgs_path@"

# On-disk memoization store (joblib) used to cache prefetched repo hashes.
memory: Memory = Memory(user_cache_dir("gclient2nix"), verbose=0)
def cache(mem, **mem_kwargs):
    """Decorator factory wrapping ``mem.cache`` (a joblib ``Memory``).

    Normalizes the wrapped function's ``__module__``/``__qualname__`` before
    handing it to joblib, so the on-disk cache location stays stable no matter
    how the script is invoked (it runs as ``__main__`` when executed directly).
    """

    def decorate(func):
        # Pin identity attributes first; joblib derives cache paths from them.
        func.__qualname__ = func.__name__
        func.__module__ = "gclient2nix"
        return mem.cache(func, **mem_kwargs)

    return decorate
@cache(memory)
def get_repo_hash(fetcher: str, args: dict) -> str:
    """Prefetch one repository and return its SRI hash.

    Builds a Nix expression calling ``gclient2nix.fetchers.<fetcher>`` from
    the substituted nixpkgs tree and asks ``nurl -H`` for the resulting hash.
    Results are memoized on disk via the @cache decorator.

    NOTE(review): argument values are spliced into the Nix expression without
    escaping; a value containing '"' would break the expression — confirm
    inputs are always plain URLs/revisions.
    """
    attrs = "".join(f'{key}="{val}";' for key, val in args.items())
    expr = f"(import {nixpkgs_path} {{}}).gclient2nix.fetchers.{fetcher}{{{attrs}}}"
    cmd = ["nurl", "-H", "--expr", expr]
    # Echo the command so progress is visible on stderr.
    print(" ".join(cmd), file=sys.stderr)
    return subprocess.check_output(cmd).decode("utf-8").strip()
class Repo:
    """A node in the gclient dependency tree.

    Subclasses set ``fetcher`` (name of a fetcher under
    ``gclient2nix.fetchers``) and ``args`` (its arguments), and implement
    :meth:`get_file` so DEPS files can be read without a full checkout.
    """

    fetcher: str
    args: dict

    def __init__(self) -> None:
        # Maps checkout path -> Repo for this repository's gclient deps.
        self.deps: dict = {}

    def get_deps(self, repo_vars: dict, path: str) -> None:
        """Parse this repo's DEPS file and populate ``self.deps``.

        ``repo_vars`` are gclient variable overrides used when evaluating
        dep conditions; ``path`` is this repo's checkout path, used as a
        prefix when the DEPS file uses relative paths.
        """
        print(
            "evaluating " + json.dumps(self, default=vars, sort_keys=True),
            file=sys.stderr,
        )

        deps_file = self.get_file("DEPS")
        evaluated = gclient_eval.Parse(deps_file, vars_override=repo_vars, filename="DEPS")

        # Vars declared by the DEPS file, with our overrides winning.
        repo_vars = dict(evaluated.get("vars", {})) | repo_vars

        prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

        deps: dict = {}
        for dep_name, dep in evaluated.get("deps", {}).items():
            # Skip deps whose gclient condition evaluates to False.
            if "condition" in dep and not gclient_eval.EvaluateCondition(dep["condition"], repo_vars):
                continue
            # Construct the repo exactly once (previously it was built twice:
            # once for the None check, once for the stored value).
            repo = repo_from_dep(dep)
            if repo is not None:
                deps[prefix + dep_name] = repo
        self.deps = deps

        # Recurse into deps whose own DEPS files must also be evaluated.
        for key in evaluated.get("recursedeps", []):
            dep_path = prefix + key
            if dep_path in self.deps:
                self.deps[dep_path].get_deps(repo_vars, dep_path)

    def eval(self) -> None:
        """Evaluate the full dependency tree for a Linux checkout.

        All ``checkout_<arch>`` vars are enabled; only ``checkout_linux``
        among the platform vars is True.
        """
        self.get_deps(
            {
                **{
                    f"checkout_{platform}": platform == "linux"
                    for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
                },
                **{
                    f"checkout_{arch}": True
                    for arch in ["x64", "arm64", "arm", "x86", "mips", "mips64", "ppc"]
                },
            },
            "",
        )

    def prefetch(self) -> None:
        """Compute and store this repo's hash (memoized via get_repo_hash)."""
        self.hash = get_repo_hash(self.fetcher, self.args)

    def prefetch_all(self) -> list:
        """Return joblib ``delayed`` tasks prefetching this repo and all deps.

        Returns a flat list of tasks (the previous ``-> int`` annotation was
        wrong: ``sum`` with a list start concatenates lists).
        """
        return sum(
            [dep.prefetch_all() for [_, dep] in self.deps.items()],
            [delayed(self.prefetch)()],
        )

    def flatten_repr(self) -> dict:
        """Serializable description: fetcher plus attrs, incl. hash if prefetched."""
        return {"fetcher": self.fetcher, "attrs": {**({"hash": self.hash} if hasattr(self, "hash") else {}), **self.args}}

    def flatten(self, path: str) -> dict:
        """Flatten the dependency tree into a {checkout_path: flatten_repr()} dict."""
        out = {path: self.flatten_repr()}
        for dep_path, dep in self.deps.items():
            out |= dep.flatten(dep_path)
        return out

    def get_file(self, filepath: str) -> str:
        """Fetch a file's contents at the pinned revision; subclass responsibility."""
        raise NotImplementedError
class GitRepo(Repo):
    """Dependency fetched with nixpkgs' generic ``fetchgit``."""

    def __init__(self, url: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchgit"
        self.args = {"url": url, "rev": rev}
class GitHubRepo(Repo):
    """Dependency hosted on GitHub, fetched with ``fetchFromGitHub``."""

    def __init__(self, owner: str, repo: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchFromGitHub"
        self.args = {
            "owner": owner,
            "repo": repo,
            "rev": rev,
        }

    def get_file(self, filepath: str) -> str:
        """Read one file at the pinned revision via raw.githubusercontent.com.

        Fix: close the HTTP response via a context manager (the original
        left the connection open).
        """
        url = f"https://raw.githubusercontent.com/{self.args['owner']}/{self.args['repo']}/{self.args['rev']}/{filepath}"
        with urlopen(url) as response:
            return response.read().decode("utf-8")
class GitilesRepo(Repo):
    """Dependency on a Gitiles host (*.googlesource.com), fetched with ``fetchFromGitiles``."""

    def __init__(self, url: str, rev: str) -> None:
        super().__init__()
        self.fetcher = "fetchFromGitiles"
        self.args = {
            "url": url,
            "rev": rev,
        }

        # Quirk: Chromium source code exceeds the Hydra output limit
        # We prefer deleting test data over recompressing the sources into a
        # tarball, because the NAR will be compressed after the size check
        # anyways, so recompressing is more like bypassing the size limit
        # (making it count the compressed instead of uncompressed size)
        # rather than complying with it.
        if url == "https://chromium.googlesource.com/chromium/src.git":
            self.args["postFetch"] = "rm -r $out/third_party/blink/web_tests; "
            self.args["postFetch"] += "rm -r $out/content/test/data; "
            self.args["postFetch"] += "rm -rf $out/courgette/testdata; "
            self.args["postFetch"] += "rm -r $out/extensions/test/data; "
            self.args["postFetch"] += "rm -r $out/media/test/data; "

    def get_file(self, filepath: str) -> str:
        """Read one file at the pinned revision via the Gitiles API.

        Gitiles serves file contents base64-encoded when ``format=TEXT``.
        Fix: close the HTTP response via a context manager (the original
        left the connection open).
        """
        url = f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT"
        with urlopen(url) as response:
            encoded = response.read()
        return base64.b64decode(encoded).decode("utf-8")
def repo_from_dep(dep: dict) -> Optional[Repo]:
    """Map a gclient dep entry to a Repo, or None for non-git deps.

    Fix: the hostname dots in the URL regexes are now escaped; previously
    ``.`` matched any character (e.g. "githubXcom" would have matched).
    """
    if "url" not in dep:
        # Not a git dependency; skip
        return None

    url, rev = gclient_utils.SplitUrlRevision(dep["url"])

    github = re.search(r"https://github\.com/(.+)/(.+?)(\.git)?$", url)
    if github:
        return GitHubRepo(github.group(1), github.group(2), rev)

    if re.match(r"https://.+\.googlesource\.com", url):
        return GitilesRepo(url, rev)

    return GitRepo(url, rev)
@click.group()
def cli() -> None:
    """gclient2nix"""
@cli.command("eval", help="Evaluate and print the dependency tree of a gclient project")
@click.argument("url", required=True, type=str)
@click.option("--root", default="src", help="Root path, where the given url is placed", type=str)
def eval(url: str, root: str) -> None:
    # NOTE(review): the name shadows the builtin eval(); harmless here since
    # it is only referenced through the click registration.
    repo = repo_from_dep({"url": url})
    repo.eval()
    tree = repo.flatten(root)
    print(json.dumps(tree, sort_keys=True, indent=4))
@cli.command("generate", help="Generate a dependencies description for a gclient project")
@click.argument("url", required=True, type=str)
@click.option("--root", default="src", help="Root path, where the given url is placed", type=str)
def generate(url: str, root: str) -> None:
    """Evaluate the dependency tree, prefetch every repo's hash, print as JSON."""
    repo = repo_from_dep({"url": url})
    repo.eval()

    tasks = repo.prefetch_all()
    # Randomize task order — presumably to spread load across hosts; verify.
    random.shuffle(tasks)
    # Each task runs Repo.prefetch, which stores the hash on the Repo object
    # through shared memory; the task results themselves are always None.
    # The original collected them into an unused dict — just drain the
    # generator so every task executes.
    for _ in Parallel(n_jobs=20, require="sharedmem", return_as="generator")(tasks):
        pass
    print(json.dumps(repo.flatten(root), sort_keys=True, indent=4))
# Script entry point: dispatch to the click command group.
if __name__ == "__main__":
    cli()
+42
pkgs/by-name/gc/gclient2nix/package.nix
···
1
1
+
{
  lib,
  python3,
  runCommand,
  makeWrapper,
  path,
  fetchgit,
  nurl,
}:

let
  # Interpreter bundled with the script's third-party Python dependencies.
  python = python3.withPackages (
    ps: with ps; [
      joblib
      platformdirs
      click
      click-log
    ]
  );

in

runCommand "gclient2nix"
  {
    nativeBuildInputs = [ makeWrapper ];
    buildInputs = [ python ];

    # substitutions
    # When building from a git checkout, clean the source so the .git
    # directory is not copied into the store.
    nixpkgs_path = if builtins.pathExists (path + "/.git") then lib.cleanSource path else path;
    # Pinned depot_tools checkout providing gclient_eval / gclient_utils.
    depot_tools_checkout = fetchgit {
      url = "https://chromium.googlesource.com/chromium/tools/depot_tools";
      rev = "452fe3be37f78fbecefa1b4b0d359531bcd70d0d";
      hash = "sha256-8IiJOm0FLa/u1Vd96tb33Ruj4IUTCeYgBpTk88znhPw=";
    };
  }
  ''
    mkdir -p $out/bin
    # Fills in the @nixpkgs_path@ / @depot_tools_checkout@ placeholders.
    substituteAll ${./gclient2nix.py} $out/bin/gclient2nix
    chmod u+x $out/bin/gclient2nix
    patchShebangs $out/bin/gclient2nix
    # NOTE(review): --set replaces PATH entirely, leaving only nurl visible
    # at runtime — confirm nurl needs no other tools on PATH.
    wrapProgram $out/bin/gclient2nix --set PATH "${lib.makeBinPath [ nurl ]}"
  ''