1#!/usr/bin/env nix-shell
2#!nix-shell -p nix-prefetch-git -p python3 nix -i python3
3
4# format:
5# $ nix run nixpkgs.python3Packages.black -c black update.py
6# type-check:
7# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
8# linted:
9# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
10
11import argparse
12import functools
13import json
14import os
15import subprocess
16import sys
17import traceback
18import urllib.error
19import urllib.request
20import xml.etree.ElementTree as ET
21from datetime import datetime
22from multiprocessing.dummy import Pool
23from pathlib import Path
24from typing import Dict, List, Optional, Tuple, Union, Any, Callable
25from urllib.parse import urljoin, urlparse
26from tempfile import NamedTemporaryFile
27
# Fully-qualified tag names used when parsing GitHub's Atom commit feeds.
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

# Paths relative to this script: plugin-name input list and generated output.
ROOT = Path(__file__).parent
DEFAULT_IN = ROOT.joinpath("vim-plugin-names")
DEFAULT_OUT = ROOT.joinpath("generated.nix")
35
36import time
37from functools import wraps
38
39
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Decorator retrying the wrapped callable with exponential backoff.

    Adapted from:
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)

    :param ExceptionToCheck: the exception on which to retry
    :param tries: total number of attempts (not retries) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: multiplier applied to the delay after each failed attempt
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            wait = delay
            # All but the last attempt swallow the exception and sleep;
            # the final call below lets any failure propagate to the caller.
            for _ in range(tries - 1):
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {wait} seconds...")
                    time.sleep(wait)
                    wait *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
71
72
class Repo:
    """A GitHub repository addressed as owner/name.

    All queries go over plain HTTPS (Atom commit feed, archive tarballs);
    the repository's default branch is assumed to be ``master``.
    """

    def __init__(self, owner: str, name: str) -> None:
        self.owner = owner
        self.name = name

    def url(self, path: str) -> str:
        """Resolve *path* relative to the repository's GitHub base URL."""
        base = f"https://github.com/{self.owner}/{self.name}/"
        return urljoin(base, path)

    def __repr__(self) -> str:
        return f"Repo({self.owner}, {self.name})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        """True when a .gitmodules file exists on the master branch."""
        try:
            response = urllib.request.urlopen(
                self.url("blob/master/.gitmodules"), timeout=10
            )
        except urllib.error.HTTPError as e:
            # 404 simply means "no submodules"; any other HTTP error is real.
            if e.code != 404:
                raise
            return False
        response.close()
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (commit id, updated timestamp) of the newest master commit,
        parsed from the repository's Atom feed."""
        with urllib.request.urlopen(self.url("commits/master.atom"), timeout=10) as req:
            xml = req.read()
            feed = ET.fromstring(xml)
            newest = feed.find(ATOM_ENTRY)
            assert newest is not None, f"No commits found in repository {self}"
            commit_link = newest.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = newest.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            # The last path component of the commit link is the commit id.
            return Path(str(url.path)).name, updated

    def prefetch_git(self, ref: str) -> str:
        """Hash *ref* via nix-prefetch-git (required when submodules exist)."""
        out = subprocess.check_output(
            ["nix-prefetch-git", "--fetch-submodules", self.url(""), ref]
        )
        return json.loads(out)["sha256"]

    def prefetch_github(self, ref: str) -> str:
        """Hash the GitHub archive tarball of *ref* via nix-prefetch-url."""
        out = subprocess.check_output(
            ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        )
        return out.strip().decode("utf-8")
125
126
class Plugin:
    """One pinned vim plugin: name, commit, submodule flag and sha256 hash.

    ``date`` is only known right after prefetching and is deliberately
    excluded when the plugin is serialized to JSON.
    """

    def __init__(
        self,
        name: str,
        commit: str,
        has_submodules: bool,
        sha256: str,
        date: Optional[datetime] = None,
    ) -> None:
        self.name = name
        self.commit = commit
        self.has_submodules = has_submodules
        self.sha256 = sha256
        self.date = date

    @property
    def normalized_name(self) -> str:
        # Dots are replaced with dashes (presumably to keep the generated
        # nix attribute names simple — confirm against generated.nix usage).
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        """Date-based version string in YYYY-MM-DD form."""
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        """All instance attributes except the date."""
        return {key: value for key, value in self.__dict__.items() if key != "date"}
155
156
# Nix expression handed to `nix eval --json`: for every plugin in the current
# generated.nix it extracts the pinned rev, sha256 and whether submodules are
# fetched (entries without a fixed-output src are filtered out).
# Note: {ROOT} is interpolated into the expression at module import time.
GET_PLUGINS = f"""(with import <localpkgs> {{}};
let
  inherit (vimUtils.override {{inherit vim;}}) buildVimPluginFrom2Nix;
  generated = callPackage {ROOT}/generated.nix {{
    inherit buildVimPluginFrom2Nix;
  }};
  hasChecksum = value: lib.isAttrs value && lib.hasAttrByPath ["src" "outputHash"] value;
  getChecksum = name: value:
    if hasChecksum value then {{
      submodules = value.src.fetchSubmodules or false;
      sha256 = value.src.outputHash;
      rev = value.src.rev;
    }} else null;
  checksums = lib.mapAttrs getChecksum generated;
in lib.filterAttrs (n: v: v != null) checksums)"""
172
173
class CleanEnvironment(object):
    """Context manager giving nix a minimal, predictable environment.

    On entry it points NIX_PATH at the local nixpkgs checkout and
    NIXPKGS_CONFIG at an empty config file; on exit the original
    environment is restored exactly.
    """

    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(ROOT.joinpath("../../.."))
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        # flush so the nix process sees the content, not an empty file
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # BUGFIX: plain update() left behind keys added inside the context
        # (NIX_PATH / NIXPKGS_CONFIG when they were previously unset).
        # Clear first so the environment is restored exactly.
        os.environ.clear()
        os.environ.update(self.old_environ)
        self.empty_config.close()
187
188
def get_current_plugins() -> List[Plugin]:
    """Evaluate the existing generated.nix and return the plugins it pins."""
    with CleanEnvironment():
        raw = subprocess.check_output(["nix", "eval", "--json", GET_PLUGINS])
    data = json.loads(raw)
    # One Plugin per attribute; the date is unknown until re-prefetched.
    return [
        Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
        for name, attr in data.items()
    ]
198
199
def prefetch_plugin(user: str, repo_name: str, alias: str, cache: "Cache") -> Plugin:
    """Fetch the newest commit of user/repo_name and return it as a Plugin.

    The commit cache is consulted first; a cache hit only refreshes the
    plugin's name (honouring the alias) and date, skipping the download.
    """
    repository = Repo(user, repo_name)
    commit, date = repository.latest_commit()
    has_submodules = repository.has_submodules()

    cached = cache[commit]
    if cached is not None:
        cached.name = alias or repo_name
        cached.date = date
        return cached

    print(f"prefetch {user}/{repo_name}")
    # Submodule repos need the (slower) git fetcher; plain repos can use
    # the GitHub tarball.
    if has_submodules:
        sha256 = repository.prefetch_git(commit)
    else:
        sha256 = repository.prefetch_github(commit)

    return Plugin(alias or repo_name, commit, has_submodules, sha256, date=date)
217
218
def print_download_error(plugin: str, ex: Exception):
    """Report a failed plugin download, followed by its full traceback."""
    print(f"{plugin}: {ex}", file=sys.stderr)
    frames = traceback.format_exception(ex.__class__, ex, ex.__traceback__)
    # format_exception yields newline-terminated fragments; strip before
    # joining so the output has no blank lines between frames.
    print("\n".join(frame.rstrip("\n") for frame in frames))
227
228
def check_results(
    results: List[Tuple[str, str, Union[Exception, Plugin]]]
) -> List[Tuple[str, str, Plugin]]:
    """Partition prefetch results into successes and failures.

    Returns the successful (owner, name, plugin) triples; when any download
    failed, every failure is reported with its traceback and the process
    exits with status 1.
    """
    failures: List[Tuple[str, Exception]] = []
    plugins = []
    for owner, name, outcome in results:
        if isinstance(outcome, Exception):
            failures.append((name, outcome))
        else:
            plugins.append((owner, name, outcome))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if not failures:
        print()
        return plugins

    print(f", {len(failures)} plugin(s) could not be downloaded:\n")
    for plugin, exception in failures:
        print_download_error(plugin, exception)
    sys.exit(1)
251
252
def parse_plugin_line(line: str) -> Tuple[str, str, Optional[str]]:
    """Parse one ``owner/repo[ as alias]`` line into (owner, repo, alias).

    The alias is None when absent; a line without exactly one '/' raises
    ValueError (callers validate the owner part separately).
    """
    owner, spec = line.split("/")
    try:
        repo, alias = spec.split(" as ")
    except ValueError:
        # no " as alias" suffix present
        return (owner, spec.strip(), None)
    return (owner, repo, alias.strip())
261
262
def load_plugin_spec(plugin_file: str) -> List[Tuple[str, str, Optional[str]]]:
    """Read the plugin list: one ``owner/repo[ as alias]`` per line.

    Blank and whitespace-only lines are skipped (previously they crashed
    the parser with an unhelpful ValueError).  A line with an empty owner
    prints an error and exits with status 1.
    """
    plugins = []
    with open(plugin_file) as f:
        for line in f:
            # tolerate blank lines, e.g. a trailing newline at end of file
            if not line.strip():
                continue
            plugin = parse_plugin_line(line)
            if not plugin[0]:
                msg = f"Invalid repository {line}, must be in the format owner/repo[ as alias]"
                print(msg, file=sys.stderr)
                sys.exit(1)
            plugins.append(plugin)
    return plugins
274
275
def get_cache_path() -> Optional[Path]:
    """Path of the plugin prefetch cache file, or None when it cannot be
    determined (neither XDG_CACHE_HOME nor HOME is set)."""
    cache_home = os.environ.get("XDG_CACHE_HOME")
    if cache_home is not None:
        return Path(cache_home, "vim-plugin-cache.json")

    home = os.environ.get("HOME")
    if home is None:
        return None
    return Path(home, ".cache", "vim-plugin-cache.json")
285
286
class Cache:
    """Commit-keyed store of prefetched plugins, persisted as JSON.

    Entries loaded from the on-disk cache take precedence over the plugins
    passed to the constructor.
    """

    def __init__(self, initial_plugins: List[Plugin]) -> None:
        self.cache_file = get_cache_path()

        known = {plugin.commit: plugin for plugin in initial_plugins}
        known.update(self.load())
        self.downloads = known

    def load(self) -> Dict[str, Plugin]:
        """Read cached plugins from disk; empty dict when no cache exists."""
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        with open(self.cache_file) as f:
            data = json.load(f)

        entries: Dict[str, Plugin] = {}
        for attr in data.values():
            entries[attr["commit"]] = Plugin(
                attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
            )
        return entries

    def store(self) -> None:
        """Persist the current downloads to disk (no-op without a path)."""
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        serialized = {
            commit: plugin.as_json() for commit, plugin in self.downloads.items()
        }
        with open(self.cache_file, "w+") as f:
            json.dump(serialized, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
327
328
def prefetch(
    args: Tuple[str, str, str], cache: Cache
) -> Tuple[str, str, Union[Exception, Plugin]]:
    """Thread-pool worker: prefetch one plugin and record it in the cache.

    Never raises — any exception is returned as the third tuple element so
    the caller can report all failures at once.
    """
    assert len(args) == 3
    owner, repo, alias = args
    try:
        fetched = prefetch_plugin(owner, repo, alias, cache)
        cache[fetched.commit] = fetched
        return (owner, repo, fetched)
    except Exception as err:
        return (owner, repo, err)
340
341
# Warning banner written as the first line of the generated nix file.
header = (
    "# This file has been generated by ./pkgs/misc/vim-plugins/update.py. Do not edit!"
)
345
346
def generate_nix(plugins: List[Tuple[str, str, Plugin]], outfile: str):
    """Write `outfile` as a nix expression with one buildVimPluginFrom2Nix
    derivation per (owner, repo, plugin) triple.

    Plugins are sorted case-insensitively by name so the generated file is
    stable across runs and diffs stay small.
    """
    sorted_plugins = sorted(plugins, key=lambda v: v[2].name.lower())

    with open(outfile, "w+") as f:
        f.write(header)
        f.write(
            """
{ lib, buildVimPluginFrom2Nix, fetchFromGitHub, overrides ? (self: super: {}) }:

let
  packages = ( self:
{"""
        )
        for owner, repo, plugin in sorted_plugins:
            # fetchSubmodules is only emitted when true, keeping output terse
            if plugin.has_submodules:
                submodule_attr = "\n      fetchSubmodules = true;"
            else:
                submodule_attr = ""

            f.write(
                f"""
  {plugin.normalized_name} = buildVimPluginFrom2Nix {{
    pname = "{plugin.normalized_name}";
    version = "{plugin.version}";
    src = fetchFromGitHub {{
      owner = "{owner}";
      repo = "{repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }};
  }};
"""
            )
        # close the attribute set and apply the user-supplied overrides
        f.write(
            """
});
in lib.fix' (lib.extends overrides packages)
"""
        )
    print(f"updated {outfile}")
387
388
def parse_args():
    """Build and evaluate the command line interface.

    Returns a namespace with ``input_file`` (the owner/repo list to read)
    and ``outfile`` (the nix file to generate).
    """
    parser = argparse.ArgumentParser(
        description=(
            # BUGFIX: the adjacent string literals previously concatenated
            # without a separator, rendering as "…vim pluginsBy default…".
            "Updates nix derivations for vim plugins. "
            f"By default from {DEFAULT_IN} to {DEFAULT_OUT}"
        )
    )
    parser.add_argument(
        "--input-names",
        "-i",
        dest="input_file",
        default=DEFAULT_IN,
        help="A list of plugins in the form owner/repo",
    )
    parser.add_argument(
        "--out",
        "-o",
        dest="outfile",
        default=DEFAULT_OUT,
        help="Filename to save generated nix code",
    )

    return parser.parse_args()
412
413
def main() -> None:
    """Drive the update: load the plugin spec, prefetch every plugin
    concurrently (reusing cached downloads), and regenerate the nix file."""
    args = parse_args()
    plugin_names = load_plugin_spec(args.input_file)
    current_plugins = get_current_plugins()
    cache = Cache(current_plugins)

    prefetch_with_cache = functools.partial(prefetch, cache=cache)

    try:
        # synchronous variant for debugging
        # results = list(map(prefetch_with_cache, plugin_names))

        # multiprocessing.dummy.Pool is a thread pool; the work is network
        # and subprocess bound.  FIX: use it as a context manager so the
        # pool is always closed/terminated instead of leaking its threads.
        with Pool(processes=30) as pool:
            results = pool.map(prefetch_with_cache, plugin_names)
    finally:
        # persist whatever was fetched, even when a worker failed
        cache.store()

    plugins = check_results(results)

    generate_nix(plugins, args.outfile)


if __name__ == "__main__":
    main()