nixpkgs mirror (for testing)
github.com/NixOS/nixpkgs
nix
1# python library used to update plugins:
2# - pkgs/applications/editors/vim/plugins/update.py
3# - pkgs/applications/editors/kakoune/plugins/update.py
4# - pkgs/development/lua-modules/updater/updater.py
5
6# format:
7# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
8# type-check:
9# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
10# linted:
11# $ nix run nixpkgs#python3.pkgs.flake8 -- --ignore E501,E265 maintainers/scripts/pluginupdate.py
12
13import argparse
14import csv
15import functools
16import http
17import json
18import logging
19import os
20import re
21import subprocess
22import sys
23import time
24import traceback
25import urllib.error
26import urllib.parse
27import urllib.request
28import xml.etree.ElementTree as ET
29from dataclasses import asdict, dataclass
30from datetime import UTC, datetime
31from functools import wraps
32from multiprocessing.dummy import Pool
33from pathlib import Path
34from tempfile import NamedTemporaryFile
35from typing import Any, Callable, Dict, List, Optional, Tuple, Union
36from urllib.parse import urljoin, urlparse
37
38import git
39
# Atom-feed tag names (with their XML namespace), used when parsing
# GitHub's per-branch commits feed in RepoGitHub.latest_commit.
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

# Map from level name (e.g. "DEBUG") to its numeric logging level;
# used to translate the --debug command-line argument.
LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

# Root logger shared by this module and its callers.
log = logging.getLogger()
50
51
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.

    Adapted from:
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)

    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    """

    def deco_retry(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            remaining = tries
            wait = delay
            # Retry while more than one attempt remains; the very last
            # attempt happens after the loop so its exception propagates.
            while remaining > 1:
                try:
                    return func(*args, **kwargs)
                except ExceptionToCheck as exc:
                    print(f"{str(exc)}, Retrying in {wait} seconds...")
                    time.sleep(wait)
                    remaining -= 1
                    wait *= backoff
            return func(*args, **kwargs)

        return wrapper  # true decorator

    return deco_retry
81
82
@dataclass
class FetchConfig:
    """Settings shared by all plugin-fetching operations."""

    # Number of concurrent processes used when prefetching plugins.
    proc: int
    # GitHub API token; allows higher --proc values without rate limiting.
    github_token: str
87
88
def make_request(url: str, token: Optional[str] = None) -> urllib.request.Request:
    """Build a GET request for `url`, adding a GitHub token auth header when given."""
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)
94
95
# a dictionary of plugins and their new repositories:
# maps the plugin as described in the input file to the repository it
# redirected to, so the input file can be rewritten accordingly.
Redirects = Dict["PluginDesc", "Repo"]
98
99
class Repo:
    """A plain git repository, prefetched via nix-prefetch-git.

    Base class for specialized repositories (see RepoGitHub).
    """

    def __init__(self, uri: str, branch: str) -> None:
        # Url to the repo
        self.uri = uri
        self._branch = branch
        # Redirect is the new Repo to use
        self.redirect: Optional["Repo"] = None
        self.token = "dummy_token"

    @property
    def name(self):
        """Last path component of the URI, e.g. 'nixpkgs' for .../NixOS/nixpkgs."""
        return self.uri.strip("/").split("/")[-1]

    @property
    def branch(self):
        """Configured branch, falling back to 'HEAD' when unset or empty."""
        return self._branch or "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        # Conservative default for generic git hosts: always fetch submodules.
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (revision, commit date) of the newest commit on the branch."""
        log.debug("Latest commit")
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")

        return loaded["rev"], updated

    def _prefetch(self, ref: Optional[str]):
        """Run nix-prefetch-git for `ref` (or the default branch) and return its JSON."""
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        """Prefetch `ref` and return its sha256 hash."""
        # BUG FIX: print() does not do %-style interpolation (that is a
        # logging feature), so the old message never contained the URI.
        print(f"Prefetching {self.uri}")
        loaded = self._prefetch(ref)
        return loaded["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchgit expression pinning `plugin`'s commit and hash."""
        return f"""fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}"""
155
156
class RepoGitHub(Repo):
    """A GitHub-hosted repository.

    Uses GitHub's web and Atom endpoints instead of `git` where possible (no
    clone needed) and records repository redirects (renames/transfers).
    """

    def __init__(self, owner: str, repo: str, branch: str) -> None:
        self.owner = owner
        self.repo = repo
        # GitHub API token sent with requests; set by callers when available.
        self.token = None
        """Url to the repo"""
        super().__init__(self.url(""), branch)
        log.debug(
            "Instantiating github repo owner=%s and repo=%s", self.owner, self.repo
        )

    @property
    def name(self):
        # The repository name is known directly; no URI parsing needed.
        return self.repo

    def url(self, path: str) -> str:
        """Join `path` onto this repository's https://github.com/ URL."""
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        """Probe for a .gitmodules file on the branch; a 404 means no submodules."""
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (commit sha, updated time) parsed from the branch's Atom feed.

        Side effect: records a redirect on `self.redirect` if GitHub
        redirected the request to a different repository.
        """
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()

            # Filter out illegal XML characters
            illegal_xml_regex = re.compile(b"[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]")
            xml = illegal_xml_regex.sub(b"", xml)

            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            # The commit sha is the last path component of the entry's link URL.
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        """Record a new RepoGitHub on self.redirect when the response URL moved."""
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        """Return the sha256 for `commit`; uses the tarball when no submodules exist."""
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        """Prefetch the GitHub archive tarball of `ref` (cheaper than a git clone)."""
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchFromGitHub expression pinning `plugin`."""
        if plugin.has_submodules:
            submodule_attr = "\n    fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f"""fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}"""
249
250
@dataclass(frozen=True)
class PluginDesc:
    """A plugin as listed in the input file: a repository, a branch, and an
    optional alias overriding the derivation name."""

    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        """Derivation name: the alias when given, otherwise the repo name."""
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        # Sort by repository name so rewritten CSV files stay stable.
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
        """Build a PluginDesc from a CSV row with 'repo', 'branch' and 'alias'."""
        log.debug("Loading row %s", row)
        branch = row["branch"]
        repo = make_repo(row["repo"], branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
        """Parse a 'uri[@branch][ as alias]' spec as used by the `add` command."""
        branch = "HEAD"
        alias = None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            # BUG FIX: split at most once, from the right, so URIs that
            # themselves contain '@' (e.g. git@host/...) no longer raise
            # ValueError when unpacking.
            uri, branch = uri.rsplit("@", 1)
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)
288
289
@dataclass
class Plugin:
    """Metadata for one prefetched plugin; (de)serialized to/from the cache."""

    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        """The name with dots replaced, suitable as a nix attribute name."""
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        """Latest-commit date formatted as YYYY-MM-DD."""
        assert self.date is not None
        return f"{self.date:%Y-%m-%d}"

    def as_json(self) -> Dict[str, str]:
        """All fields except the (non-JSON-serializable) date."""
        attrs = dict(self.__dict__)
        attrs.pop("date")
        return attrs
311
312
def load_plugins_from_csv(
    config: FetchConfig,
    input_file: Path,
) -> List[PluginDesc]:
    """Read the plugin list from `input_file` (CSV columns: repo,branch,alias)."""
    log.debug("Load plugins from csv %s", input_file)
    plugins = []
    with open(input_file, newline="") as csvfile:
        # BUG FIX: this debug line wrongly said "Writing into" — the file is
        # being read here, not written.
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(
            csvfile,
        )
        for line in reader:
            plugin = PluginDesc.load_from_csv(config, line)
            plugins.append(plugin)

    return plugins
329
330
331
def run_nix_expr(expr, nixpkgs: str, **args):
    """Evaluate a nix expression with `nix eval --json` and return the parsed result.

    :param expr nix expression to fetch current plugins
    :param nixpkgs Path towards a nixpkgs checkout
    """
    with CleanEnvironment(nixpkgs) as nix_path:
        argv = [
            "nix",
            "eval",
            "--extra-experimental-features",
            "nix-command",
            "--impure",
            "--json",
            "--expr",
            expr,
            "--nix-path",
            nix_path,
        ]
        log.debug("Running command: %s", " ".join(argv))
        return json.loads(subprocess.check_output(argv, **args))
354
355
class Editor:
    """The configuration of the update script.

    Subclasses override `update` and `generate_nix` for each editor
    (vim, kakoune, ...) that reuses this library.
    """

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        """
        :param name: editor name; used to derive attribute paths and defaults
        :param root: directory holding the plugin list and the generated nix file
        :param get_plugins: nix expression evaluating to the current plugins
        """
        # BUG FIX: logging needs a %s placeholder for extra arguments; the
        # bare second argument used to raise a string-formatting error
        # whenever DEBUG logging was enabled.
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        # Set in run() once the nixpkgs checkout is known.
        self.nixpkgs_repo = None

    def add(self, args):
        """Append command-line plugins to the CSV file, prefetch them, and
        optionally commit the result."""
        log.debug("called the 'add' command")
        fetch_config = FetchConfig(args.proc, args.github_token)
        editor = self
        for plugin_line in args.add_plugins:
            # BUG FIX: %s placeholders added (see __init__).
            log.debug("using plugin_line %s", plugin_line)
            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
            log.debug("loaded as pdesc %s", pdesc)
            append = [pdesc]
            editor.rewrite_input(
                fetch_config, args.input_file, editor.deprecated, append=append
            )
            plugin, _ = prefetch_plugin(
                pdesc,
            )
            autocommit = not args.no_commit
            if autocommit:
                commit(
                    editor.nixpkgs_repo,
                    "{drv_name}: init at {version}".format(
                        drv_name=editor.get_drv_name(plugin.normalized_name),
                        version=plugin.version,
                    ),
                    [args.outfile, args.input_file],
                )

    # Expects arguments generated by 'update' subparser
    def update(self, args):
        """CSV spec"""
        print("the update member function should be overridden in subclasses")

    def get_current_plugins(self, nixpkgs) -> List[Plugin]:
        """Evaluate `get_plugins` against `nixpkgs` to seed the cache."""
        data = run_nix_expr(self.get_plugins, nixpkgs)
        plugins = []
        for name, attr in data.items():
            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            plugins.append(p)
        return plugins

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        """CSV spec"""
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, _plugins, _outfile: str):
        """Returns nothing for now, writes directly to outfile"""
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        """Return a closure that prefetches every plugin, writes `outfile`,
        and returns the repository redirects it discovered."""
        cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                # BUG FIX: use the pool as a context manager so its worker
                # threads are terminated instead of leaked.
                with Pool(processes=config.proc) as pool:
                    results = pool.map(_prefetch, plugins)
            finally:
                # Persist whatever was fetched, even on failure.
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update

    @property
    def attr_path(self):
        """Name of the nixpkgs attribute set holding this editor's plugins."""
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        """Fully qualified derivation name, e.g. 'vimPlugins.foo'."""
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        # Thin indirection so subclasses can customize CSV rewriting.
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        """Build the argparse parser with the 'add' and 'update' subcommands."""
        common = argparse.ArgumentParser(
            add_help=False,
            description=(
                f"""
            Updates nix derivations for {self.name} plugins.\n
            By default from {self.default_in} to {self.default_out}"""
            ),
        )
        common.add_argument(
            "--nixpkgs",
            type=str,
            default=os.getcwd(),
            # BUG FIX: help text was copy-pasted from --debug ("Adjust log
            # level"); describe the actual option instead.
            help="Path to the nixpkgs checkout to evaluate",
        )
        common.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            type=Path,
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        common.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            type=Path,
            help="Filename to save generated nix code",
        )
        common.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        common.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows to set --proc to higher values.
            Uses GITHUB_API_TOKEN environment variables as the default value.""",
        )
        common.add_argument(
            "--no-commit",
            "-n",
            action="store_true",
            default=False,
            help="Whether to autocommit changes",
        )
        common.add_argument(
            "--debug",
            "-d",
            choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level",
        )

        main = argparse.ArgumentParser(
            parents=[common],
            description=(
                f"""
            Updates nix derivations for {self.name} plugins.\n
            By default from {self.default_in} to {self.default_out}"""
            ),
        )

        subparsers = main.add_subparsers(dest="command", required=False)
        padd = subparsers.add_parser(
            "add",
            parents=[],
            description="Add new plugin",
            add_help=False,
        )
        padd.set_defaults(func=self.add)
        padd.add_argument(
            "add_plugins",
            default=None,
            nargs="+",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )

        pupdate = subparsers.add_parser(
            "update",
            description="Update all or a subset of existing plugins",
            add_help=False,
        )
        pupdate.set_defaults(func=self.update)
        return main

    def run(
        self,
    ):
        """
        Convenience function: parse the CLI and dispatch to add/update.
        """
        parser = self.create_parser()
        args = parser.parse_args()
        command = args.command or "update"
        log.setLevel(LOG_LEVELS[args.debug])
        log.info("Chose to run command: %s", command)
        self.nixpkgs = args.nixpkgs

        self.nixpkgs_repo = git.Repo(args.nixpkgs, search_parent_directories=True)

        getattr(self, command)(args)
569
570
class CleanEnvironment(object):
    """Context manager handing out a nix search-path entry for a nixpkgs checkout.

    On entry it snapshots the process environment and creates an empty
    temporary nix config file; on exit it restores the environment and
    closes (thereby deleting) the temporary file.
    """

    def __init__(self, nixpkgs):
        self._nixpkgs = nixpkgs

    def __enter__(self) -> str:
        # NOTE(review): previously the path was derived from __file__:
        # str(Path(__file__).parent.parent.parent)
        self._saved_environ = os.environ.copy()
        self._empty_config = NamedTemporaryFile()
        self._empty_config.write(b"{}")
        self._empty_config.flush()
        return f"localpkgs={self._nixpkgs}"

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self._saved_environ)
        self._empty_config.close()
588
589
def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    """Fetch the latest commit of `p` and compute its sha256.

    Returns the resulting Plugin together with the repository it redirected
    to, if any. A cache hit on the commit hash skips the slow prefetch step.
    """
    repo, branch = p.repo, p.branch
    name = p.alias or repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()

    cached = cache[commit] if cache else None
    if cached is not None:
        log.debug("Cache hit !")
        # Refresh name/date: the cache entry may predate an alias change.
        cached.name = name
        cached.date = date
        return cached, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return Plugin(name, commit, has_submodules, sha256, date=date), repo.redirect
614
615
def print_download_error(plugin: PluginDesc, ex: Exception):
    """Report a failed plugin download on stderr, followed by its traceback."""
    print(f"{plugin}: {ex}", file=sys.stderr)
    frames = traceback.format_exception(ex.__class__, ex, ex.__traceback__)
    print("\n".join(frame.rstrip("\n") for frame in frames))
624
625
def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    """Split prefetch results into successes and failures.

    Prints a summary; when any plugin failed, every error is reported and
    the process exits with status 1. Successful entries are rewritten to
    point at their redirect target when one was detected.
    """
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for pdesc, result, redirect in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
            continue
        if redirect is None:
            plugins.append((pdesc, result))
        else:
            redirects[pdesc] = redirect
            plugins.append((PluginDesc(redirect, pdesc.branch, pdesc.alias), result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if not failures:
        print()
        return plugins, redirects

    print(f", {len(failures)} plugin(s) could not be downloaded:\n")
    for plugin, exception in failures:
        print_download_error(plugin, exception)
    sys.exit(1)
654
655
def make_repo(uri: str, branch) -> Repo:
    """Instantiate a Repo with the correct specialization depending on server (github spec)"""
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    parsed = urlparse(uri)
    if parsed.netloc not in ["github.com", ""]:
        return Repo(uri.strip(), branch)
    owner, repo_name = parsed.path.strip("/").split("/")[:2]
    return RepoGitHub(owner, repo_name, branch)
666
667
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    """Locate `cache_file_name` under $XDG_CACHE_HOME (or $HOME/.cache).

    Returns None when neither XDG_CACHE_HOME nor HOME is set.
    """
    cache_home = os.environ.get("XDG_CACHE_HOME", None)
    if cache_home is not None:
        return Path(cache_home, cache_file_name)

    home = os.environ.get("HOME", None)
    if home is None:
        return None
    return Path(home, ".cache", cache_file_name)
677
678
class Cache:
    """On-disk cache of prefetched plugins, keyed by commit hash."""

    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        # May be None when no cache directory can be determined.
        self.cache_file = get_cache_path(cache_file_name)

        # Seed with the plugins currently in nixpkgs, then let any entries
        # persisted on disk take precedence.
        entries = {plugin.commit: plugin for plugin in initial_plugins}
        entries.update(self.load())
        self.downloads = entries

    def load(self) -> Dict[str, Plugin]:
        """Read cached plugins from disk; empty dict when no usable cache exists."""
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        with open(self.cache_file) as f:
            data = json.load(f)

        return {
            attr["commit"]: Plugin(
                attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
            )
            for attr in data.values()
        }

    def store(self) -> None:
        """Write the cache back to disk (no-op when there is no cache path)."""
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            serialized = {key: plugin.as_json() for key, plugin in self.downloads.items()}
            json.dump(serialized, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
719
720
def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    """Prefetch one plugin, recording the result in `cache`.

    Never raises: any exception is returned as the middle tuple element so a
    worker pool can keep going and failures get reported at the end.
    """
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
        cache[plugin.commit] = plugin
    except Exception as e:
        return (pluginDesc, e, None)
    return (pluginDesc, plugin, redirect)
730
731
def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: Optional[Redirects] = None,
    append: Optional[List[PluginDesc]] = None,
):
    """Rewrite the plugin CSV `input_file`, optionally appending plugins and
    renaming redirected ones; redirects are also recorded in the
    `deprecated` JSON file.

    :param redirects: mapping from an old PluginDesc to its new Repo
    :param append: extra plugins to add to the CSV
    """
    # BUG FIX: replaced mutable default arguments ({} and []) — those are
    # created once and shared across every call in Python.
    redirects = redirects or {}
    append = append or []

    log.info("Rewriting input file %s", input_file)
    plugins = load_plugins_from_csv(
        config,
        input_file,
    )

    plugins.extend(append)

    if redirects:
        log.debug("Dealing with deprecated plugins listed in %s", deprecated)

        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        # TODO parallelize this step
        for pdesc, new_repo in redirects.items():
            log.info("Rewriting input file %s", input_file)
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        fieldnames = ["repo", "branch", "alias"]
        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))
777
778
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    """Stage `files` and commit them with `message`, skipping empty commits."""
    repo.index.add([str(f.resolve()) for f in files])

    if not repo.index.diff("HEAD"):
        print("no changes in working tree to commit")
        return
    print(f'committing to nixpkgs "{message}"')
    repo.index.commit(message)
787
788
def update_plugins(editor: Editor, args):
    """The main entry function of this module.
    All input arguments are grouped in the `Editor`."""

    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

    started = time.time()
    redirects = update()
    print(f"The plugin update took {time.time() - started:.2f}s.")
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        try:
            repo = git.Repo(os.getcwd())
            today = datetime.now(tz=UTC).strftime('%Y-%m-%d')
            print(args.outfile)
            commit(repo, f"{editor.attr_path}: update on {today}", [args.outfile])
        except git.InvalidGitRepositoryError as e:
            print(f"Not in a git repository: {e}", file=sys.stderr)
            sys.exit(1)

    # A second pass picks up the rewritten entries for redirected repos.
    if redirects:
        update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )