1# python library used to update plugins:
2# - pkgs/applications/editors/vim/plugins/update.py
3# - pkgs/applications/editors/kakoune/plugins/update.py
4# - maintainers/scripts/update-luarocks-packages
5
6# format:
7# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
8# type-check:
9# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
10# linted:
11# $ nix run nixpkgs#python3.pkgs.flake8 -- --ignore E501,E265 maintainers/scripts/pluginupdate.py
12
13import argparse
14import csv
15import functools
16import http
17import json
18import logging
19import os
20import re
21import subprocess
22import sys
23import time
24import traceback
25import urllib.error
26import urllib.parse
27import urllib.request
28import xml.etree.ElementTree as ET
29from dataclasses import asdict, dataclass
30from datetime import UTC, datetime
31from functools import wraps
32from multiprocessing.dummy import Pool
33from pathlib import Path
34from tempfile import NamedTemporaryFile
35from typing import Any, Callable, Dict, List, Optional, Tuple, Union
36from urllib.parse import urljoin, urlparse
37
38import git
39
# Fully-qualified Atom tag names used when parsing GitHub commit feeds with
# xml.etree (the trailing `# "` comments just re-balance quotes for vim).
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

# Maps log-level names ("DEBUG", "INFO", ...) to numeric levels; the names
# are offered as choices for the --debug CLI flag.
LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

# Root logger shared by this module and the per-editor update scripts.
log = logging.getLogger()
50
51
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)
    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
    each retry
    """

    def deco_retry(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            wait = delay
            # tries - 1 guarded attempts, then one final unguarded call so the
            # last failure propagates to the caller.
            for _ in range(tries - 1):
                try:
                    return func(*args, **kwargs)
                except ExceptionToCheck as exc:
                    print(f"{str(exc)}, Retrying in {wait} seconds...")
                    time.sleep(wait)
                    wait *= backoff
            return func(*args, **kwargs)

        return wrapper  # true decorator

    return deco_retry
81
82
@dataclass
class FetchConfig:
    """Shared fetch settings for one update run."""

    # Number of parallel fetch workers (see the --proc CLI flag).
    proc: int
    # GitHub API token sent as an Authorization header (see --github-token).
    github_token: str
87
88
def make_request(url: str, token=None) -> urllib.request.Request:
    """Build a GET request for *url*, attaching a GitHub token
    Authorization header when a token is given."""
    auth = {} if token is None else {"Authorization": f"token {token}"}
    return urllib.request.Request(url, headers=auth)
94
95
# a dictionary of plugins and their new repositories:
# maps the old PluginDesc to the Repo that the hoster redirected it to
Redirects = Dict["PluginDesc", "Repo"]
98
99
class Repo:
    """A generic git repository, fetched with nix-prefetch-git."""

    def __init__(self, uri: str, branch: str) -> None:
        # Url to the repo
        self.uri = uri
        self._branch = branch
        # Set when the hoster reports that the repository moved.
        self.redirect: Optional["Repo"] = None
        self.token = "dummy_token"

    @property
    def name(self):
        """Last path component of the URI."""
        return self.uri.rpartition("/")[2]

    @property
    def branch(self):
        """Configured branch, or HEAD when none was given."""
        return self._branch if self._branch else "HEAD"

    def __str__(self) -> str:
        return self.uri

    def __repr__(self) -> str:
        return "Repo({}, {})".format(self.name, self.uri)

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        # Generic hosts cannot be probed cheaply; assume submodules are
        # present — _prefetch passes --fetch-submodules either way.
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (revision, commit date) of the newest commit."""
        log.debug("Latest commit")
        info = self._prefetch(None)
        when = datetime.strptime(info["date"], "%Y-%m-%dT%H:%M:%S%z")
        return info["rev"], when

    def _prefetch(self, ref: Optional[str]):
        """Run nix-prefetch-git (optionally at *ref*) and decode its JSON output."""
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        return json.loads(subprocess.check_output(cmd))

    def prefetch(self, ref: Optional[str]) -> str:
        """Fetch the source at *ref* and return its sha256."""
        print("Prefetching")
        return self._prefetch(ref)["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchgit call pinning *plugin*'s commit and hash."""
        return f"""fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}"""
155
156
class RepoGitHub(Repo):
    """GitHub-hosted repository: commit info comes from the public Atom feed
    and sources are prefetched from release tarballs when possible."""

    def __init__(self, owner: str, repo: str, branch: str) -> None:
        self.owner = owner
        self.repo = repo
        # NOTE(review): overwritten by Repo.__init__ below (sets "dummy_token");
        # callers assign the real token afterwards (see PluginDesc.load_from_csv).
        self.token = None
        """Url to the repo"""
        super().__init__(self.url(""), branch)
        log.debug(
            "Instantiating github repo owner=%s and repo=%s", self.owner, self.repo
        )

    @property
    def name(self):
        # Repository name alone, without the owner prefix.
        return self.repo

    def url(self, path: str) -> str:
        """Join *path* onto this repo's https://github.com/<owner>/<repo>/ base."""
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        """Probe for a .gitmodules blob on the branch; a 404 means no submodules."""
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (commit sha, updated datetime) of the newest commit on the
        branch, parsed from the GitHub Atom commit feed.

        Side effect: records any repository redirect on self.redirect.
        """
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()

            # Filter out illegal XML characters
            illegal_xml_regex = re.compile(b"[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]")
            xml = illegal_xml_regex.sub(b"", xml)

            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found feed entry {xml}"
            # NOTE(review): parsed without tzinfo — the feed timestamp ends in
            # 'Z', so this is presumably UTC but yields a naive datetime.
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            # The entry link ends in the commit sha, i.e. its last path component.
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        """When the response landed on a different URL than requested, the
        repository moved: remember the new location on self.redirect."""
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        """Prefetch via git when submodules exist (tarballs do not include
        them), otherwise via the cheaper tarball download."""
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        """Hash the source tarball of *ref* with nix-prefetch-url --unpack."""
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchFromGitHub call pinning *plugin*'s commit and hash."""
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f"""fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}"""
249
250
@dataclass(frozen=True)
class PluginDesc:
    """A requested plugin: which repository/branch to fetch and how to name it."""

    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        """The alias when one was given, otherwise the repository name."""
        return self.alias if self.alias is not None else self.repo.name

    def __lt__(self, other):
        # Ordered by repository name so the rewritten CSV stays sorted.
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
        """Build a PluginDesc from one CSV row (columns: repo, branch, alias)."""
        raw_branch = row["branch"]
        repo = make_repo(row["repo"], raw_branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, raw_branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
        """Parse the CLI shorthand `uri[@branch][ as alias]`."""
        branch, alias, uri = "HEAD", None, line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)
287
288
@dataclass
class Plugin:
    """A concrete, pinned plugin as written into the generated nix file."""

    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        # Dots are not usable in nix attribute names.
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        """Date-based version string (requires date to be set)."""
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        """Serializable form: all fields except the (non-JSON) date."""
        return {k: v for k, v in self.__dict__.items() if k != "date"}
310
311
def load_plugins_from_csv(
    config: FetchConfig,
    input_file: Path,
) -> List[PluginDesc]:
    """Read the plugin CSV (columns: repo, branch, alias) into PluginDescs.

    :param config: fetch settings propagated to every constructed repo
    :param input_file: path to the CSV plugin list
    """
    # FIX: the old per-read debug message said "Writing into %s" although this
    # function only reads the file — dropped the misleading line.
    log.debug("Load plugins from csv %s", input_file)
    with open(input_file, newline="") as csvfile:
        reader = csv.DictReader(csvfile)
        return [PluginDesc.load_from_csv(config, row) for row in reader]
328
329
330
def run_nix_expr(expr, nixpkgs: str):
    """Evaluate *expr* with `nix eval` and return the decoded JSON result.

    :param expr: nix expression to fetch current plugins
    :param nixpkgs: Path towards a nixpkgs checkout
    """
    with CleanEnvironment(nixpkgs) as nix_path:
        cmd = [
            "nix",
            "eval",
            "--extra-experimental-features",
            "nix-command",
            "--impure",
            "--json",
            "--expr",
            expr,
            "--nix-path",
            nix_path,
        ]
        log.debug("Running command: %s", " ".join(cmd))
        # 90s guard so a wedged evaluation cannot hang the updater forever.
        return json.loads(subprocess.check_output(cmd, timeout=90))
353
354
class Editor:
    """The configuration of the update script."""

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        """
        :param name: editor name, used for attribute paths and default filenames
        :param root: directory holding the plugin list and the generated nix file
        :param get_plugins: nix expression evaluating to the current plugin set
        :param default_in: plugin list, defaults to <root>/<name>-plugin-names
        :param default_out: generated nix file, defaults to <root>/generated.nix
        :param deprecated: json file recording renamed/moved plugins
        :param cache_file: filename of the prefetch cache
        """
        # FIX: extra logging arguments require a %s placeholder; the previous
        # call made the logging module raise a string-formatting error.
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        self.nixpkgs_repo = None

    def add(self, args):
        """Handle the 'add' subcommand: append each requested plugin to the
        CSV spec, prefetch it and optionally commit the change."""
        log.debug("called the 'add' command")
        fetch_config = FetchConfig(args.proc, args.github_token)
        for plugin_line in args.add_plugins:
            # FIX: same missing-%s logging bug as in __init__, twice.
            log.debug("using plugin_line %s", plugin_line)
            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
            log.debug("loaded as pdesc %s", pdesc)
            append = [pdesc]
            self.rewrite_input(
                fetch_config, args.input_file, self.deprecated, append=append
            )
            plugin, _ = prefetch_plugin(
                pdesc,
            )
            autocommit = not args.no_commit
            if autocommit:
                commit(
                    self.nixpkgs_repo,
                    "{drv_name}: init at {version}".format(
                        drv_name=self.get_drv_name(plugin.normalized_name),
                        version=plugin.version,
                    ),
                    [args.outfile, args.input_file],
                )

    # Expects arguments generated by 'update' subparser
    def update(self, args):
        """Handle the 'update' subcommand; editor subclasses must override."""
        # FIX: typo "overriden" -> "overridden"
        print("the update member function should be overridden in subclasses")

    def get_current_plugins(self, nixpkgs) -> List[Plugin]:
        """Evaluate the current plugin set from nixpkgs, to fill the cache."""
        data = run_nix_expr(self.get_plugins, nixpkgs)
        return [
            Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            for name, attr in data.items()
        ]

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        """Load the CSV plugin spec."""
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, _plugins, _outfile: str):
        """Returns nothing for now, writes directly to outfile"""
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        """Return a closure that prefetches every plugin in *input_file*,
        regenerates *outfile*, and returns any repository redirects."""
        cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                # FIX: close the pool deterministically instead of leaking its
                # worker threads (Pool supports the context-manager protocol).
                with Pool(processes=config.proc) as pool:
                    results = pool.map(_prefetch, plugins)
            finally:
                # Persist whatever was fetched, even on failure.
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update

    @property
    def attr_path(self):
        # e.g. "vimPlugins" for the "vim" editor.
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        """Fully-qualified attribute for one plugin, e.g. vimPlugins.foo."""
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        """Thin wrapper so subclasses can customize CSV rewriting."""
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        """Build the CLI: common options plus 'add'/'update' subcommands."""
        common = argparse.ArgumentParser(
            add_help=False,
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )
        common.add_argument(
            "--nixpkgs",
            type=str,
            default=os.getcwd(),
            # FIX: help text was copy-pasted from --debug ("Adjust log level").
            help="Path to the nixpkgs checkout to evaluate and commit to",
        )
        common.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            type=Path,
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        common.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            type=Path,
            help="Filename to save generated nix code",
        )
        common.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        common.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows to set --proc to higher values.
            Uses GITHUB_API_TOKEN environment variables as the default value.""",
        )
        common.add_argument(
            "--no-commit",
            "-n",
            action="store_true",
            default=False,
            help="Whether to autocommit changes",
        )
        common.add_argument(
            "--debug",
            "-d",
            choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level",
        )

        main = argparse.ArgumentParser(
            parents=[common],
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )

        subparsers = main.add_subparsers(dest="command", required=False)
        padd = subparsers.add_parser(
            "add",
            parents=[],
            description="Add new plugin",
            add_help=False,
        )
        padd.set_defaults(func=self.add)
        padd.add_argument(
            "add_plugins",
            default=None,
            nargs="+",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )

        pupdate = subparsers.add_parser(
            "update",
            description="Update all or a subset of existing plugins",
            add_help=False,
        )
        pupdate.set_defaults(func=self.update)
        return main

    def run(
        self,
    ):
        """
        Convenience function: parse the CLI and dispatch to add/update.
        """
        parser = self.create_parser()
        args = parser.parse_args()
        # No subcommand means "update".
        command = args.command or "update"
        log.setLevel(LOG_LEVELS[args.debug])
        log.info("Chose to run command: %s", command)
        self.nixpkgs = args.nixpkgs

        self.nixpkgs_repo = git.Repo(args.nixpkgs, search_parent_directories=True)

        getattr(self, command)(args)
568
569
class CleanEnvironment(object):
    """Yields a `--nix-path` entry pointing at the given nixpkgs checkout and
    restores the saved process environment on exit."""

    def __init__(self, nixpkgs):
        self.local_pkgs = nixpkgs

    def __enter__(self) -> str:
        self._saved_env = os.environ.copy()
        # Scratch nix config file; written here but not consumed downstream.
        self._scratch_config = NamedTemporaryFile()
        self._scratch_config.write(b"{}")
        self._scratch_config.flush()
        return f"localpkgs={self.local_pkgs}"

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self._saved_env)
        self._scratch_config.close()
587
588
def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    """Resolve the latest commit of *p* and prefetch its source hash.

    :param p: the plugin to fetch
    :param cache: optional commit→Plugin cache; a hit skips prefetching
    :return: the resulting Plugin, plus the new Repo when the hoster reported
        a redirect (None otherwise)
    """
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    # FIX: removed dead `commit = None` (immediately overwritten below) and
    # switched to lazy %-style logging arguments.
    log.info("Fetching last commit for plugin %s from %s@%s", name, repo.uri, branch)
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit !")
        # Refresh name/date: the alias or feed timestamp may have changed.
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug("prefetch %s", name)
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )
613
614
def print_download_error(plugin: PluginDesc, ex: Exception):
    """Report a failed download: one-liner on stderr, full traceback on stdout."""
    print(f"{plugin}: {ex}", file=sys.stderr)
    formatted = traceback.format_exception(ex.__class__, ex, ex.__traceback__)
    print("\n".join(chunk.rstrip("\n") for chunk in formatted))
623
624
def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    """Split prefetch results into successes and failures.

    Returns the successful (desc, plugin) pairs — descs rewritten through any
    repository redirect — together with the redirect mapping. Exits the
    process with status 1 when any plugin failed to download.
    """
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for pdesc, result, redirect in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
            continue
        new_pdesc = pdesc
        if redirect is not None:
            redirects[pdesc] = redirect
            new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
        plugins.append((new_pdesc, result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if not failures:
        print()
        return plugins, redirects

    print(f", {len(failures)} plugin(s) could not be downloaded:\n")
    for plugin, exception in failures:
        print_download_error(plugin, exception)
    sys.exit(1)
653
654
def make_repo(uri: str, branch) -> Repo:
    """Instantiate a Repo with the correct specialization for its server
    (GitHub or a generic git host)."""
    # dumb check: owner/repo shorthand or a github.com url means GitHub,
    # anything else is treated as a plain git repository.
    parsed = urlparse(uri)
    if parsed.netloc not in ("github.com", ""):
        return Repo(uri.strip(), branch)
    parts = parsed.path.strip("/").split("/")
    return RepoGitHub(parts[0], parts[1], branch)
665
666
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    """Locate *cache_file_name* under $XDG_CACHE_HOME, falling back to
    $HOME/.cache; None when neither variable is set."""
    cache_root = os.environ.get("XDG_CACHE_HOME")
    if cache_root is None:
        home = os.environ.get("HOME")
        if home is None:
            return None
        cache_root = str(Path(home, ".cache"))
    return Path(cache_root, cache_file_name)
676
677
class Cache:
    """Commit-hash → Plugin mapping, persisted as json in the XDG cache dir."""

    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        # May be None when neither XDG_CACHE_HOME nor HOME is set.
        self.cache_file = get_cache_path(cache_file_name)

        downloads = {plugin.commit: plugin for plugin in initial_plugins}
        # Entries already persisted on disk win over freshly evaluated ones.
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        """Read the cache file; empty dict when the file is missing or unset."""
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        with open(self.cache_file) as f:
            data = json.load(f)
        return {
            attr["commit"]: Plugin(
                attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
            )
            for attr in data.values()
        }

    def store(self) -> None:
        """Write the cache back to disk (no-op when the path is unset)."""
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        serialized = {name: plugin.as_json() for name, plugin in self.downloads.items()}
        with open(self.cache_file, "w+") as f:
            json.dump(serialized, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
718
719
def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    """Prefetch a single plugin and record the result in *cache*.

    Exceptions are captured and returned as the middle tuple element instead
    of raised, so a parallel map can aggregate failures (see check_results).
    """
    try:
        fetched, redirect = prefetch_plugin(pluginDesc, cache)
        cache[fetched.commit] = fetched
        return (pluginDesc, fetched, redirect)
    except Exception as err:
        return (pluginDesc, err, None)
729
730
def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: "Optional[Redirects]" = None,
    append: Optional[List[PluginDesc]] = None,
):
    """Rewrite the plugin CSV: append new plugins, apply repository redirects
    and record plugin renames in the deprecation json file.

    :param config: fetch configuration (token, parallelism)
    :param input_file: CSV file, re-read then rewritten in sorted order
    :param deprecated: json file mapping old plugin names to their successors
    :param redirects: old PluginDesc → the repository it moved to
    :param append: extra plugins to add to the CSV
    """
    # FIX: the defaults used to be mutable literals ({} and []), which Python
    # shares across calls; use None sentinels instead.
    redirects = redirects or {}
    append = append or []

    plugins = load_plugins_from_csv(
        config,
        input_file,
    )

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            # Only an actual rename needs a deprecation entry.
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ["repo", "branch", "alias"]
        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))
771
772
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    """Stage *files* and commit them with *message*; skip when nothing changed."""
    repo.index.add([str(path.resolve()) for path in files])

    if not repo.index.diff("HEAD"):
        print("no changes in working tree to commit")
        return
    print(f'committing to nixpkgs "{message}"')
    repo.index.commit(message)
781
782
def update_plugins(editor: Editor, args):
    """The main entry function of this module.
    All input arguments are grouped in the `Editor`."""

    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    run_update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = run_update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        try:
            repo = git.Repo(os.getcwd())
            stamp = datetime.now(tz=UTC).strftime("%Y-%m-%d")
            print(args.outfile)
            commit(repo, f"{editor.attr_path}: update on {stamp}", [args.outfile])
        except git.InvalidGitRepositoryError as err:
            print(f"Not in a git repository: {err}", file=sys.stderr)
            sys.exit(1)

    # A redirected repo means the generated file still references the old
    # location: run a second pass and commit the resolution.
    if redirects:
        run_update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )