# Python library used to update plugins:
# - pkgs/applications/editors/vim/plugins/update.py
# - pkgs/applications/editors/kakoune/plugins/update.py
# - maintainers/scripts/update-luarocks-packages

# format:
# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
# type-check:
# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
# lint:
# $ nix run nixpkgs#python3.pkgs.flake8 -- --ignore E501,E265 maintainers/scripts/pluginupdate.py
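#
# Sketch of how an editor-specific update script uses this library (a minimal
# sketch; the nix expression and root path below are illustrative placeholders,
# the real callers are the scripts listed at the top of this file):
#
#   import pluginupdate
#   from pathlib import Path
#
#   GET_PLUGINS = "with import <localpkgs> {}; ..."  # evaluated by Editor.get_current_plugins
#   editor = pluginupdate.Editor("vim", Path("pkgs/applications/editors/vim/plugins"), GET_PLUGINS)
#   editor.run()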

import argparse
import csv
import functools
import http
import json
import logging
import os
import subprocess
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from dataclasses import asdict, dataclass
from datetime import UTC, datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from urllib.parse import urljoin, urlparse

import git

ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

log = logging.getLogger()


def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)
    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
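
# Worked example: the fetchers below are decorated with
#   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
# which retries a failing call after sleeping 3s, 6s and 12s; the fourth
# attempt runs outside the try/except, so its exception propagates to the caller.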


@dataclass
class FetchConfig:
    proc: int
    github_token: str


def make_request(url: str, token=None) -> urllib.request.Request:
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)


# a dictionary of plugins and their new repositories
Redirects = Dict["PluginDesc", "Repo"]


class Repo:
    def __init__(self, uri: str, branch: str) -> None:
        self.uri = uri
        """Url to the repo"""
        self._branch = branch
        # Redirect is the new Repo to use
        self.redirect: Optional["Repo"] = None
        self.token = "dummy_token"

    @property
    def name(self):
        return self.uri.split("/")[-1]

    @property
    def branch(self):
        return self._branch or "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        log.debug("Latest commit")
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")

        return loaded["rev"], updated

    def _prefetch(self, ref: Optional[str]):
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded
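
    # nix-prefetch-git prints a JSON document on stdout; only the "rev", "date"
    # and "sha256" fields are consumed here. Shape, with placeholder values:
    #   {
    #     "rev": "<commit sha>",
    #     "date": "2024-01-01T00:00:00+00:00",
    #     "sha256": "<hash>",
    #     ...
    #   }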

    def prefetch(self, ref: Optional[str]) -> str:
        print("Prefetching")
        loaded = self._prefetch(ref)
        return loaded["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        return f"""fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}"""


class RepoGitHub(Repo):
    def __init__(self, owner: str, repo: str, branch: str) -> None:
        self.owner = owner
        self.repo = repo
        self.token = None
        super().__init__(self.url(""), branch)
        log.debug(
            "Instantiating github repo owner=%s and repo=%s", self.owner, self.repo
        )

    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found in feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found in feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            return Path(str(url.path)).name, updated
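
    # The Atom entry parsed above looks roughly like this (abridged, placeholder
    # values); the commit hash is the last path component of the <link> href and
    # <updated> is an ISO 8601 UTC timestamp:
    #   <entry>
    #     <link href="https://github.com/owner/repo/commit/<commit sha>"/>
    #     <updated>2024-01-01T00:00:00Z</updated>
    #   </entry>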

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f"""fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}"""


@dataclass(frozen=True)
class PluginDesc:
    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
        branch = row["branch"]
        repo = make_repo(row["repo"], branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
        branch = "HEAD"
        alias = None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)
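
    # load_from_string accepts the spec format used by the "add" subcommand,
    # e.g. (hypothetical entries):
    #   owner/repo
    #   owner/repo as some-alias
    #   owner/repo@release-branch
    #   https://example.com/owner/repo@branch as alias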


@dataclass
class Plugin:
    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy


def load_plugins_from_csv(
    config: FetchConfig,
    input_file: Path,
) -> List[PluginDesc]:
    log.debug("Load plugins from csv %s", input_file)
    plugins = []
    with open(input_file, newline="") as csvfile:
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(
            csvfile,
        )
        for line in reader:
            plugin = PluginDesc.load_from_csv(config, line)
            plugins.append(plugin)

    return plugins
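
# The plugin list is a unix-dialect CSV file with a header row; the columns
# match the fields written back by rewrite_input. A hypothetical file:
#   repo,branch,alias
#   https://github.com/owner/some-plugin/,HEAD,
#   https://github.com/owner/other-plugin/,main,some-alias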


def run_nix_expr(expr, nixpkgs: str):
    """
    :param expr: nix expression used to fetch the current plugins
    :param nixpkgs: path to a nixpkgs checkout
    """
    with CleanEnvironment(nixpkgs) as nix_path:
        cmd = [
            "nix",
            "eval",
            "--extra-experimental-features",
            "nix-command",
            "--impure",
            "--json",
            "--expr",
            expr,
            "--nix-path",
            nix_path,
        ]
        log.debug("Running command: %s", " ".join(cmd))
        out = subprocess.check_output(cmd, timeout=90)
        data = json.loads(out)
        return data
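
# The effective invocation looks roughly like this (the nix-path value comes
# from CleanEnvironment; the checkout path is illustrative):
#   nix eval --extra-experimental-features nix-command --impure --json \
#     --expr '<get_plugins expression>' --nix-path localpkgs=/path/to/nixpkgs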


class Editor:
    """The configuration of the update script."""

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        self.nixpkgs_repo = None

    def add(self, args):
        """CSV spec"""
        log.debug("called the 'add' command")
        fetch_config = FetchConfig(args.proc, args.github_token)
        editor = self
        for plugin_line in args.add_plugins:
            log.debug("using plugin_line %s", plugin_line)
            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
            log.debug("loaded as pdesc %s", pdesc)
            append = [pdesc]
            editor.rewrite_input(
                fetch_config, args.input_file, editor.deprecated, append=append
            )
            plugin, _ = prefetch_plugin(
                pdesc,
            )
            autocommit = not args.no_commit
            if autocommit:
                commit(
                    editor.nixpkgs_repo,
                    "{drv_name}: init at {version}".format(
                        drv_name=editor.get_drv_name(plugin.normalized_name),
                        version=plugin.version,
                    ),
                    [args.outfile, args.input_file],
                )

    # Expects arguments generated by 'update' subparser
    def update(self, args):
        """CSV spec"""
        print("the update member function should be overridden in subclasses")

    def get_current_plugins(self, nixpkgs) -> List[Plugin]:
        """To fill the cache"""
        data = run_nix_expr(self.get_plugins, nixpkgs)
        plugins = []
        for name, attr in data.items():
            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            plugins.append(p)
        return plugins
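
    # The get_plugins expression is expected to evaluate to a JSON object
    # mapping plugin names to attrsets providing at least "rev", "submodules"
    # and "sha256", e.g. (placeholder values):
    #   { "some-plugin": { "rev": "<commit sha>", "submodules": false, "sha256": "<hash>" } }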

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        """CSV spec"""
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, _plugins, _outfile: str):
        """Returns nothing for now, writes directly to outfile"""
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                pool = Pool(processes=config.proc)
                results = pool.map(_prefetch, plugins)
            finally:
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update

    @property
    def attr_path(self):
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)
    def create_parser(self):
        common = argparse.ArgumentParser(
            add_help=False,
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )
        common.add_argument(
            "--nixpkgs",
            type=str,
            default=os.getcwd(),
            help="Path to the nixpkgs checkout to update",
        )
        common.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            type=Path,
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        common.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            type=Path,
            help="Filename to save generated nix code",
        )
        common.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        common.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows setting --proc to higher values.
            Uses the GITHUB_API_TOKEN environment variable as the default value.""",
        )
        common.add_argument(
            "--no-commit",
            "-n",
            action="store_true",
            default=False,
            help="Do not commit changes automatically",
        )
        common.add_argument(
            "--debug",
            "-d",
            choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level",
        )

        main = argparse.ArgumentParser(
            parents=[common],
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )

        subparsers = main.add_subparsers(dest="command", required=False)
        padd = subparsers.add_parser(
            "add",
            parents=[],
            description="Add a new plugin",
            add_help=False,
        )
        padd.set_defaults(func=self.add)
        padd.add_argument(
            "add_plugins",
            default=None,
            nargs="+",
            help=f"Plugin to add to {self.attr_path} from GitHub in the form owner/repo",
        )

        pupdate = subparsers.add_parser(
            "update",
            description="Update all or a subset of existing plugins",
            add_help=False,
        )
        pupdate.set_defaults(func=self.update)
        return main
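
    # Typical invocations of an update script built on this parser (the script
    # name is illustrative):
    #   $ ./update.py --proc 1 --debug DEBUG update
    #   $ ./update.py add "owner/repo@branch as alias"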

    def run(
        self,
    ):
        """
        Convenience function
        """
        parser = self.create_parser()
        args = parser.parse_args()
        command = args.command or "update"
        log.setLevel(LOG_LEVELS[args.debug])
        log.info("Chose to run command: %s", command)
        self.nixpkgs = args.nixpkgs

        self.nixpkgs_repo = git.Repo(args.nixpkgs, search_parent_directories=True)

        getattr(self, command)(args)


class CleanEnvironment(object):
    def __init__(self, nixpkgs):
        self.local_pkgs = nixpkgs

    def __enter__(self) -> str:
        """
        local_pkgs = str(Path(__file__).parent.parent.parent)
        """
        self.old_environ = os.environ.copy()
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        return f"localpkgs={self.local_pkgs}"

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self.old_environ)
        self.empty_config.close()


def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit!")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )


def print_download_error(plugin: PluginDesc, ex: Exception):
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines), file=sys.stderr)


def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    """Split prefetch results into successful plugins and failures,
    collecting any repository redirects encountered along the way."""
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for pdesc, result, redirect in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
        else:
            new_pdesc = pdesc
            if redirect is not None:
                redirects.update({pdesc: redirect})
                new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
            plugins.append((new_pdesc, result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for plugin, exception in failures:
            print_download_error(plugin, exception)

        sys.exit(1)


def make_repo(uri: str, branch) -> Repo:
    """Instantiate a Repo with the correct specialization depending on the server (GitHub or plain git)."""
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    res = urlparse(uri)
    if res.netloc in ["github.com", ""]:
        res = res.path.strip("/").split("/")
        repo = RepoGitHub(res[0], res[1], branch)
    else:
        repo = Repo(uri.strip(), branch)
    return repo


def get_cache_path(cache_file_name: str) -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, cache_file_name)
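
# With the Editor defaults this resolves to something like
# $XDG_CACHE_HOME/vim-plugin-cache.json, falling back to
# ~/.cache/vim-plugin-cache.json ("vim" stands in for the editor name).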


class Cache:
    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        self.cache_file = get_cache_path(cache_file_name)

        downloads = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads

    def store(self) -> None:
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            data = {}
            for name, attr in self.downloads.items():
                data[name] = attr.as_json()
            json.dump(data, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value


def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
        cache[plugin.commit] = plugin
        return (pluginDesc, plugin, redirect)
    except Exception as e:
        return (pluginDesc, e, None)


def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # map from the old PluginDesc to its new repository
    redirects: Redirects = {},
    append: List[PluginDesc] = [],
):
    plugins = load_plugins_from_csv(
        config,
        input_file,
    )

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")
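
    # A resulting deprecated.json entry looks like this (hypothetical names):
    #   "old-plugin-name": { "date": "2024-01-01", "new": "new-plugin-name" }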

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ["repo", "branch", "alias"]
        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))


def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    repo.index.add([str(f.resolve()) for f in files])

    if repo.index.diff("HEAD"):
        print(f'committing to nixpkgs "{message}"')
        repo.index.commit(message)
    else:
        print("no changes in working tree to commit")


def update_plugins(editor: Editor, args):
    """The main entry function of this module.
    All input arguments are grouped in the `Editor`."""

    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        try:
            repo = git.Repo(os.getcwd())
            updated = datetime.now(tz=UTC).strftime("%Y-%m-%d")
            print(args.outfile)
            commit(repo, f"{editor.attr_path}: update on {updated}", [args.outfile])
        except git.InvalidGitRepositoryError as e:
            print(f"Not in a git repository: {e}", file=sys.stderr)
            sys.exit(1)

    if redirects:
        update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )