1# python library used to update plugins:
2# - pkgs/applications/editors/vim/plugins/update.py
3# - pkgs/applications/editors/kakoune/plugins/update.py
4# - maintainers/scripts/update-luarocks-packages
5
6# format:
7# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
8# type-check:
9# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
10# linted:
11# $ nix run nixpkgs#python3.pkgs.flake8 -- --ignore E501,E265 maintainers/scripts/pluginupdate.py
12
13import argparse
14import csv
15import functools
16import http
17import json
18import logging
19import os
20import subprocess
21import sys
22import time
23import traceback
24import urllib.error
25import urllib.parse
26import urllib.request
27import xml.etree.ElementTree as ET
28from dataclasses import asdict, dataclass
29from datetime import datetime
30from functools import wraps
31from multiprocessing.dummy import Pool
32from pathlib import Path
33from tempfile import NamedTemporaryFile
34from typing import Any, Callable, Dict, List, Optional, Tuple, Union
35from urllib.parse import urljoin, urlparse
36
37import git
38
# Namespaced tag names used when parsing GitHub's Atom commit feeds.
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

# Maps level names (e.g. "DEBUG") to their numeric values; used as the set of
# choices for the --debug CLI option and looked up again in Editor.run().
LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

# Root logger shared by the whole module; its level is set from the CLI.
log = logging.getLogger()
49
50
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)
    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
    each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            wait = delay
            # tries - 1 guarded attempts; the final attempt below is allowed
            # to propagate its exception to the caller.
            for _ in range(tries - 1):
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {wait} seconds...")
                    time.sleep(wait)
                    wait *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
80
81
@dataclass
class FetchConfig:
    """Settings shared by all fetch operations of one run."""

    # Number of concurrent fetch processes (size of the thread Pool).
    proc: int
    # GitHub API token injected into each Repo; may be None when the
    # GITHUB_API_TOKEN environment variable is unset.
    github_token: str
86
87
def make_request(url: str, token=None) -> urllib.request.Request:
    """Build a GET request for *url*, attaching a GitHub token header when given."""
    if token is None:
        return urllib.request.Request(url)
    return urllib.request.Request(url, headers={"Authorization": f"token {token}"})
93
94
# a dictionary of plugins and their new repositories: maps the old PluginDesc
# to the Repo it was redirected to (filled in by check_results).
Redirects = Dict["PluginDesc", "Repo"]
97
98
class Repo:
    """A plugin's upstream git repository, prefetched via nix-prefetch-git."""

    def __init__(self, uri: str, branch: str) -> None:
        # Url to the repo
        self.uri = uri
        self._branch = branch
        # When the upstream moved, holds the replacement repository.
        self.redirect: Optional["Repo"] = None
        self.token = "dummy_token"

    @property
    def name(self):
        """Last path component of the URI."""
        return self.uri.split("/")[-1]

    @property
    def branch(self):
        """Configured branch, falling back to HEAD when empty."""
        return self._branch if self._branch else "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        # Generic git hosts cannot be probed cheaply, so assume submodules;
        # _prefetch passes --fetch-submodules either way.
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (rev, commit date) of the branch tip, via nix-prefetch-git."""
        log.debug("Latest commit")
        info = self._prefetch(None)
        stamp = datetime.strptime(info["date"], "%Y-%m-%dT%H:%M:%S%z")
        return info["rev"], stamp

    def _prefetch(self, ref: Optional[str]):
        """Run nix-prefetch-git for *ref* (or the default ref) and return its JSON."""
        command = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            command.append(ref)
        log.debug(command)
        return json.loads(subprocess.check_output(command))

    def prefetch(self, ref: Optional[str]) -> str:
        """Return the sha256 of the repository contents at *ref*."""
        print("Prefetching")
        return self._prefetch(ref)["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchgit expression pinning *plugin*'s commit and hash."""
        return f"""fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}"""
154
155
class RepoGitHub(Repo):
    """Repo specialization using GitHub's web endpoints and Atom commit feed."""

    def __init__(self, owner: str, repo: str, branch: str) -> None:
        self.owner = owner
        self.repo = repo
        super().__init__(self.url(""), branch)
        # BUG FIX: this assignment used to happen *before* super().__init__(),
        # which unconditionally sets self.token = "dummy_token" and therefore
        # clobbered the explicit None. GitHub repos default to unauthenticated
        # requests until a real token is injected (PluginDesc.load_from_*).
        self.token = None
        log.debug(
            "Instantiating github repo owner=%s and repo=%s", self.owner, self.repo
        )

    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        """Absolute https://github.com/<owner>/<repo>/ URL joined with *path*."""
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        """True when the repo has a .gitmodules file on the branch (404 means no)."""
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        """Return (rev, updated time) of the branch tip from the Atom feed.

        NOTE(review): the feed timestamp ends in Z (UTC) but is parsed into a
        naive datetime; downstream only formats it as %Y-%m-%d.
        """
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            # A silent 30x means the repository moved; record the redirect.
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            # The commit hash is the last path component of the entry's link.
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        """Record a new RepoGitHub in self.redirect when GitHub redirected us."""
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        """Return the sha256 for *commit*, using the cheap tarball path when
        the repo has no submodules."""
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        """Hash the GitHub release tarball of *ref* with nix-prefetch-url."""
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        """Render a fetchFromGitHub expression pinning *plugin*."""
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f"""fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}"""
243
244
@dataclass(frozen=True)
class PluginDesc:
    """One row of the plugin spec: a repository, a branch and an optional alias."""

    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        """The alias when one was given, otherwise the repository name."""
        return self.alias if self.alias is not None else self.repo.name

    def __lt__(self, other):
        # Order by repository name so the rewritten CSV stays stable.
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
        """Build a PluginDesc from a CSV row with repo/branch/alias columns."""
        branch = row["branch"].strip()
        repo = make_repo(row["repo"], branch)
        repo.token = config.github_token
        return PluginDesc(repo, branch, row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
        """Parse 'uri[@branch][ as alias]' into a PluginDesc."""
        branch, alias = "HEAD", None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        branch = branch.strip()
        repo = make_repo(uri.strip(), branch)
        repo.token = config.github_token
        return PluginDesc(repo, branch, alias)
281
282
@dataclass
class Plugin:
    """A resolved plugin: pinned commit, hash, and (optionally) commit date."""

    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        """The name with dots replaced by dashes, usable as a nix attribute."""
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        """Date-based version string; self.date must have been set."""
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        """All fields except the (non-serializable) date, as a plain dict."""
        attrs = dict(self.__dict__)
        attrs.pop("date")
        return attrs
304
305
def load_plugins_from_csv(
    config: FetchConfig,
    input_file: Path,
) -> List[PluginDesc]:
    """Read the plugin spec CSV at *input_file* into a list of PluginDesc.

    The file needs a header row with 'repo', 'branch' and 'alias' columns.
    """
    log.debug("Load plugins from csv %s", input_file)
    with open(input_file, newline="") as csvfile:
        # BUG FIX: this function only reads the file; the old debug message
        # claimed "Writing into" it.
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(csvfile)
        return [PluginDesc.load_from_csv(config, row) for row in reader]
322
323
def run_nix_expr(expr):
    """Evaluate *expr* with `nix eval --json` in a clean environment and
    return the parsed JSON result."""
    with CleanEnvironment() as nix_path:
        command = ["nix", "eval"]
        command += ["--extra-experimental-features", "nix-command"]
        command += ["--impure", "--json"]
        command += ["--expr", expr, "--nix-path", nix_path]
        log.debug("Running command %s", " ".join(command))
        return json.loads(subprocess.check_output(command))
342
343
class Editor:
    """The configuration of the update script.

    Bundles the paths, the nix expression listing the currently packaged
    plugins, and the CLI handling shared by the per-editor update scripts.
    """

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        # BUG FIX: the value must go through a %s placeholder; passing it as
        # a bare extra argument makes the logging module raise a formatting
        # error as soon as DEBUG logging is enabled.
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        # Nix expression (a string) evaluated by get_current_plugins().
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        # Set lazily in run()/update_plugins() when committing is requested.
        self.nixpkgs_repo = None

    def add(self, args):
        """Append args.add_plugins to the CSV spec, prefetching each plugin
        and (unless --no-commit) committing the change."""
        log.debug("called the 'add' command")
        fetch_config = FetchConfig(args.proc, args.github_token)
        for plugin_line in args.add_plugins:
            # BUG FIX (here and below): extra log arguments need a %s
            # placeholder in the format string.
            log.debug("using plugin_line %s", plugin_line)
            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
            log.debug("loaded as pdesc %s", pdesc)
            self.rewrite_input(
                fetch_config, args.input_file, self.deprecated, append=[pdesc]
            )
            plugin, _ = prefetch_plugin(
                pdesc,
            )
            autocommit = not args.no_commit
            if autocommit:
                commit(
                    self.nixpkgs_repo,
                    "{drv_name}: init at {version}".format(
                        drv_name=self.get_drv_name(plugin.normalized_name),
                        version=plugin.version,
                    ),
                    [args.outfile, args.input_file],
                )

    # Expects arguments generated by 'update' subparser
    def update(self, args):
        """CSV spec"""
        print("the update member function should be overridden in subclasses")

    def get_current_plugins(self) -> List[Plugin]:
        """Evaluate self.get_plugins and return the currently packaged
        plugins (used to seed the cache)."""
        data = run_nix_expr(self.get_plugins)
        return [
            Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            for name, attr in data.items()
        ]

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        """CSV spec"""
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, _plugins, _outfile: str):
        """Returns nothing for now, writes directly to outfile"""
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        """Return a closure that prefetches every plugin in *input_file*,
        regenerates *outfile* and returns the discovered redirects."""
        cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                pool = Pool(processes=config.proc)
                results = pool.map(_prefetch, plugins)
            finally:
                # Persist whatever was fetched, even when a worker failed.
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update

    @property
    def attr_path(self):
        """Attribute set holding this editor's plugins, e.g. 'vimPlugins'."""
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        """Full attribute path of one plugin derivation."""
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        """Build the argparse CLI with 'add' and 'update' subcommands."""
        common = argparse.ArgumentParser(
            add_help=False,
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )
        common.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        common.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            help="Filename to save generated nix code",
        )
        common.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        common.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows to set --proc to higher values.
            Uses GITHUB_API_TOKEN environment variables as the default value.""",
        )
        common.add_argument(
            "--no-commit",
            "-n",
            action="store_true",
            default=False,
            help="Whether to autocommit changes",
        )
        common.add_argument(
            "--debug",
            "-d",
            choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level",
        )

        main = argparse.ArgumentParser(
            parents=[common],
            description=(
                f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            ),
        )

        subparsers = main.add_subparsers(dest="command", required=False)
        padd = subparsers.add_parser(
            "add",
            parents=[],
            description="Add new plugin",
            add_help=False,
        )
        padd.set_defaults(func=self.add)
        padd.add_argument(
            "add_plugins",
            default=None,
            nargs="+",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )

        pupdate = subparsers.add_parser(
            "update",
            description="Update all or a subset of existing plugins",
            add_help=False,
        )
        pupdate.set_defaults(func=self.update)
        return main

    def run(
        self,
    ):
        """
        Convenience function: parse the CLI, configure logging and dispatch
        to the chosen subcommand ('update' when none was given).
        """
        parser = self.create_parser()
        args = parser.parse_args()
        command = args.command or "update"
        log.setLevel(LOG_LEVELS[args.debug])
        log.info("Chose to run command: %s", command)

        if not args.no_commit:
            self.nixpkgs_repo = git.Repo(self.root, search_parent_directories=True)

        getattr(self, command)(args)
549
550
class CleanEnvironment(object):
    """Context manager giving nix a minimal, reproducible environment.

    On entry it points NIXPKGS_CONFIG at an empty config file and returns a
    NIX_PATH entry for the local checkout; on exit the previous environment
    is restored exactly.
    """

    def __enter__(self) -> str:
        self.old_environ = os.environ.copy()
        # Three directories up from this file — presumably the repository
        # root (TODO confirm against the checkout layout).
        local_pkgs = str(Path(__file__).parent.parent.parent)
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name
        return f"localpkgs={local_pkgs}"

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # BUG FIX: update() alone cannot remove keys that were *added* inside
        # the context (e.g. NIXPKGS_CONFIG when it was previously unset), so
        # clear first and then restore the snapshot.
        os.environ.clear()
        os.environ.update(self.old_environ)
        self.empty_config.close()
564
565
def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    """Resolve *p* to a pinned Plugin.

    Fetches the latest commit of the plugin's branch, reuses a cached
    prefetch for that commit when available, and otherwise computes the
    sha256. Returns the plugin together with the redirect Repo (set when
    the upstream repository moved), or None.
    """
    repo, branch = p.repo, p.branch
    name = p.alias or p.repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    # FIX: dropped a dead `commit = None` assignment that was immediately
    # overwritten by latest_commit().
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit !")
        # The cache stores neither the (possibly aliased) name nor the date.
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )
590
591
def print_download_error(plugin: PluginDesc, ex: Exception):
    """Report a failed plugin download: summary on stderr, traceback on stdout."""
    print(f"{plugin}: {ex}", file=sys.stderr)
    formatted = traceback.format_exception(ex.__class__, ex, ex.__traceback__)
    print("\n".join(line.rstrip("\n") for line in formatted))
600
601
def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    """Split prefetch results into successes and redirects.

    Prints a summary; when any plugin failed to download, every failure is
    reported and the process exits with status 1.
    """
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for pdesc, result, redirect in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
            continue
        if redirect is None:
            plugins.append((pdesc, result))
        else:
            redirects[pdesc] = redirect
            # Re-point the spec entry at the repository's new location.
            plugins.append((PluginDesc(redirect, pdesc.branch, pdesc.alias), result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if not failures:
        print()
        return plugins, redirects

    print(f", {len(failures)} plugin(s) could not be downloaded:\n")
    for plugin, exception in failures:
        print_download_error(plugin, exception)
    sys.exit(1)
630
631
def make_repo(uri: str, branch) -> Repo:
    """Instantiate a Repo with the correct specialization depending on server (github spec)."""
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    parsed = urlparse(uri)
    if parsed.netloc not in ("github.com", ""):
        return Repo(uri.strip(), branch)
    parts = parsed.path.strip("/").split("/")
    return RepoGitHub(parts[0], parts[1], branch)
642
643
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    """Locate the cache file under $XDG_CACHE_HOME (fallback ~/.cache).

    Returns None when neither XDG_CACHE_HOME nor HOME is set, which
    disables caching entirely.
    """
    cache_home = os.environ.get("XDG_CACHE_HOME", None)
    if cache_home is not None:
        return Path(cache_home, cache_file_name)

    home = os.environ.get("HOME", None)
    if home is not None:
        return Path(home, ".cache", cache_file_name)

    return None
653
654
class Cache:
    """Commit-keyed store of prefetched plugins, persisted as JSON under the
    user's cache directory."""

    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        # None disables persistence (no XDG_CACHE_HOME/HOME available).
        self.cache_file = get_cache_path(cache_file_name)

        known = {plugin.commit: plugin for plugin in initial_plugins}
        # Entries already persisted on disk take precedence.
        known.update(self.load())
        self.downloads = known

    def load(self) -> Dict[str, Plugin]:
        """Read the cache file, returning {} when absent or disabled."""
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        with open(self.cache_file) as f:
            data = json.load(f)

        return {
            attr["commit"]: Plugin(
                attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
            )
            for attr in data.values()
        }

    def store(self) -> None:
        """Write every known download back to the cache file, if enabled."""
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            serialized = {
                commit: plugin.as_json() for commit, plugin in self.downloads.items()
            }
            json.dump(serialized, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
695
696
def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    """Prefetch a single plugin for use in a worker pool.

    Never raises: any failure is returned as the second tuple element so
    check_results() can report all errors at once.
    """
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
    except Exception as e:
        return (pluginDesc, e, None)
    cache[plugin.commit] = plugin
    return (pluginDesc, plugin, redirect)
706
707
def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: Optional[Redirects] = None,
    append: Optional[List[PluginDesc]] = None,
):
    """Rewrite the plugin spec CSV at *input_file*.

    Appends the *append* entries, records repository renames from
    *redirects* in the *deprecated* JSON file, and writes the sorted
    result back.
    """
    # BUG FIX: the defaults used to be mutable literals ({} / []), which are
    # shared across calls; use None sentinels instead.
    if redirects is None:
        redirects = {}
    if append is None:
        append = []

    plugins = load_plugins_from_csv(
        config,
        input_file,
    )

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            # Only record a deprecation when the attribute name changed.
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ["repo", "branch", "alias"]
        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))
748
749
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    """Stage *files* and commit them with *message*, skipping empty commits."""
    repo.index.add([str(f.resolve()) for f in files])

    if not repo.index.diff("HEAD"):
        print("no changes in working tree to commit")
        return

    print(f'committing to nixpkgs "{message}"')
    repo.index.commit(message)
758
759
def update_plugins(editor: Editor, args):
    """The main entry function of this module. All input arguments are grouped in the `Editor`."""
    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    run_update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = run_update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit
    if autocommit:
        editor.nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
        commit(editor.nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])

    if not redirects:
        return

    # Second pass: regenerate with the redirected repositories applied.
    run_update()
    if autocommit:
        commit(
            editor.nixpkgs_repo,
            f"{editor.attr_path}: resolve github repository redirects",
            [args.outfile, args.input_file, editor.deprecated],
        )