#!/usr/bin/env python3

"""
Update a Python package expression by passing in the `.nix` file, or the directory containing it.
You can pass in multiple files or paths.

You'll likely want to use
``
  $ ./update-python-libraries ../../pkgs/development/python-modules/**/default.nix
``
to update all non-pinned libraries in that folder.
"""

import argparse
import collections
import logging
import os
import pathlib
import re
import subprocess
from concurrent.futures import ThreadPoolExecutor as Pool

import requests
from packaging.specifiers import SpecifierSet
from packaging.version import InvalidVersion
from packaging.version import Version as _Version

INDEX = "https://pypi.io/pypi"
"""URL of PyPI"""

EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
"""Permitted file extensions. These are evaluated from left to right and the first occurrence is returned."""

PRERELEASES = False

GIT = "git"

NIXPGKS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()

logging.basicConfig(level=logging.INFO)


class Version(_Version, collections.abc.Sequence):

    def __init__(self, version):
        super().__init__(version)
        # We cannot use `str(Version("0.04.21"))` because that becomes `0.4.21`
        # https://github.com/avian2/unidecode/issues/13#issuecomment-354538882
        self.raw_version = version

    def __getitem__(self, i):
        return self._version.release[i]

    def __len__(self):
        return len(self._version.release)

    def __iter__(self):
        yield from self._version.release
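
# Illustrative example for the Version wrapper above (assuming packaging's usual
# normalization rules):
#
#     v = Version("0.04.21")
#     str(v)          # -> "0.4.21" (normalized by packaging)
#     v.raw_version   # -> "0.04.21" (the exact string we write back into the .nix file)
#     v[0], len(v)    # -> (0, 3), sequence access over the release tuple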


def _get_values(attribute, text):
    """Match attribute in text and return all matches.

    :returns: List of matches.
    """
    regex = r'{}\s+=\s+"(.*)";'.format(attribute)
    regex = re.compile(regex)
    values = regex.findall(text)
    return values

def _get_unique_value(attribute, text):
    """Match attribute in text and return unique match.

    :returns: Single match.
    """
    values = _get_values(attribute, text)
    n = len(values)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return values[0]
    else:
        raise ValueError("no value found for {}".format(attribute))

def _get_line_and_value(attribute, text):
    """Match attribute in text. Return the line and the value of the attribute."""
    regex = r'({}\s+=\s+"(.*)";)'.format(attribute)
    regex = re.compile(regex)
    value = regex.findall(text)
    n = len(value)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return value[0]
    else:
        raise ValueError("no value found for {}".format(attribute))


def _replace_value(attribute, value, text):
    """Search and replace value of attribute in text."""
    old_line, old_value = _get_line_and_value(attribute, text)
    new_line = old_line.replace(old_value, value)
    new_text = text.replace(old_line, new_line)
    return new_text
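
# Illustrative example for the attribute helpers above (the Nix snippet is made up):
#
#     text = 'pname = "requests";\n  version = "2.31.0";\n'
#     _get_unique_value("version", text)           # -> "2.31.0"
#     _replace_value("version", "2.32.0", text)    # -> text with version = "2.32.0";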


def _fetch_page(url):
    r = requests.get(url)
    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))


def _fetch_github(url):
    headers = {}
    token = os.environ.get('GITHUB_API_TOKEN')
    if token:
        headers["Authorization"] = f"token {token}"
    r = requests.get(url, headers=headers)

    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))
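
# Note on _fetch_github above: without GITHUB_API_TOKEN the request is unauthenticated
# and subject to GitHub's low anonymous rate limit. Illustrative call (owner/repo made up):
#
#     _fetch_github("https://api.github.com/repos/some-owner/some-repo/releases")
#     # -> parsed JSON list of release objects (tag_name, prerelease, tarball_url, ...)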


SEMVER = {
    'major' : 0,
    'minor' : 1,
    'patch' : 2,
}


def _determine_latest_version(current_version, target, versions):
    """Determine the latest version, given `target` (major, minor or patch)."""
    current_version = Version(current_version)

    def _parse_versions(versions):
        for v in versions:
            try:
                yield Version(v)
            except InvalidVersion:
                pass

    versions = _parse_versions(versions)

    index = SEMVER[target]

    ceiling = list(current_version[0:index])
    if len(ceiling) == 0:
        ceiling = None
    else:
        ceiling[-1] += 1
        ceiling = Version(".".join(map(str, ceiling)))

    # We do not want prereleases
    versions = SpecifierSet(prereleases=PRERELEASES).filter(versions)

    if ceiling is not None:
        versions = SpecifierSet(f"<{ceiling}").filter(versions)

    return (max(sorted(versions))).raw_version
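
# Illustrative example for _determine_latest_version above: with current_version "1.4.2"
# and target "minor", the ceiling becomes Version("2") (the major component bumped by
# one), so only stable releases below 2 are considered; with target "patch" the ceiling
# is "1.5"; with target "major" there is no ceiling and the newest stable release wins.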


def _get_latest_version_pypi(package, extension, current_version, target):
    """Get latest version and hash from PyPI."""
    url = "{}/{}/json".format(INDEX, package)
    json = _fetch_page(url)

    versions = json['releases'].keys()
    version = _determine_latest_version(current_version, target, versions)

    try:
        releases = json['releases'][version]
    except KeyError as e:
        raise KeyError('Could not find version {} for {}'.format(version, package)) from e
    for release in releases:
        if release['filename'].endswith(extension):
            # TODO: In case of wheel we need to do further checks!
            sha256 = release['digests']['sha256']
            break
    else:
        sha256 = None
    return version, sha256, None
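
# Illustrative example for _get_latest_version_pypi above (the package name is made up):
# for package "foo" this fetches https://pypi.io/pypi/foo/json, picks the newest allowed
# version, and returns (version, sha256_of_the_artifact_matching_extension, None); the
# trailing None means PyPI-based updates never carry a tag prefix.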


def _get_latest_version_github(package, extension, current_version, target):
    def strip_prefix(tag):
        return re.sub("^[^0-9]*", "", tag)

    def get_prefix(string):
        matches = re.findall(r"^([^0-9]*)", string)
        return next(iter(matches), "")

    # When invoked as an updateScript, UPDATE_NIX_ATTR_PATH will be set;
    # this allows us to work with packages which live outside of python-modules.
    attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}")
    try:
        homepage = subprocess.check_output(
            ["nix", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
            .decode('utf-8')
    except Exception as e:
        raise ValueError(f"Unable to determine homepage: {e}") from e
    owner_repo = homepage[len("https://github.com/"):]  # remove prefix
    owner, repo = owner_repo.split("/")

    url = f"https://api.github.com/repos/{owner}/{repo}/releases"
    all_releases = _fetch_github(url)
    releases = list(filter(lambda x: not x['prerelease'], all_releases))

    if len(releases) == 0:
        raise ValueError(f"{homepage} does not contain any stable releases")

    versions = map(lambda x: strip_prefix(x['tag_name']), releases)
    version = _determine_latest_version(current_version, target, versions)

    release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases))
    prefix = get_prefix(release['tag_name'])
    try:
        sha256 = subprocess.check_output(
            ["nix-prefetch-url", "--type", "sha256", "--unpack", f"{release['tarball_url']}"],
            stderr=subprocess.DEVNULL).decode('utf-8').strip()
    except subprocess.CalledProcessError:
        # This may fail if the repository has both a branch and a tag of the same name;
        # retry with an explicit refs/tags URL.
        tag_url = str(release['tarball_url']).replace("tarball", "tarball/refs/tags")
        sha256 = subprocess.check_output(
            ["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url],
            stderr=subprocess.DEVNULL).decode('utf-8').strip()

    return version, sha256, prefix
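
# Illustrative example for the tag-prefix handling above: a release tagged "v1.2.3"
# yields strip_prefix(...) == "1.2.3" and get_prefix(...) == "v", so the caller can
# rewrite the Nix expression's rev as "v${version}" while version stays "1.2.3".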


FETCHERS = {
    'fetchFromGitHub' : _get_latest_version_github,
    'fetchPypi' : _get_latest_version_pypi,
    'fetchurl' : _get_latest_version_pypi,
}


DEFAULT_SETUPTOOLS_EXTENSION = 'tar.gz'


FORMATS = {
    'setuptools' : DEFAULT_SETUPTOOLS_EXTENSION,
    'wheel' : 'whl'
}

def _determine_fetcher(text):
    # Count occurrences of fetchers.
    nfetchers = sum(text.count('src = {}'.format(fetcher)) for fetcher in FETCHERS.keys())
    if nfetchers == 0:
        raise ValueError("no fetcher.")
    elif nfetchers > 1:
        raise ValueError("multiple fetchers.")
    else:
        # Then we check which fetcher to use.
        for fetcher in FETCHERS.keys():
            if 'src = {}'.format(fetcher) in text:
                return fetcher


def _determine_extension(text, fetcher):
    """Determine what extension is used in the expression.

    If we use:
    - fetchPypi, we check if format is specified.
    - fetchurl, we determine the extension from the url.
    - fetchFromGitHub, we simply use `.tar.gz`.
    """
    if fetcher == 'fetchPypi':
        try:
            src_format = _get_unique_value('format', text)
        except ValueError:
            src_format = None  # format was not given

        try:
            extension = _get_unique_value('extension', text)
        except ValueError:
            extension = None  # extension was not given

        if extension is None:
            if src_format is None:
                src_format = 'setuptools'
            elif src_format == 'flit':
                raise ValueError("Don't know how to update a Flit package.")
            elif src_format == 'other':
                raise ValueError("Don't know how to update a format='other' package.")
            elif src_format == 'pyproject':
                raise ValueError("Don't know how to update a pyproject package.")
            extension = FORMATS[src_format]

    elif fetcher == 'fetchurl':
        url = _get_unique_value('url', text)
        extension = os.path.splitext(url)[1]
        if 'pypi' not in url:
            raise ValueError('url does not point to PyPI.')

    elif fetcher == 'fetchFromGitHub':
        if "fetchSubmodules" in text:
            raise ValueError("fetchFromGitHub fetcher doesn't support submodules")
        extension = "tar.gz"

    return extension
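
# Illustrative example for _determine_extension above: a fetchPypi expression with
# `format = "wheel";` maps to the "whl" extension, no format at all defaults to the
# setuptools sdist ("tar.gz"), and flit/pyproject/other formats are rejected as
# unsupported.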


def _update_package(path, target):

    # Read the expression
    with open(path, 'r') as f:
        text = f.read()

    # Determine pname.
    pname = _get_unique_value('pname', text)

    # Determine version.
    version = _get_unique_value('version', text)

    # First we check how many fetchers are mentioned.
    fetcher = _determine_fetcher(text)

    extension = _determine_extension(text, fetcher)

    new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)

    if new_version == version:
        logging.info("Path {}: no update available for {}.".format(path, pname))
        return False
    elif Version(new_version) <= Version(version):
        raise ValueError("downgrade for {}.".format(pname))
    if not new_sha256:
        raise ValueError("no file available for {}.".format(pname))

    text = _replace_value('version', new_version, text)
    text = _replace_value('sha256', new_sha256, text)
    if fetcher == 'fetchFromGitHub':
        text = _replace_value('rev', f"{prefix}${{version}}", text)
        # In case there's no prefix, just rewrite without interpolation.
        text = text.replace('"${version}";', 'version;')

    with open(path, 'w') as f:
        f.write(text)

    logging.info("Path {}: updated {} from {} to {}".format(path, pname, version, new_version))

    result = {
        'path' : path,
        'target': target,
        'pname': pname,
        'old_version' : version,
        'new_version' : new_version,
        #'fetcher' : fetcher,
    }

    return result
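
# Illustrative example of the rewrite performed above (the versions are made up): in a
# GitHub-fetched expression, `version = "1.0";` becomes `version = "1.1";`, the sha256
# is replaced with the freshly prefetched hash, and `rev` becomes "v${version}" when the
# upstream tag carries a "v" prefix, or plain `version;` when it does not.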


def _update(path, target):

    # We need to read and modify a Nix expression.
    if os.path.isdir(path):
        path = os.path.join(path, 'default.nix')

    # If a default.nix does not exist, we quit.
    if not os.path.isfile(path):
        logging.info("Path {}: does not exist.".format(path))
        return False

    # If file is not a Nix expression, we quit.
    if not path.endswith(".nix"):
        logging.info("Path {}: does not end with `.nix`.".format(path))
        return False

    try:
        return _update_package(path, target)
    except ValueError as e:
        logging.warning("Path {}: {}".format(path, e))
        return False


def _commit(path, pname, old_version, new_version, pkgs_prefix="python: ", **kwargs):
    """Commit result."""

    msg = f'{pkgs_prefix}{pname}: {old_version} -> {new_version}'

    try:
        subprocess.check_call([GIT, 'add', path])
        subprocess.check_call([GIT, 'commit', '-m', msg])
    except subprocess.CalledProcessError as e:
        subprocess.check_call([GIT, 'checkout', path])
        raise ValueError(f'Could not commit {path}') from e

    return True
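
# Illustrative example for _commit above (the package name is hypothetical): the default
# prefix yields commit messages like "python: foo: 1.0 -> 1.1", while --use-pkgs-prefix
# switches them to "python3Packages.foo: 1.0 -> 1.1".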


def main():

    epilog = """
environment variables:
  GITHUB_API_TOKEN\tGitHub API token used when updating github packages
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)
    parser.add_argument('package', type=str, nargs='+')
    parser.add_argument('--target', type=str, choices=SEMVER.keys(), default='major')
    parser.add_argument('--commit', action='store_true', help='Create a commit for each package update')
    parser.add_argument('--use-pkgs-prefix', action='store_true', help='Use python3Packages.${pname}: instead of python: ${pname}: when making commits')

    args = parser.parse_args()
    target = args.target

    packages = list(map(os.path.abspath, args.package))

    logging.info("Updating packages...")

    # Use threads to update packages concurrently.
    with Pool() as p:
        results = list(filter(bool, p.map(lambda pkg: _update(pkg, target), packages)))

    logging.info("Finished updating packages.")

    commit_options = {}
    if args.use_pkgs_prefix:
        logging.info("Using python3Packages. prefix for commits")
        commit_options["pkgs_prefix"] = "python3Packages."

    # Commits are created sequentially.
    if args.commit:
        logging.info("Committing updates...")
        # list() forces evaluation of the map.
        list(map(lambda x: _commit(**x, **commit_options), results))
        logging.info("Finished committing updates")

    count = len(results)
    logging.info("{} package(s) updated".format(count))


if __name__ == '__main__':
    main()
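
# Illustrative invocation (the package directory is hypothetical):
#
#     $ GITHUB_API_TOKEN=... ./update-python-libraries --target minor --commit \
#           pkgs/development/python-modules/foo/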