Merge staging-next into staging

Authored by github-actions[bot], committed by GitHub
288c9d78 f4c041be

+243 -78
+5
doc/languages-frameworks/python.section.md
···
Updating packages in bulk leads to lots of breakages, which is why a
stabilization period on the `python-unstable` branch is required.

+ If a package is fragile and often breaks during these bulk updates, it
+ may be reasonable to set `passthru.skipBulkUpdate = true` in the
+ derivation. This decision should not be made on a whim and should
+ always be supported by a qualifying comment.
+
Once the branch is sufficiently stable it should normally be merged
into the `staging` branch.
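For illustration, a minimal sketch of how such an opt-out might look in a hypothetical package expression (the package name and the reason in the comment are made up; only the `passthru.skipBulkUpdate = true` attribute comes from the documentation change above):

  buildPythonPackage rec {
    pname = "some-fragile-package";  # hypothetical package name
    version = "1.2.3";

    # src, dependencies, tests, ...

    # Qualifying comment, as required above: upstream frequently breaks its
    # API between minor releases, so keep this package out of automated bulk
    # updates and bump it manually instead.
    passthru.skipBulkUpdate = true;
  }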
+3 -5
maintainers/scripts/update-python-libraries
···
- #!/bin/sh
- build=`nix-build -E "with import (fetchTarball "channel:nixpkgs-unstable") {}; python3.withPackages(ps: with ps; [ packaging requests toolz ])"`
- python=${build}/bin/python
- exec ${python} pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
-
+ #!/usr/bin/env nix-shell
+ #!nix-shell -I nixpkgs=channel:nixpkgs-unstable -i bash -p "python3.withPackages (ps: with ps; [ packaging requests ])" -p nix-prefetch-git
+ exec python3 pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
+1 -1
nixos/modules/services/matrix/synapse.md
··· 31 "m.homeserver".base_url = "https://${fqdn}"; 32 "m.identity_server" = {}; 33 }; 34 - serverConfig."m.server" = "${config.services.matrix-synapse.settings.server_name}:443"; 35 mkWellKnown = data: '' 36 add_header Content-Type application/json; 37 add_header Access-Control-Allow-Origin *;
··· 31 "m.homeserver".base_url = "https://${fqdn}"; 32 "m.identity_server" = {}; 33 }; 34 + serverConfig."m.server" = "${fqdn}:443"; 35 mkWellKnown = data: '' 36 add_header Content-Type application/json; 37 add_header Access-Control-Allow-Origin *;
+1
pkgs/applications/audio/psst/default.nix
···
  categories = [ "Audio" "AudioVideo" ];
  icon = "psst";
  terminal = false;
+ startupWMClass = "psst-gui";
};

in
+17
pkgs/applications/editors/emacs/elisp-packages/melpa-packages.nix
···

dune = dontConfigure super.dune;

+ emacsql = super.emacsql.overrideAttrs (old: {
+   buildInputs = old.buildInputs ++ [ pkgs.sqlite ];
+
+   postBuild = ''
+     cd source/sqlite
+     make
+     cd -
+   '';
+
+   postInstall = (old.postInstall or "") + "\n" + ''
+     install -m=755 -D source/sqlite/emacsql-sqlite \
+       $out/share/emacs/site-lisp/elpa/emacsql-${old.version}/sqlite/emacsql-sqlite
+   '';
+
+   stripDebugList = [ "share" ];
+ });
+
emacsql-sqlite = super.emacsql-sqlite.overrideAttrs (old: {
  buildInputs = old.buildInputs ++ [ pkgs.sqlite ];

+3 -3
pkgs/applications/misc/ticker/default.nix
···

buildGoModule rec {
  pname = "ticker";
- version = "4.5.5";
+ version = "4.5.6";

  src = fetchFromGitHub {
    owner = "achannarasappa";
    repo = pname;
    rev = "refs/tags/v${version}";
-   hash = "sha256-7FSyW71NWmWmBNQ5QUqMJ4x9WLXpm0kvvjdjzx1yk/M=";
+   hash = "sha256-h7k/zAYqpCAGn2dW+a3gOF/BN5ywjy/2Yx6THK9zk6k=";
  };

- vendorHash = "sha256-6bosJ2AlbLZ551tCNPmvNyyReFJG+iS3SYUFti2/CAw=";
+ vendorHash = "sha256-c7wU9LLRlS9kOhE4yAiKAs/npQe8lvSwPcd+/D8o9rk=";

  ldflags = [
    "-s"
+10 -2
pkgs/development/compilers/mruby/default.nix
···

stdenv.mkDerivation rec {
  pname = "mruby";
- version = "3.1.0";
+ version = "3.2.0";

  src = fetchFromGitHub {
    owner = "mruby";
    repo = "mruby";
    rev = version;
-   sha256 = "0gnzip7qfadnl0r1k8bpc9a6796sy503h77ggds02wrz7mpq32nf";
+   sha256 = "sha256-MmrbWeg/G29YBvVrOtceTOZChrQ2kx9+apl7u7BiGjA=";
  };

  nativeBuildInputs = [ ruby bison rake ];
···
  '';

  doCheck = true;
+
+ checkPhase = ''
+   runHook preCheck
+
+   rake test
+
+   runHook postCheck
+ '';

  meta = with lib; {
    description = "An embeddable implementation of the Ruby language";
+2 -1
pkgs/development/interpreters/python/update-python-libraries/default.nix
···
- { python3, runCommand, git, nix }:
+ { python3, runCommand, git, nix, nix-prefetch-git }:

runCommand "update-python-libraries" {
  buildInputs = [
    nix
+   nix-prefetch-git
    (python3.withPackages(ps: with ps; [ packaging requests toolz ]))
    git
  ];
+111 -31
pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py
··· 12 """ 13 14 import argparse 15 import os 16 - import pathlib 17 import re 18 import requests 19 from concurrent.futures import ThreadPoolExecutor as Pool 20 from packaging.version import Version as _Version 21 from packaging.version import InvalidVersion 22 from packaging.specifiers import SpecifierSet 23 import collections 24 import subprocess 25 ··· 31 32 PRERELEASES = False 33 34 GIT = "git" 35 36 - NIXPGKS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip() 37 38 - import logging 39 logging.basicConfig(level=logging.INFO) 40 41 ··· 67 values = regex.findall(text) 68 return values 69 70 def _get_unique_value(attribute, text): 71 """Match attribute in text and return unique match. 72 ··· 81 else: 82 raise ValueError("no value found for {}".format(attribute)) 83 84 - def _get_line_and_value(attribute, text): 85 """Match attribute in text. Return the line and the value of the attribute.""" 86 - regex = '({}\s+=\s+"(.*)";)'.format(attribute) 87 regex = re.compile(regex) 88 - value = regex.findall(text) 89 - n = len(value) 90 if n > 1: 91 raise ValueError("found too many values for {}".format(attribute)) 92 elif n == 1: 93 - return value[0] 94 else: 95 raise ValueError("no value found for {}".format(attribute)) 96 97 98 - def _replace_value(attribute, value, text): 99 """Search and replace value of attribute in text.""" 100 - old_line, old_value = _get_line_and_value(attribute, text) 101 new_line = old_line.replace(old_value, value) 102 new_text = text.replace(old_line, new_line) 103 return new_text ··· 122 return r.json() 123 else: 124 raise ValueError("request for {} failed".format(url)) 125 126 127 SEMVER = { ··· 198 attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}") 199 try: 200 homepage = subprocess.check_output( 201 - ["nix", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\ 202 .decode('utf-8') 203 except Exception as e: 204 raise ValueError(f"Unable to determine homepage: {e}") ··· 217 218 release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases)) 219 prefix = get_prefix(release['tag_name']) 220 - try: 221 - sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", f"{release['tarball_url']}"], stderr=subprocess.DEVNULL)\ 222 - .decode('utf-8').strip() 223 - except: 224 - # this may fail if they have both a branch and a tag of the same name, attempt tag name 225 - tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags") 226 - sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url], stderr=subprocess.DEVNULL)\ 227 - .decode('utf-8').strip() 228 229 230 - return version, sha256, prefix 231 232 233 FETCHERS = { ··· 272 if fetcher == 'fetchPypi': 273 try: 274 src_format = _get_unique_value('format', text) 275 - except ValueError as e: 276 src_format = None # format was not given 277 278 try: 279 extension = _get_unique_value('extension', text) 280 - except ValueError as e: 281 extension = None # extension was not given 282 283 if extension is None: ··· 294 raise ValueError('url does not point to PyPI.') 295 296 elif fetcher == 'fetchFromGitHub': 297 - if "fetchSubmodules" in text: 298 - raise ValueError("fetchFromGitHub fetcher doesn't support submodules") 299 extension = "tar.gz" 300 301 return extension ··· 321 # Attempt a fetch using each pname, e.g. 
backports-zoneinfo vs backports.zoneinfo 322 successful_fetch = False 323 for pname in pnames: 324 try: 325 new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target) 326 successful_fetch = True ··· 340 raise ValueError("no file available for {}.".format(pname)) 341 342 text = _replace_value('version', new_version, text) 343 # hashes from pypi are 16-bit encoded sha256's, normalize it to sri to avoid merge conflicts 344 # sri hashes have been the default format since nix 2.4+ 345 - sri_hash = subprocess.check_output(["nix", "--extra-experimental-features", "nix-command", "hash", "to-sri", "--type", "sha256", new_sha256]).decode('utf-8').strip() 346 347 - 348 - # fetchers can specify a sha256, or a sri hash 349 - try: 350 - text = _replace_value('sha256', sri_hash, text) 351 - except ValueError: 352 - text = _replace_value('hash', sri_hash, text) 353 354 if fetcher == 'fetchFromGitHub': 355 # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";` ··· 440 target = args.target 441 442 packages = list(map(os.path.abspath, args.package)) 443 444 logging.info("Updating packages...") 445
··· 12 """ 13 14 import argparse 15 + import json 16 + import logging 17 import os 18 import re 19 import requests 20 from concurrent.futures import ThreadPoolExecutor as Pool 21 from packaging.version import Version as _Version 22 from packaging.version import InvalidVersion 23 from packaging.specifiers import SpecifierSet 24 + from typing import Optional, Any 25 import collections 26 import subprocess 27 ··· 33 34 PRERELEASES = False 35 36 + BULK_UPDATE = False 37 + 38 GIT = "git" 39 40 + NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip() 41 42 logging.basicConfig(level=logging.INFO) 43 44 ··· 70 values = regex.findall(text) 71 return values 72 73 + 74 + def _get_attr_value(attr_path: str) -> Optional[Any]: 75 + try: 76 + response = subprocess.check_output([ 77 + "nix", 78 + "--extra-experimental-features", "nix-command", 79 + "eval", 80 + "-f", f"{NIXPKGS_ROOT}/default.nix", 81 + "--json", 82 + f"{attr_path}" 83 + ]) 84 + return json.loads(response.decode()) 85 + except (subprocess.CalledProcessError, ValueError): 86 + return None 87 + 88 + 89 def _get_unique_value(attribute, text): 90 """Match attribute in text and return unique match. 91 ··· 100 else: 101 raise ValueError("no value found for {}".format(attribute)) 102 103 + def _get_line_and_value(attribute, text, value=None): 104 """Match attribute in text. Return the line and the value of the attribute.""" 105 + if value is None: 106 + regex = rf'({attribute}\s+=\s+\"(.*)\";)' 107 + else: 108 + regex = rf'({attribute}\s+=\s+\"({value})\";)' 109 regex = re.compile(regex) 110 + results = regex.findall(text) 111 + n = len(results) 112 if n > 1: 113 raise ValueError("found too many values for {}".format(attribute)) 114 elif n == 1: 115 + return results[0] 116 else: 117 raise ValueError("no value found for {}".format(attribute)) 118 119 120 + def _replace_value(attribute, value, text, oldvalue=None): 121 """Search and replace value of attribute in text.""" 122 + if oldvalue is None: 123 + old_line, old_value = _get_line_and_value(attribute, text) 124 + else: 125 + old_line, old_value = _get_line_and_value(attribute, text, oldvalue) 126 new_line = old_line.replace(old_value, value) 127 new_text = text.replace(old_line, new_line) 128 return new_text ··· 147 return r.json() 148 else: 149 raise ValueError("request for {} failed".format(url)) 150 + 151 + 152 + def _hash_to_sri(algorithm, value): 153 + """Convert a hash to its SRI representation""" 154 + return subprocess.check_output([ 155 + "nix", 156 + "hash", 157 + "to-sri", 158 + "--type", algorithm, 159 + value 160 + ]).decode().strip() 161 + 162 + 163 + def _skip_bulk_update(attr_name: str) -> bool: 164 + return bool(_get_attr_value( 165 + f"{attr_name}.skipBulkUpdate" 166 + )) 167 168 169 SEMVER = { ··· 240 attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}") 241 try: 242 homepage = subprocess.check_output( 243 + ["nix", "eval", "-f", f"{NIXPKGS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\ 244 .decode('utf-8') 245 except Exception as e: 246 raise ValueError(f"Unable to determine homepage: {e}") ··· 259 260 release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases)) 261 prefix = get_prefix(release['tag_name']) 262 263 + # some attributes require using the fetchgit 264 + git_fetcher_args = [] 265 + if (_get_attr_value(f"{attr_path}.src.fetchSubmodules")): 266 + git_fetcher_args.append("--fetch-submodules") 267 + if (_get_attr_value(f"{attr_path}.src.fetchLFS")): 268 
+ git_fetcher_args.append("--fetch-lfs") 269 + if (_get_attr_value(f"{attr_path}.src.leaveDotGit")): 270 + git_fetcher_args.append("--leave-dotGit") 271 272 + if git_fetcher_args: 273 + algorithm = "sha256" 274 + cmd = [ 275 + "nix-prefetch-git", 276 + f"https://github.com/{owner}/{repo}.git", 277 + "--hash", algorithm, 278 + "--rev", f"refs/tags/{release['tag_name']}" 279 + ] 280 + cmd.extend(git_fetcher_args) 281 + response = subprocess.check_output(cmd) 282 + document = json.loads(response.decode()) 283 + hash = _hash_to_sri(algorithm, document[algorithm]) 284 + else: 285 + try: 286 + hash = subprocess.check_output([ 287 + "nix-prefetch-url", 288 + "--type", "sha256", 289 + "--unpack", 290 + f"{release['tarball_url']}" 291 + ], stderr=subprocess.DEVNULL).decode('utf-8').strip() 292 + except (subprocess.CalledProcessError, UnicodeError): 293 + # this may fail if they have both a branch and a tag of the same name, attempt tag name 294 + tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags") 295 + hash = subprocess.check_output([ 296 + "nix-prefetch-url", 297 + "--type", "sha256", 298 + "--unpack", 299 + tag_url 300 + ], stderr=subprocess.DEVNULL).decode('utf-8').strip() 301 + 302 + return version, hash, prefix 303 304 305 FETCHERS = { ··· 344 if fetcher == 'fetchPypi': 345 try: 346 src_format = _get_unique_value('format', text) 347 + except ValueError: 348 src_format = None # format was not given 349 350 try: 351 extension = _get_unique_value('extension', text) 352 + except ValueError: 353 extension = None # extension was not given 354 355 if extension is None: ··· 366 raise ValueError('url does not point to PyPI.') 367 368 elif fetcher == 'fetchFromGitHub': 369 extension = "tar.gz" 370 371 return extension ··· 391 # Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo 392 successful_fetch = False 393 for pname in pnames: 394 + if BULK_UPDATE and _skip_bulk_update(f"python3Packages.{pname}"): 395 + raise ValueError(f"Bulk update skipped for {pname}") 396 try: 397 new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target) 398 successful_fetch = True ··· 412 raise ValueError("no file available for {}.".format(pname)) 413 414 text = _replace_value('version', new_version, text) 415 + 416 # hashes from pypi are 16-bit encoded sha256's, normalize it to sri to avoid merge conflicts 417 # sri hashes have been the default format since nix 2.4+ 418 + sri_hash = _hash_to_sri("sha256", new_sha256) 419 420 + # retrieve the old output hash for a more precise match 421 + if old_hash := _get_attr_value(f"python3Packages.{pname}.src.outputHash"): 422 + # fetchers can specify a sha256, or a sri hash 423 + try: 424 + text = _replace_value('hash', sri_hash, text, old_hash) 425 + except ValueError: 426 + text = _replace_value('sha256', sri_hash, text, old_hash) 427 + else: 428 + raise ValueError(f"Unable to retrieve old hash for {pname}") 429 430 if fetcher == 'fetchFromGitHub': 431 # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";` ··· 516 target = args.target 517 518 packages = list(map(os.path.abspath, args.package)) 519 + 520 + if len(packages) > 1: 521 + global BULK_UPDATE 522 + BULK_UPDATE = true 523 524 logging.info("Updating packages...") 525
+5 -4
pkgs/development/python-modules/caldav/default.nix
···
, icalendar
, lxml
, pytestCheckHook
+ , pythonOlder
, pytz
, recurring-ical-events
, requests
- , six
, tzlocal
, vobject
}:

buildPythonPackage rec {
  pname = "caldav";
- version = "1.1.3";
+ version = "1.2.0";

  format = "setuptools";
+ disabled = pythonOlder "3.7";

  src = fetchFromGitHub {
    owner = "python-caldav";
    repo = pname;
    rev = "refs/tags/v${version}";
-   hash = "sha256-ZilsCYr1M2WKSz/g5JV41JVsuHopPerxOevoG7FrEjQ=";
+   hash = "sha256-ibizwN4pxqzmVozVjrAPNSrmM1+8+/Qu6UnfRerrwUk=";
  };

  propagatedBuildInputs = [
    vobject
    lxml
    requests
-   six
    icalendar
    recurring-ical-events
  ];
···
  meta = with lib; {
    description = "CalDAV (RFC4791) client library";
    homepage = "https://github.com/python-caldav/caldav";
+   changelog = "https://github.com/python-caldav/caldav/releases/tag/v${version}";
    license = licenses.asl20;
    maintainers = with maintainers; [ marenz dotlambda ];
  };
+9 -4
pkgs/development/python-modules/cnvkit/default.nix
···
, pomegranate
, pyfaidx
, python
+ , pythonOlder
, R
}:

buildPythonPackage rec {
- pname = "CNVkit";
- version = "0.9.9";
+ pname = "cnvkit";
+ version = "0.9.10";
+ format = "setuptools";
+
+ disabled = pythonOlder "3.7";

  src = fetchFromGitHub {
    owner = "etal";
    repo = "cnvkit";
-   rev = "v${version}";
-   sha256 = "1q4l7jhr1k135an3n9aa9wsid5lk6fwxb0hcldrr6v6y76zi4gj1";
+   rev = "refs/tags/v${version}";
+   hash = "sha256-mCQXo3abwC06x/g51UBshqUk3dpqEVNUvx+cJ/EdYGQ=";
  };

  postPatch = ''
···
  meta = with lib; {
    homepage = "https://cnvkit.readthedocs.io";
    description = "A Python library and command-line software toolkit to infer and visualize copy number from high-throughput DNA sequencing data";
+   changelog = "https://github.com/etal/cnvkit/releases/tag/v${version}";
    license = licenses.asl20;
    maintainers = [ maintainers.jbedo ];
  };
+19 -12
pkgs/development/tools/analysis/frama-c/default.nix
···
{ lib, stdenv, fetchurl, makeWrapper, writeText
- , autoconf, ncurses, graphviz, doxygen
+ , graphviz, doxygen
, ocamlPackages, ltl2ba, coq, why3
, gdk-pixbuf, wrapGAppsHook
}:
···
    num
    ocamlgraph
    ppx_deriving
+   ppx_deriving_yojson
    ppx_import
    stdlib-shims
    why3
···

stdenv.mkDerivation rec {
  pname = "frama-c";
- version = "25.0";
- slang = "Manganese";
+ version = "26.1";
+ slang = "Iron";

  src = fetchurl {
    url = "https://frama-c.com/download/frama-c-${version}-${slang}.tar.gz";
-   sha256 = "sha256-Ii3O/NJyBTVAv1ts/zae/Ee4HCjzYOthZmnD8wqLwp8=";
+   hash = "sha256-UT7ajIyu8e5vzrz2oBKDDrtZqUacgUP/TRi0/kz9Qkg=";
  };

- preConfigure = lib.optionalString stdenv.cc.isClang "configureFlagsArray=(\"--with-cpp=clang -E -C\")";
- postConfigure = "patchShebangs src/plugins/value/gen-api.sh";
+ postConfigure = "patchShebangs src/plugins/eva/gen-api.sh";

  strictDeps = true;

- nativeBuildInputs = [ autoconf wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib ]);
+ nativeBuildInputs = [ wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib dune_3 ]);

  buildInputs = with ocamlPackages; [
-   ncurses ltl2ba ocamlgraph yojson menhirLib camlzip
+   dune-site dune-configurator
+   ltl2ba ocamlgraph yojson menhirLib camlzip
    lablgtk3 lablgtk3-sourceview3 coq graphviz zarith apron why3 mlgmpidl doxygen
-   ppx_deriving ppx_import
+   ppx_deriving ppx_import ppx_deriving_yojson
    gdk-pixbuf
  ];

- enableParallelBuilding = true;
+ buildPhase = ''
+   runHook preBuild
+   dune build -j$NIX_BUILD_CORES --release @install
+   runHook postBuild
+ '';
+
+ installFlags = [ "PREFIX=$(out)" ];

  preFixup = ''
-   gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath})
+   gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath}:$out/lib/)
  '';

  # Allow loading of external Frama-C plugins
+2 -2
pkgs/development/tools/database/sqlc/default.nix
···
{ lib, buildGoModule, fetchFromGitHub }:

let
- version = "1.17.0";
+ version = "1.17.2";
in
buildGoModule {
  pname = "sqlc";
···
    owner = "kyleconroy";
    repo = "sqlc";
    rev = "v${version}";
-   sha256 = "sha256-knblQwO+c8AD0WJ+1l6FJP8j8pdsVhKa/oiPqUJfsVY=";
+   sha256 = "sha256-30dIFo07C+noWdnq2sL1pEQZzTR4FfaV0FvyW4BxCU8=";
  };

  proxyVendor = true;
+24 -10
pkgs/development/tools/rbspy/default.nix
···
, fetchFromGitHub
, ruby
, which
+ , runCommand
+ , darwin
}:
+
rustPlatform.buildRustPackage rec {
  pname = "rbspy";
- version = "0.15.0";
+ version = "0.16.0";

  src = fetchFromGitHub {
    owner = pname;
    repo = pname;
    rev = "v${version}";
-   hash = "sha256-e6ZCRIJVKl3xbJym+h+ah/J4c+s7wf1laF7p63ubE4A=";
+   hash = "sha256-yM3bE79flvFSZvpkHXhhEh1MJrSSJzqZcX9aVRmz1ew=";
  };

- cargoHash = "sha256-yhZ0QM9vZxyFCjTShbV7+Rn8w4lkPW7E7zKhrK4qa1E=";
+ cargoHash = "sha256-qvx5zPEIwvh2AIFCGNbVMNIRFtVjSLR9+exbSeQ9oXI=";
  doCheck = true;

  # The current implementation of rbspy fails to detect the version of ruby
···
    substituteInPlace src/core/process.rs \
      --replace /usr/bin/which '${which}/bin/which'
    substituteInPlace src/sampler/mod.rs \
-     --replace /usr/bin/which '${which}/bin/which' \
-     --replace 'fn test_sample_single_process_with_time_limit(' '#[ignore] fn test_sample_single_process_with_time_limit(' \
-     --replace 'fn test_sample_single_process(' '#[ignore] fn test_sample_single_process(' \
-     --replace 'fn test_sample_subprocesses(' '#[ignore] fn test_sample_subprocesses('
-   substituteInPlace src/core/ruby_spy.rs \
-     --replace 'fn test_get_trace(' '#[ignore] fn test_get_trace(' \
-     --replace 'fn test_get_trace_when_process_has_exited(' '#[ignore] fn test_get_trace_when_process_has_exited('
+     --replace /usr/bin/which '${which}/bin/which'
  '';

+ checkFlags = [
+   "--skip=test_get_trace"
+   "--skip=test_get_trace_when_process_has_exited"
+   "--skip=test_sample_single_process"
+   "--skip=test_sample_single_process_with_time_limit"
+   "--skip=test_sample_subprocesses"
+ ];
+
  nativeBuildInputs = [ ruby which ];
+
+ buildInputs = lib.optionals (stdenv.isDarwin && stdenv.isx86_64) [
+   # Pull a header that contains a definition of proc_pid_rusage().
+   (runCommand "${pname}_headers" { } ''
+     install -Dm444 ${lib.getDev darwin.apple_sdk.sdk}/include/libproc.h $out/include/libproc.h
+   '')
+ ];
+
+ LIBCLANG_PATH = lib.optionalString stdenv.isDarwin "${stdenv.cc.cc.lib}/lib";

  meta = with lib; {
    broken = (stdenv.isLinux && stdenv.isAarch64);
+3 -3
pkgs/tools/admin/trivy/default.nix
···

buildGoModule rec {
  pname = "trivy";
- version = "0.37.2";
+ version = "0.37.3";

  src = fetchFromGitHub {
    owner = "aquasecurity";
    repo = pname;
    rev = "v${version}";
-   sha256 = "sha256-k5S0ttOhI+vjiGJpIPVi9ro6n3f2Cxe7HiADvs14Zuo=";
+   sha256 = "sha256-fndA2rApDXwKeQEQ9Vy/9iJBJPcRWt+yJfvRdNDOwZU=";
  };
  # hash mismatch across linux and darwin
  proxyVendor = true;
- vendorSha256 = "sha256-EJw5DxiBF+gw5X+vqrnZsNCm2umOHEq6GeQ5V/Z0DrE=";
+ vendorHash = "sha256-91UPIz5HM82d6s8kHEb9w/vLQgXmoV8fIcbRyXDMNL8=";

  excludedPackages = "misc";

+26
pkgs/tools/security/go-dork/default.nix
···
+ { lib
+ , buildGoModule
+ , fetchFromGitHub
+ }:
+
+ buildGoModule rec {
+   pname = "go-dork";
+   version = "1.0.2";
+
+   src = fetchFromGitHub {
+     owner = "dwisiswant0";
+     repo = pname;
+     rev = "refs/tags/v${version}";
+     hash = "sha256-tFmXutX3UnKAFFS4mO4PCv7Bhw1wJ7qjdA1ROryqYZU=";
+   };
+
+   vendorHash = "sha256-6V58RRRPamBMDAf0gg4sQMQkoD5dWauCFtPrwf5EasI=";
+
+   meta = with lib; {
+     description = "Dork scanner";
+     homepage = "https://github.com/dwisiswant0/go-dork";
+     changelog = "https://github.com/dwisiswant0/go-dork/releases/tag/v${version}";
+     license = licenses.mit;
+     maintainers = with maintainers; [ fab ];
+   };
+ }
+2
pkgs/top-level/all-packages.nix
···

go-cve-search = callPackage ../tools/security/go-cve-search { };

+ go-dork = callPackage ../tools/security/go-dork { };
+
chkcrontab = callPackage ../tools/admin/chkcrontab { };

claws = callPackage ../tools/misc/claws { };