Merge staging-next into staging

authored by github-actions[bot] and committed by GitHub
288c9d78 f4c041be

+243 -78
+5
doc/languages-frameworks/python.section.md
···
  Updating packages in bulk leads to lots of breakages, which is why a
  stabilization period on the `python-unstable` branch is required.

+ If a package is fragile and often breaks during these bulk updates, it
+ may be reasonable to set `passthru.skipBulkUpdate = true` in the
+ derivation. This decision should not be made on a whim and should
+ always be supported by a qualifying comment.
+
  Once the branch is sufficiently stable it should normally be merged
  into the `staging` branch.

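
For illustration only (not part of this commit), a minimal sketch of what such an opt-out could look like in a package expression; the package name, version, hash placeholder, and comment are hypothetical:

    { lib, buildPythonPackage, fetchPypi }:

    buildPythonPackage rec {
      pname = "example-fragile-package";  # hypothetical name, for illustration only
      version = "1.0.0";

      src = fetchPypi {
        inherit pname version;
        hash = lib.fakeHash;  # placeholder
      };

      # Excluded from bulk updates; the qualifying comment should state why,
      # e.g. frequent upstream prereleases that break dependent packages.
      passthru.skipBulkUpdate = true;
    }
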
+3 -5
maintainers/scripts/update-python-libraries
···
- #!/bin/sh
- build=`nix-build -E "with import (fetchTarball "channel:nixpkgs-unstable") {}; python3.withPackages(ps: with ps; [ packaging requests toolz ])"`
- python=${build}/bin/python
- exec ${python} pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
-
+ #!/usr/bin/env nix-shell
+ #!nix-shell -I nixpkgs=channel:nixpkgs-unstable -i bash -p "python3.withPackages (ps: with ps; [ packaging requests ])" -p nix-prefetch-git
+ exec python3 pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
+1 -1
nixos/modules/services/matrix/synapse.md
···
      "m.homeserver".base_url = "https://${fqdn}";
      "m.identity_server" = {};
    };
-   serverConfig."m.server" = "${config.services.matrix-synapse.settings.server_name}:443";
+   serverConfig."m.server" = "${fqdn}:443";
    mkWellKnown = data: ''
      add_header Content-Type application/json;
      add_header Access-Control-Allow-Origin *;
+1
pkgs/applications/audio/psst/default.nix
···
    categories = [ "Audio" "AudioVideo" ];
    icon = "psst";
    terminal = false;
+   startupWMClass = "psst-gui";
  };

  in
+17
pkgs/applications/editors/emacs/elisp-packages/melpa-packages.nix
···

  dune = dontConfigure super.dune;

+ emacsql = super.emacsql.overrideAttrs (old: {
+   buildInputs = old.buildInputs ++ [ pkgs.sqlite ];
+
+   postBuild = ''
+     cd source/sqlite
+     make
+     cd -
+   '';
+
+   postInstall = (old.postInstall or "") + "\n" + ''
+     install -m=755 -D source/sqlite/emacsql-sqlite \
+       $out/share/emacs/site-lisp/elpa/emacsql-${old.version}/sqlite/emacsql-sqlite
+   '';
+
+   stripDebugList = [ "share" ];
+ });
+
  emacsql-sqlite = super.emacsql-sqlite.overrideAttrs (old: {
    buildInputs = old.buildInputs ++ [ pkgs.sqlite ];

+3 -3
pkgs/applications/misc/ticker/default.nix
···

  buildGoModule rec {
    pname = "ticker";
-   version = "4.5.5";
+   version = "4.5.6";

    src = fetchFromGitHub {
      owner = "achannarasappa";
      repo = pname;
      rev = "refs/tags/v${version}";
-     hash = "sha256-7FSyW71NWmWmBNQ5QUqMJ4x9WLXpm0kvvjdjzx1yk/M=";
+     hash = "sha256-h7k/zAYqpCAGn2dW+a3gOF/BN5ywjy/2Yx6THK9zk6k=";
    };

-   vendorHash = "sha256-6bosJ2AlbLZ551tCNPmvNyyReFJG+iS3SYUFti2/CAw=";
+   vendorHash = "sha256-c7wU9LLRlS9kOhE4yAiKAs/npQe8lvSwPcd+/D8o9rk=";

    ldflags = [
      "-s"
+10 -2
pkgs/development/compilers/mruby/default.nix
···

  stdenv.mkDerivation rec {
    pname = "mruby";
-   version = "3.1.0";
+   version = "3.2.0";

    src = fetchFromGitHub {
      owner = "mruby";
      repo = "mruby";
      rev = version;
-     sha256 = "0gnzip7qfadnl0r1k8bpc9a6796sy503h77ggds02wrz7mpq32nf";
+     sha256 = "sha256-MmrbWeg/G29YBvVrOtceTOZChrQ2kx9+apl7u7BiGjA=";
    };

    nativeBuildInputs = [ ruby bison rake ];
···
    '';

    doCheck = true;
+
+   checkPhase = ''
+     runHook preCheck
+
+     rake test
+
+     runHook postCheck
+   '';

    meta = with lib; {
      description = "An embeddable implementation of the Ruby language";
+2 -1
pkgs/development/interpreters/python/update-python-libraries/default.nix
···
- { python3, runCommand, git, nix }:
+ { python3, runCommand, git, nix, nix-prefetch-git }:

  runCommand "update-python-libraries" {
    buildInputs = [
      nix
+     nix-prefetch-git
      (python3.withPackages(ps: with ps; [ packaging requests toolz ]))
      git
    ];
+111 -31
pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py
···
  """

  import argparse
+ import json
+ import logging
  import os
- import pathlib
  import re
  import requests
  from concurrent.futures import ThreadPoolExecutor as Pool
  from packaging.version import Version as _Version
  from packaging.version import InvalidVersion
  from packaging.specifiers import SpecifierSet
+ from typing import Optional, Any
  import collections
  import subprocess

···

  PRERELEASES = False

+ BULK_UPDATE = False
+
  GIT = "git"

- NIXPGKS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
+ NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()

- import logging
  logging.basicConfig(level=logging.INFO)


···
      values = regex.findall(text)
      return values

+
+ def _get_attr_value(attr_path: str) -> Optional[Any]:
+     try:
+         response = subprocess.check_output([
+             "nix",
+             "--extra-experimental-features", "nix-command",
+             "eval",
+             "-f", f"{NIXPKGS_ROOT}/default.nix",
+             "--json",
+             f"{attr_path}"
+         ])
+         return json.loads(response.decode())
+     except (subprocess.CalledProcessError, ValueError):
+         return None
+
+
  def _get_unique_value(attribute, text):
      """Match attribute in text and return unique match.
···
      else:
          raise ValueError("no value found for {}".format(attribute))

- def _get_line_and_value(attribute, text):
+ def _get_line_and_value(attribute, text, value=None):
      """Match attribute in text. Return the line and the value of the attribute."""
-     regex = '({}\s+=\s+"(.*)";)'.format(attribute)
+     if value is None:
+         regex = rf'({attribute}\s+=\s+\"(.*)\";)'
+     else:
+         regex = rf'({attribute}\s+=\s+\"({value})\";)'
      regex = re.compile(regex)
-     value = regex.findall(text)
-     n = len(value)
+     results = regex.findall(text)
+     n = len(results)
      if n > 1:
          raise ValueError("found too many values for {}".format(attribute))
      elif n == 1:
-         return value[0]
+         return results[0]
      else:
          raise ValueError("no value found for {}".format(attribute))


- def _replace_value(attribute, value, text):
+ def _replace_value(attribute, value, text, oldvalue=None):
      """Search and replace value of attribute in text."""
-     old_line, old_value = _get_line_and_value(attribute, text)
+     if oldvalue is None:
+         old_line, old_value = _get_line_and_value(attribute, text)
+     else:
+         old_line, old_value = _get_line_and_value(attribute, text, oldvalue)
      new_line = old_line.replace(old_value, value)
      new_text = text.replace(old_line, new_line)
      return new_text
···
          return r.json()
      else:
          raise ValueError("request for {} failed".format(url))
+
+
+ def _hash_to_sri(algorithm, value):
+     """Convert a hash to its SRI representation"""
+     return subprocess.check_output([
+         "nix",
+         "hash",
+         "to-sri",
+         "--type", algorithm,
+         value
+     ]).decode().strip()
+
+
+ def _skip_bulk_update(attr_name: str) -> bool:
+     return bool(_get_attr_value(
+         f"{attr_name}.skipBulkUpdate"
+     ))


  SEMVER = {
···
      attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}")
      try:
          homepage = subprocess.check_output(
-             ["nix", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
+             ["nix", "eval", "-f", f"{NIXPKGS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
              .decode('utf-8')
      except Exception as e:
          raise ValueError(f"Unable to determine homepage: {e}")
···

      release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases))
      prefix = get_prefix(release['tag_name'])
-     try:
-         sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", f"{release['tarball_url']}"], stderr=subprocess.DEVNULL)\
-             .decode('utf-8').strip()
-     except:
-         # this may fail if they have both a branch and a tag of the same name, attempt tag name
-         tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
-         sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url], stderr=subprocess.DEVNULL)\
-             .decode('utf-8').strip()

+     # some attributes require using the fetchgit
+     git_fetcher_args = []
+     if (_get_attr_value(f"{attr_path}.src.fetchSubmodules")):
+         git_fetcher_args.append("--fetch-submodules")
+     if (_get_attr_value(f"{attr_path}.src.fetchLFS")):
+         git_fetcher_args.append("--fetch-lfs")
+     if (_get_attr_value(f"{attr_path}.src.leaveDotGit")):
+         git_fetcher_args.append("--leave-dotGit")

-     return version, sha256, prefix
+     if git_fetcher_args:
+         algorithm = "sha256"
+         cmd = [
+             "nix-prefetch-git",
+             f"https://github.com/{owner}/{repo}.git",
+             "--hash", algorithm,
+             "--rev", f"refs/tags/{release['tag_name']}"
+         ]
+         cmd.extend(git_fetcher_args)
+         response = subprocess.check_output(cmd)
+         document = json.loads(response.decode())
+         hash = _hash_to_sri(algorithm, document[algorithm])
+     else:
+         try:
+             hash = subprocess.check_output([
+                 "nix-prefetch-url",
+                 "--type", "sha256",
+                 "--unpack",
+                 f"{release['tarball_url']}"
+             ], stderr=subprocess.DEVNULL).decode('utf-8').strip()
+         except (subprocess.CalledProcessError, UnicodeError):
+             # this may fail if they have both a branch and a tag of the same name, attempt tag name
+             tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
+             hash = subprocess.check_output([
+                 "nix-prefetch-url",
+                 "--type", "sha256",
+                 "--unpack",
+                 tag_url
+             ], stderr=subprocess.DEVNULL).decode('utf-8').strip()
+
+     return version, hash, prefix


  FETCHERS = {
···
      if fetcher == 'fetchPypi':
          try:
              src_format = _get_unique_value('format', text)
-         except ValueError as e:
+         except ValueError:
              src_format = None # format was not given

          try:
              extension = _get_unique_value('extension', text)
-         except ValueError as e:
+         except ValueError:
              extension = None # extension was not given

          if extension is None:
···
              raise ValueError('url does not point to PyPI.')

      elif fetcher == 'fetchFromGitHub':
-         if "fetchSubmodules" in text:
-             raise ValueError("fetchFromGitHub fetcher doesn't support submodules")
          extension = "tar.gz"

      return extension
···
      # Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo
      successful_fetch = False
      for pname in pnames:
+         if BULK_UPDATE and _skip_bulk_update(f"python3Packages.{pname}"):
+             raise ValueError(f"Bulk update skipped for {pname}")
          try:
              new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)
              successful_fetch = True
···
          raise ValueError("no file available for {}.".format(pname))

      text = _replace_value('version', new_version, text)
+
      # hashes from pypi are 16-bit encoded sha256's, normalize it to sri to avoid merge conflicts
      # sri hashes have been the default format since nix 2.4+
-     sri_hash = subprocess.check_output(["nix", "--extra-experimental-features", "nix-command", "hash", "to-sri", "--type", "sha256", new_sha256]).decode('utf-8').strip()
+     sri_hash = _hash_to_sri("sha256", new_sha256)

-
-     # fetchers can specify a sha256, or a sri hash
-     try:
-         text = _replace_value('sha256', sri_hash, text)
-     except ValueError:
-         text = _replace_value('hash', sri_hash, text)
+     # retrieve the old output hash for a more precise match
+     if old_hash := _get_attr_value(f"python3Packages.{pname}.src.outputHash"):
+         # fetchers can specify a sha256, or a sri hash
+         try:
+             text = _replace_value('hash', sri_hash, text, old_hash)
+         except ValueError:
+             text = _replace_value('sha256', sri_hash, text, old_hash)
+     else:
+         raise ValueError(f"Unable to retrieve old hash for {pname}")

      if fetcher == 'fetchFromGitHub':
          # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";`
···
      target = args.target

      packages = list(map(os.path.abspath, args.package))
+
+     if len(packages) > 1:
+         global BULK_UPDATE
+         BULK_UPDATE = True

      logging.info("Updating packages...")

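
For context, a sketch (not taken from this commit) of the kind of source attribute the new nix-prefetch-git path is aimed at: when `fetchSubmodules`, `fetchLFS`, or `leaveDotGit` is set on `src`, the updater now prefetches with nix-prefetch-git instead of nix-prefetch-url. The owner, repo, and rev below are hypothetical:

    { fetchFromGitHub, lib }:

    fetchFromGitHub {
      owner = "example-owner";   # hypothetical
      repo = "example-repo";     # hypothetical
      rev = "refs/tags/v1.2.3";
      fetchSubmodules = true;    # detected via `nix eval` on <attr>.src.fetchSubmodules
      hash = lib.fakeHash;       # placeholder, refreshed by the updater
    }
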
+5 -4
pkgs/development/python-modules/caldav/default.nix
···
  , icalendar
  , lxml
  , pytestCheckHook
+ , pythonOlder
  , pytz
  , recurring-ical-events
  , requests
- , six
  , tzlocal
  , vobject
  }:

  buildPythonPackage rec {
    pname = "caldav";
-   version = "1.1.3";
+   version = "1.2.0";

    format = "setuptools";
+   disabled = pythonOlder "3.7";

    src = fetchFromGitHub {
      owner = "python-caldav";
      repo = pname;
      rev = "refs/tags/v${version}";
-     hash = "sha256-ZilsCYr1M2WKSz/g5JV41JVsuHopPerxOevoG7FrEjQ=";
+     hash = "sha256-ibizwN4pxqzmVozVjrAPNSrmM1+8+/Qu6UnfRerrwUk=";
    };

    propagatedBuildInputs = [
      vobject
      lxml
      requests
-     six
      icalendar
      recurring-ical-events
    ];
···
    meta = with lib; {
      description = "CalDAV (RFC4791) client library";
      homepage = "https://github.com/python-caldav/caldav";
+     changelog = "https://github.com/python-caldav/caldav/releases/tag/v${version}";
      license = licenses.asl20;
      maintainers = with maintainers; [ marenz dotlambda ];
    };
+9 -4
pkgs/development/python-modules/cnvkit/default.nix
···
  , pomegranate
  , pyfaidx
  , python
+ , pythonOlder
  , R
  }:

  buildPythonPackage rec {
-   pname = "CNVkit";
-   version = "0.9.9";
+   pname = "cnvkit";
+   version = "0.9.10";
+   format = "setuptools";
+
+   disabled = pythonOlder "3.7";

    src = fetchFromGitHub {
      owner = "etal";
      repo = "cnvkit";
-     rev = "v${version}";
-     sha256 = "1q4l7jhr1k135an3n9aa9wsid5lk6fwxb0hcldrr6v6y76zi4gj1";
+     rev = "refs/tags/v${version}";
+     hash = "sha256-mCQXo3abwC06x/g51UBshqUk3dpqEVNUvx+cJ/EdYGQ=";
    };

    postPatch = ''
···
    meta = with lib; {
      homepage = "https://cnvkit.readthedocs.io";
      description = "A Python library and command-line software toolkit to infer and visualize copy number from high-throughput DNA sequencing data";
+     changelog = "https://github.com/etal/cnvkit/releases/tag/v${version}";
      license = licenses.asl20;
      maintainers = [ maintainers.jbedo ];
    };
+19 -12
pkgs/development/tools/analysis/frama-c/default.nix
···
  { lib, stdenv, fetchurl, makeWrapper, writeText
- , autoconf, ncurses, graphviz, doxygen
+ , graphviz, doxygen
  , ocamlPackages, ltl2ba, coq, why3
  , gdk-pixbuf, wrapGAppsHook
  }:
···
    num
    ocamlgraph
    ppx_deriving
+   ppx_deriving_yojson
    ppx_import
    stdlib-shims
    why3
···

  stdenv.mkDerivation rec {
    pname = "frama-c";
-   version = "25.0";
-   slang = "Manganese";
+   version = "26.1";
+   slang = "Iron";

    src = fetchurl {
-     url = "https://frama-c.com/download/frama-c-${version}-${slang}.tar.gz";
-     sha256 = "sha256-Ii3O/NJyBTVAv1ts/zae/Ee4HCjzYOthZmnD8wqLwp8=";
+     url = "https://frama-c.com/download/frama-c-${version}-${slang}.tar.gz";
+     hash = "sha256-UT7ajIyu8e5vzrz2oBKDDrtZqUacgUP/TRi0/kz9Qkg=";
    };

-   preConfigure = lib.optionalString stdenv.cc.isClang "configureFlagsArray=(\"--with-cpp=clang -E -C\")";
-   postConfigure = "patchShebangs src/plugins/value/gen-api.sh";
+   postConfigure = "patchShebangs src/plugins/eva/gen-api.sh";

    strictDeps = true;

-   nativeBuildInputs = [ autoconf wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib ]);
+   nativeBuildInputs = [ wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib dune_3 ]);

    buildInputs = with ocamlPackages; [
-     ncurses ltl2ba ocamlgraph yojson menhirLib camlzip
+     dune-site dune-configurator
+     ltl2ba ocamlgraph yojson menhirLib camlzip
      lablgtk3 lablgtk3-sourceview3 coq graphviz zarith apron why3 mlgmpidl doxygen
-     ppx_deriving ppx_import
+     ppx_deriving ppx_import ppx_deriving_yojson
      gdk-pixbuf
    ];

-   enableParallelBuilding = true;
+   buildPhase = ''
+     runHook preBuild
+     dune build -j$NIX_BUILD_CORES --release @install
+     runHook postBuild
+   '';
+
+   installFlags = [ "PREFIX=$(out)" ];

    preFixup = ''
-     gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath})
+     gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath}:$out/lib/)
    '';

    # Allow loading of external Frama-C plugins
+2 -2
pkgs/development/tools/database/sqlc/default.nix
···
  { lib, buildGoModule, fetchFromGitHub }:

  let
-   version = "1.17.0";
+   version = "1.17.2";
  in
  buildGoModule {
    pname = "sqlc";
···
      owner = "kyleconroy";
      repo = "sqlc";
      rev = "v${version}";
-     sha256 = "sha256-knblQwO+c8AD0WJ+1l6FJP8j8pdsVhKa/oiPqUJfsVY=";
+     sha256 = "sha256-30dIFo07C+noWdnq2sL1pEQZzTR4FfaV0FvyW4BxCU8=";
    };

    proxyVendor = true;
+24 -10
pkgs/development/tools/rbspy/default.nix
···
  , fetchFromGitHub
  , ruby
  , which
+ , runCommand
+ , darwin
  }:
+
  rustPlatform.buildRustPackage rec {
    pname = "rbspy";
-   version = "0.15.0";
+   version = "0.16.0";

    src = fetchFromGitHub {
      owner = pname;
      repo = pname;
      rev = "v${version}";
-     hash = "sha256-e6ZCRIJVKl3xbJym+h+ah/J4c+s7wf1laF7p63ubE4A=";
+     hash = "sha256-yM3bE79flvFSZvpkHXhhEh1MJrSSJzqZcX9aVRmz1ew=";
    };

-   cargoHash = "sha256-yhZ0QM9vZxyFCjTShbV7+Rn8w4lkPW7E7zKhrK4qa1E=";
+   cargoHash = "sha256-qvx5zPEIwvh2AIFCGNbVMNIRFtVjSLR9+exbSeQ9oXI=";
    doCheck = true;

    # The current implementation of rbspy fails to detect the version of ruby
···
      substituteInPlace src/core/process.rs \
        --replace /usr/bin/which '${which}/bin/which'
      substituteInPlace src/sampler/mod.rs \
-       --replace /usr/bin/which '${which}/bin/which' \
-       --replace 'fn test_sample_single_process_with_time_limit(' '#[ignore] fn test_sample_single_process_with_time_limit(' \
-       --replace 'fn test_sample_single_process(' '#[ignore] fn test_sample_single_process(' \
-       --replace 'fn test_sample_subprocesses(' '#[ignore] fn test_sample_subprocesses('
-     substituteInPlace src/core/ruby_spy.rs \
-       --replace 'fn test_get_trace(' '#[ignore] fn test_get_trace(' \
-       --replace 'fn test_get_trace_when_process_has_exited(' '#[ignore] fn test_get_trace_when_process_has_exited('
+       --replace /usr/bin/which '${which}/bin/which'
    '';

+   checkFlags = [
+     "--skip=test_get_trace"
+     "--skip=test_get_trace_when_process_has_exited"
+     "--skip=test_sample_single_process"
+     "--skip=test_sample_single_process_with_time_limit"
+     "--skip=test_sample_subprocesses"
+   ];
+
    nativeBuildInputs = [ ruby which ];
+
+   buildInputs = lib.optionals (stdenv.isDarwin && stdenv.isx86_64) [
+     # Pull a header that contains a definition of proc_pid_rusage().
+     (runCommand "${pname}_headers" { } ''
+       install -Dm444 ${lib.getDev darwin.apple_sdk.sdk}/include/libproc.h $out/include/libproc.h
+     '')
+   ];
+
+   LIBCLANG_PATH = lib.optionalString stdenv.isDarwin "${stdenv.cc.cc.lib}/lib";

    meta = with lib; {
      broken = (stdenv.isLinux && stdenv.isAarch64);
+3 -3
pkgs/tools/admin/trivy/default.nix
···

  buildGoModule rec {
    pname = "trivy";
-   version = "0.37.2";
+   version = "0.37.3";

    src = fetchFromGitHub {
      owner = "aquasecurity";
      repo = pname;
      rev = "v${version}";
-     sha256 = "sha256-k5S0ttOhI+vjiGJpIPVi9ro6n3f2Cxe7HiADvs14Zuo=";
+     sha256 = "sha256-fndA2rApDXwKeQEQ9Vy/9iJBJPcRWt+yJfvRdNDOwZU=";
    };
    # hash missmatch on across linux and darwin
    proxyVendor = true;
-   vendorSha256 = "sha256-EJw5DxiBF+gw5X+vqrnZsNCm2umOHEq6GeQ5V/Z0DrE=";
+   vendorHash = "sha256-91UPIz5HM82d6s8kHEb9w/vLQgXmoV8fIcbRyXDMNL8=";

    excludedPackages = "misc";
+26
pkgs/tools/security/go-dork/default.nix
···
+ { lib
+ , buildGoModule
+ , fetchFromGitHub
+ }:
+
+ buildGoModule rec {
+   pname = "go-dork";
+   version = "1.0.2";
+
+   src = fetchFromGitHub {
+     owner = "dwisiswant0";
+     repo = pname;
+     rev = "refs/tags/v${version}";
+     hash = "sha256-tFmXutX3UnKAFFS4mO4PCv7Bhw1wJ7qjdA1ROryqYZU=";
+   };
+
+   vendorHash = "sha256-6V58RRRPamBMDAf0gg4sQMQkoD5dWauCFtPrwf5EasI=";
+
+   meta = with lib; {
+     description = "Dork scanner";
+     homepage = "https://github.com/dwisiswant0/go-dork";
+     changelog = "https://github.com/dwisiswant0/go-dork/releases/tag/v${version}";
+     license = licenses.mit;
+     maintainers = with maintainers; [ fab ];
+   };
+ }
+2
pkgs/top-level/all-packages.nix
···

    go-cve-search = callPackage ../tools/security/go-cve-search { };

+   go-dork = callPackage ../tools/security/go-dork { };
+
    chkcrontab = callPackage ../tools/admin/chkcrontab { };

    claws = callPackage ../tools/misc/claws { };