Merge master into staging-next

authored by github-actions[bot] and committed by GitHub 7f11b9b4 b4a443d8

+906 -444
+4
nixos/modules/system/activation/switch-to-configuration.pl
··· 22 use IPC::Cmd; 23 use Sys::Syslog qw(:standard :macros); 24 use Cwd qw(abs_path); 25 26 ## no critic(ControlStructures::ProhibitDeepNests) 27 ## no critic(ErrorHandling::RequireCarping) ··· 91 } 92 93 make_path("/run/nixos", { mode => oct(755) }); 94 openlog("nixos", "", LOG_USER); 95 96 # Install or update the bootloader. ··· 985 syslog(LOG_ERR, "switching to system configuration $toplevel failed (status $res)"); 986 } 987 988 exit($res);
··· 22 use IPC::Cmd; 23 use Sys::Syslog qw(:standard :macros); 24 use Cwd qw(abs_path); 25 + use Fcntl ':flock'; 26 27 ## no critic(ControlStructures::ProhibitDeepNests) 28 ## no critic(ErrorHandling::RequireCarping) ··· 92 } 93 94 make_path("/run/nixos", { mode => oct(755) }); 95 + open(my $stc_lock, '>>', '/run/nixos/switch-to-configuration.lock') or die "Could not open lock - $!"; 96 + flock($stc_lock, LOCK_EX) or die "Could not acquire lock - $!"; 97 openlog("nixos", "", LOG_USER); 98 99 # Install or update the bootloader. ··· 988 syslog(LOG_ERR, "switching to system configuration $toplevel failed (status $res)"); 989 } 990 991 + close($stc_lock) or die "Could not close lock - $!"; 992 exit($res);
+2 -2
pkgs/applications/video/qmplay2/default.nix
··· 26 27 stdenv.mkDerivation (finalAttrs: { 28 pname = "qmplay2"; 29 - version = "23.08.22"; 30 31 src = fetchFromGitHub { 32 owner = "zaps166"; 33 repo = "QMPlay2"; 34 rev = finalAttrs.version; 35 fetchSubmodules = true; 36 - hash = "sha256-Ug7WAqZ+BxspQUXweL/OnVBGCsU60DOWNexbi0GpDo0="; 37 }; 38 39 nativeBuildInputs = [
··· 26 27 stdenv.mkDerivation (finalAttrs: { 28 pname = "qmplay2"; 29 + version = "23.10.22"; 30 31 src = fetchFromGitHub { 32 owner = "zaps166"; 33 repo = "QMPlay2"; 34 rev = finalAttrs.version; 35 fetchSubmodules = true; 36 + hash = "sha256-yDymUXuILgT4AFTt302GniPi/WNwrTCOuOfdUiKOIyk="; 37 }; 38 39 nativeBuildInputs = [
+18 -8
pkgs/data/themes/nordic/default.nix
··· 80 81 sourceRoot = "."; 82 83 nativeBuildInputs = [ jdupes ]; 84 85 - propagatedUserEnvPkgs = [ 86 - gtk-engine-murrine 87 - breeze-icons 88 - plasma-framework 89 - plasma-workspace 90 - ]; 91 92 dontWrapQtApps = true; 93 ··· 119 rmdir $out/share/themes/Nordic/extras{/wallpapers,} 120 121 # move kde related contents to appropriate directories 122 - mkdir -p $out/share/{aurorae/themes,color-schemes,Kvantum,plasma,sddm/themes,icons} 123 mv -v $out/share/themes/Nordic/kde/aurorae/* $out/share/aurorae/themes/ 124 mv -v $out/share/themes/Nordic/kde/colorschemes/* $out/share/color-schemes/ 125 mv -v $out/share/themes/Nordic/kde/konsole $out/share/ 126 mv -v $out/share/themes/Nordic/kde/kvantum/* $out/share/Kvantum/ 127 mv -v $out/share/themes/Nordic/kde/plasma/look-and-feel $out/share/plasma/ 128 - mv -v $out/share/themes/Nordic/kde/sddm/* $out/share/sddm/themes/ 129 mv -v $out/share/themes/Nordic/kde/folders/* $out/share/icons/ 130 mv -v $out/share/themes/Nordic/kde/cursors/*-cursors $out/share/icons/ 131 rm -rf $out/share/themes/Nordic/kde 132 133 # Replace duplicate files with symbolic links to the first file in ··· 135 jdupes --quiet --link-soft --recurse $out/share 136 137 runHook postInstall 138 ''; 139 140 meta = with lib; {
··· 80 81 sourceRoot = "."; 82 83 + outputs = [ "out" "sddm" ]; 84 + 85 nativeBuildInputs = [ jdupes ]; 86 87 + propagatedUserEnvPkgs = [ gtk-engine-murrine ]; 88 89 dontWrapQtApps = true; 90 ··· 116 rmdir $out/share/themes/Nordic/extras{/wallpapers,} 117 118 # move kde related contents to appropriate directories 119 + mkdir -p $out/share/{aurorae/themes,color-schemes,Kvantum,plasma,icons} 120 mv -v $out/share/themes/Nordic/kde/aurorae/* $out/share/aurorae/themes/ 121 mv -v $out/share/themes/Nordic/kde/colorschemes/* $out/share/color-schemes/ 122 mv -v $out/share/themes/Nordic/kde/konsole $out/share/ 123 mv -v $out/share/themes/Nordic/kde/kvantum/* $out/share/Kvantum/ 124 mv -v $out/share/themes/Nordic/kde/plasma/look-and-feel $out/share/plasma/ 125 mv -v $out/share/themes/Nordic/kde/folders/* $out/share/icons/ 126 mv -v $out/share/themes/Nordic/kde/cursors/*-cursors $out/share/icons/ 127 + 128 + mkdir -p $sddm/share/sddm/themes 129 + mv -v $out/share/themes/Nordic/kde/sddm/* $sddm/share/sddm/themes/ 130 + 131 rm -rf $out/share/themes/Nordic/kde 132 133 # Replace duplicate files with symbolic links to the first file in ··· 135 jdupes --quiet --link-soft --recurse $out/share 136 137 runHook postInstall 138 + ''; 139 + 140 + postFixup = '' 141 + # Propagate sddm theme dependencies to user env otherwise sddm 142 + # does not find them. Putting them in buildInputs is not enough. 143 + 144 + mkdir -p $sddm/nix-support 145 + 146 + printWords ${breeze-icons} ${plasma-framework} ${plasma-workspace} \ 147 + >> $sddm/nix-support/propagated-user-env-packages 148 ''; 149 150 meta = with lib; {
+6 -9
pkgs/development/python-modules/azure-mgmt-containerregistry/default.nix
··· 4 , pythonOlder 5 , azure-common 6 , azure-mgmt-core 7 - , msrest 8 - , typing-extensions 9 }: 10 11 buildPythonPackage rec { 12 pname = "azure-mgmt-containerregistry"; 13 - version = "10.1.0"; 14 format = "setuptools"; 15 16 - disabled = pythonOlder "3.7"; 17 18 src = fetchPypi { 19 inherit pname version; 20 - hash = "sha256-VrX9YfYNvlA8+eNqHCp35BAeQZzQKakZs7ZZKwT8oYc="; 21 - extension = "zip"; 22 }; 23 24 propagatedBuildInputs = [ 25 azure-common 26 azure-mgmt-core 27 - msrest 28 - ] ++ lib.optionals (pythonOlder "3.8") [ 29 - typing-extensions 30 ]; 31 32 # no tests included ··· 40 meta = with lib; { 41 description = "Microsoft Azure Container Registry Client Library for Python"; 42 homepage = "https://github.com/Azure/azure-sdk-for-python"; 43 license = licenses.mit; 44 maintainers = with maintainers; [ jonringer ]; 45 };
··· 4 , pythonOlder 5 , azure-common 6 , azure-mgmt-core 7 + , isodate 8 }: 9 10 buildPythonPackage rec { 11 pname = "azure-mgmt-containerregistry"; 12 + version = "10.2.0"; 13 format = "setuptools"; 14 15 + disabled = pythonOlder "3.8"; 16 17 src = fetchPypi { 18 inherit pname version; 19 + hash = "sha256-i7i/5ofGxiF9/wTAPnUOaZ6FAgK3EaBqoHeSC8HuXCo="; 20 }; 21 22 propagatedBuildInputs = [ 23 azure-common 24 azure-mgmt-core 25 + isodate 26 ]; 27 28 # no tests included ··· 36 meta = with lib; { 37 description = "Microsoft Azure Container Registry Client Library for Python"; 38 homepage = "https://github.com/Azure/azure-sdk-for-python"; 39 + changelog = "https://github.com/Azure/azure-sdk-for-python/blob/azure-mgmt-containerregistry_${version}/sdk/containerregistry/azure-mgmt-containerregistry/CHANGELOG.md"; 40 license = licenses.mit; 41 maintainers = with maintainers; [ jonringer ]; 42 };
+6 -8
pkgs/development/python-modules/azure-mgmt-netapp/default.nix
··· 4 , pythonOlder 5 , azure-common 6 , azure-mgmt-core 7 - , msrest 8 - , msrestazure 9 }: 10 11 buildPythonPackage rec { 12 pname = "azure-mgmt-netapp"; 13 - version = "10.1.0"; 14 format = "setuptools"; 15 16 - disabled = pythonOlder "3.7"; 17 18 src = fetchPypi { 19 inherit pname version; 20 - hash = "sha256-eJiWTOCk2C79Jotku9bKlu3vU6H8004hWrX+h76MjQM="; 21 - extension = "zip"; 22 }; 23 24 propagatedBuildInputs = [ 25 azure-common 26 azure-mgmt-core 27 - msrest 28 - msrestazure 29 ]; 30 31 # no tests included ··· 39 meta = with lib; { 40 description = "Microsoft Azure NetApp Files Management Client Library for Python"; 41 homepage = "https://github.com/Azure/azure-sdk-for-python"; 42 license = licenses.mit; 43 maintainers = with maintainers; [ jonringer ]; 44 };
··· 4 , pythonOlder 5 , azure-common 6 , azure-mgmt-core 7 + , isodate 8 }: 9 10 buildPythonPackage rec { 11 pname = "azure-mgmt-netapp"; 12 + version = "11.0.0"; 13 format = "setuptools"; 14 15 + disabled = pythonOlder "3.8"; 16 17 src = fetchPypi { 18 inherit pname version; 19 + hash = "sha256-00cDFHpaEciRQLHM+Kt3uOtw/geOn5+onrY7lav6EeU="; 20 }; 21 22 propagatedBuildInputs = [ 23 azure-common 24 azure-mgmt-core 25 + isodate 26 ]; 27 28 # no tests included ··· 36 meta = with lib; { 37 description = "Microsoft Azure NetApp Files Management Client Library for Python"; 38 homepage = "https://github.com/Azure/azure-sdk-for-python"; 39 + changelog = "https://github.com/Azure/azure-sdk-for-python/blob/azure-mgmt-netapp_${version}/sdk/netapp/azure-mgmt-netapp/CHANGELOG.md"; 40 license = licenses.mit; 41 maintainers = with maintainers; [ jonringer ]; 42 };
+20 -15
pkgs/development/python-modules/flask-security-too/default.nix
··· 2 , buildPythonPackage 3 , fetchPypi 4 , pythonOlder 5 6 # extras: babel 7 , babel ··· 11 , bcrypt 12 , bleach 13 , flask-mailman 14 - , qrcode 15 16 # extras: fsqla 17 , flask-sqlalchemy ··· 21 # extras: mfa 22 , cryptography 23 , phonenumbers 24 25 # propagates 26 - , blinker 27 , email-validator 28 , flask 29 , flask-login 30 , flask-principal 31 , flask-wtf 32 - , itsdangerous 33 , passlib 34 35 # tests 36 , argon2-cffi 37 - , flask-mongoengine 38 , mongoengine 39 , mongomock 40 , peewee ··· 46 47 buildPythonPackage rec { 48 pname = "flask-security-too"; 49 - version = "5.3.0"; 50 - format = "setuptools"; 51 52 disabled = pythonOlder "3.7"; 53 54 src = fetchPypi { 55 pname = "Flask-Security-Too"; 56 inherit version; 57 - hash = "sha256-n12DCRPqxm8YhFeVrl99BEvdDYNq6rzP662rain3k1Q="; 58 }; 59 60 - postPatch = '' 61 - # This should be removed after updating to version 5.3.0. 62 - sed -i '/filterwarnings =/a ignore:pkg_resources is deprecated:DeprecationWarning' pytest.ini 63 - ''; 64 65 propagatedBuildInputs = [ 66 - blinker 67 email-validator 68 flask 69 flask-login 70 flask-principal 71 flask-wtf 72 - itsdangerous 73 passlib 74 ]; 75 76 passthru.optional-dependencies = { ··· 82 bcrypt 83 bleach 84 flask-mailman 85 - qrcode 86 ]; 87 fsqla = [ 88 flask-sqlalchemy ··· 92 mfa = [ 93 cryptography 94 phonenumbers 95 ]; 96 }; 97 98 nativeCheckInputs = [ 99 argon2-cffi 100 - flask-mongoengine 101 mongoengine 102 mongomock 103 peewee ··· 111 ++ passthru.optional-dependencies.fsqla 112 ++ passthru.optional-dependencies.mfa; 113 114 115 pythonImportsCheck = [ 116 "flask_security"
··· 2 , buildPythonPackage 3 , fetchPypi 4 , pythonOlder 5 + , setuptools 6 7 # extras: babel 8 , babel ··· 12 , bcrypt 13 , bleach 14 , flask-mailman 15 16 # extras: fsqla 17 , flask-sqlalchemy ··· 21 # extras: mfa 22 , cryptography 23 , phonenumbers 24 + , webauthn 25 + , qrcode 26 27 # propagates 28 , email-validator 29 , flask 30 , flask-login 31 , flask-principal 32 , flask-wtf 33 , passlib 34 + , importlib-resources 35 + , wtforms 36 37 # tests 38 , argon2-cffi 39 , mongoengine 40 , mongomock 41 , peewee ··· 47 48 buildPythonPackage rec { 49 pname = "flask-security-too"; 50 + version = "5.3.2"; 51 + pyproject = true; 52 53 disabled = pythonOlder "3.7"; 54 55 src = fetchPypi { 56 pname = "Flask-Security-Too"; 57 inherit version; 58 + hash = "sha256-wLUHXfDWSp7zWwTIjTH79AWlkkNzb21tChpLSEWr8+U="; 59 }; 60 61 + nativeBuildInputs = [ 62 + setuptools 63 + ]; 64 65 propagatedBuildInputs = [ 66 email-validator 67 flask 68 flask-login 69 flask-principal 70 flask-wtf 71 passlib 72 + importlib-resources 73 + wtforms 74 ]; 75 76 passthru.optional-dependencies = { ··· 82 bcrypt 83 bleach 84 flask-mailman 85 ]; 86 fsqla = [ 87 flask-sqlalchemy ··· 91 mfa = [ 92 cryptography 93 phonenumbers 94 + webauthn 95 + qrcode 96 ]; 97 }; 98 99 nativeCheckInputs = [ 100 argon2-cffi 101 mongoengine 102 mongomock 103 peewee ··· 111 ++ passthru.optional-dependencies.fsqla 112 ++ passthru.optional-dependencies.mfa; 113 114 + 115 + disabledTests = [ 116 + # needs /etc/resolv.conf 117 + "test_login_email_whatever" 118 + ]; 119 120 pythonImportsCheck = [ 121 "flask_security"
+5 -5
pkgs/development/python-modules/garminconnect/default.nix
··· 1 { lib 2 , buildPythonPackage 3 - , cloudscraper 4 , fetchFromGitHub 5 , garth 6 , pdm-backend 7 , pythonOlder 8 , requests 9 }: 10 11 buildPythonPackage rec { 12 pname = "garminconnect"; 13 - version = "0.2.8"; 14 format = "pyproject"; 15 16 - disabled = pythonOlder "3.7"; 17 18 src = fetchFromGitHub { 19 owner = "cyberjunky"; 20 repo = "python-garminconnect"; 21 rev = "refs/tags/${version}"; 22 - hash = "sha256-jNDFSA6Mz0+7UhEVrCKcKDEX3B7yk6igBf59A6YlW2M="; 23 }; 24 25 nativeBuildInputs = [ ··· 27 ]; 28 29 propagatedBuildInputs = [ 30 - cloudscraper 31 garth 32 requests 33 ]; 34 35 # Tests require a token
··· 1 { lib 2 , buildPythonPackage 3 , fetchFromGitHub 4 , garth 5 , pdm-backend 6 , pythonOlder 7 , requests 8 + , withings-sync 9 }: 10 11 buildPythonPackage rec { 12 pname = "garminconnect"; 13 + version = "0.2.9"; 14 format = "pyproject"; 15 16 + disabled = pythonOlder "3.10"; 17 18 src = fetchFromGitHub { 19 owner = "cyberjunky"; 20 repo = "python-garminconnect"; 21 rev = "refs/tags/${version}"; 22 + hash = "sha256-wQWOksI0nfzIMdxgZehMmNytuXWD22GLUNoI7Ki0C3s="; 23 }; 24 25 nativeBuildInputs = [ ··· 27 ]; 28 29 propagatedBuildInputs = [ 30 garth 31 requests 32 + withings-sync 33 ]; 34 35 # Tests require a token
+2 -2
pkgs/development/python-modules/google-cloud-vision/default.nix
··· 12 13 buildPythonPackage rec { 14 pname = "google-cloud-vision"; 15 - version = "3.4.4"; 16 format = "setuptools"; 17 18 disabled = pythonOlder "3.7"; 19 20 src = fetchPypi { 21 inherit pname version; 22 - hash = "sha256-QFdErlCFIDTMR7MqmxuuUNP7Cc0eIWABQYKJHvV2ZpU="; 23 }; 24 25 propagatedBuildInputs = [
··· 12 13 buildPythonPackage rec { 14 pname = "google-cloud-vision"; 15 + version = "3.4.5"; 16 format = "setuptools"; 17 18 disabled = pythonOlder "3.7"; 19 20 src = fetchPypi { 21 inherit pname version; 22 + hash = "sha256-DfgkGrJ3GZuRnKODen3oUFk2P+oOPWYAYIcL587/wEc="; 23 }; 24 25 propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/hap-python/default.nix
··· 17 18 buildPythonPackage rec { 19 pname = "hap-python"; 20 - version = "4.9.0"; 21 format = "setuptools"; 22 23 disabled = pythonOlder "3.6"; ··· 26 owner = "ikalchev"; 27 repo = "HAP-python"; 28 rev = "refs/tags/${version}"; 29 - hash = "sha256-bFSqMAZWE3xTfnc7FSQMfAhxhKlYm65VFpm+q3yrqpE="; 30 }; 31 32 propagatedBuildInputs = [
··· 17 18 buildPythonPackage rec { 19 pname = "hap-python"; 20 + version = "4.9.1"; 21 format = "setuptools"; 22 23 disabled = pythonOlder "3.6"; ··· 26 owner = "ikalchev"; 27 repo = "HAP-python"; 28 rev = "refs/tags/${version}"; 29 + hash = "sha256-nnh8PSEcuPN1qGuInJ7uYe83zdne8axbTrHd4g1xoJs="; 30 }; 31 32 propagatedBuildInputs = [
+2 -2
pkgs/development/python-modules/peaqevcore/default.nix
··· 6 7 buildPythonPackage rec { 8 pname = "peaqevcore"; 9 - version = "19.5.10"; 10 format = "setuptools"; 11 12 disabled = pythonOlder "3.7"; 13 14 src = fetchPypi { 15 inherit pname version; 16 - hash = "sha256-izw41TUmqKOy34/RMHjBROQr88SChheKJVpPMaOubnE="; 17 }; 18 19 postPatch = ''
··· 6 7 buildPythonPackage rec { 8 pname = "peaqevcore"; 9 + version = "19.5.12"; 10 format = "setuptools"; 11 12 disabled = pythonOlder "3.7"; 13 14 src = fetchPypi { 15 inherit pname version; 16 + hash = "sha256-NsQrfJQ1+WZ4wNBH8ZGGo9IMJ+yvWrVQmesDBQrfRKg="; 17 }; 18 19 postPatch = ''
+8 -3
pkgs/development/python-modules/publicsuffixlist/default.nix
··· 5 , pytestCheckHook 6 , pythonOlder 7 , requests 8 }: 9 10 buildPythonPackage rec { 11 pname = "publicsuffixlist"; 12 - version = "0.10.0.20231026"; 13 - format = "setuptools"; 14 15 disabled = pythonOlder "3.7"; 16 17 src = fetchPypi { 18 inherit pname version; 19 - hash = "sha256-q2rUBjbue3I3VnRLTF7UscBs51bGxUGjMYwAkgX5UMs="; 20 }; 21 22 passthru.optional-dependencies = { 23 update = [
··· 5 , pytestCheckHook 6 , pythonOlder 7 , requests 8 + , setuptools 9 }: 10 11 buildPythonPackage rec { 12 pname = "publicsuffixlist"; 13 + version = "0.10.0.20231030"; 14 + pyproject = true; 15 16 disabled = pythonOlder "3.7"; 17 18 src = fetchPypi { 19 inherit pname version; 20 + hash = "sha256-1yRv6zg9mKJTinR57QHvCx/0mi0b2O3CkcoH1v4QuNo="; 21 }; 22 + 23 + nativeBuildInputs = [ 24 + setuptools 25 + ]; 26 27 passthru.optional-dependencies = { 28 update = [
+48
pkgs/development/python-modules/withings-sync/default.nix
···
··· 1 + { lib 2 + , buildPythonPackage 3 + , fetchFromGitHub 4 + , garth 5 + , lxml 6 + , pythonOlder 7 + , requests 8 + , setuptools 9 + , wheel 10 + }: 11 + 12 + buildPythonPackage rec { 13 + pname = "withings-sync"; 14 + version = "4.2.1"; 15 + pyproject = true; 16 + 17 + disabled = pythonOlder "3.10"; 18 + 19 + src = fetchFromGitHub { 20 + owner = "jaroslawhartman"; 21 + repo = "withings-sync"; 22 + rev = "refs/tags/v${version}"; 23 + hash = "sha256-6igjUmgIA077/1SQMt10tRpnLVKxGFNJN1GeLhQLROg="; 24 + }; 25 + 26 + nativeBuildInputs = [ 27 + setuptools 28 + wheel 29 + ]; 30 + 31 + propagatedBuildInputs = [ 32 + garth 33 + lxml 34 + requests 35 + ]; 36 + 37 + pythonImportsCheck = [ 38 + "withings_sync" 39 + ]; 40 + 41 + meta = with lib; { 42 + description = "Synchronisation of Withings weight"; 43 + homepage = "https://github.com/jaroslawhartman/withings-sync"; 44 + changelog = "https://github.com/jaroslawhartman/withings-sync/releases/tag/v${version}"; 45 + license = licenses.mit; 46 + maintainers = with maintainers; [ fab ]; 47 + }; 48 + }
+2 -2
pkgs/games/brogue-ce/default.nix
··· 9 10 stdenv.mkDerivation rec { 11 pname = "brogue-ce"; 12 - version = "1.12"; 13 14 src = fetchFromGitHub { 15 owner = "tmewett"; 16 repo = "BrogueCE"; 17 rev = "v${version}"; 18 - hash = "sha256-bGAE0hRiKBo3ikyObGxAiPRRO24KtC+upO3XLj+f4yo="; 19 }; 20 21 postPatch = ''
··· 9 10 stdenv.mkDerivation rec { 11 pname = "brogue-ce"; 12 + version = "1.13"; 13 14 src = fetchFromGitHub { 15 owner = "tmewett"; 16 repo = "BrogueCE"; 17 rev = "v${version}"; 18 + hash = "sha256-FUIdi1Ytn+INeD9550MW41qXtLb6in0QS3Snt8QaXUA="; 19 }; 20 21 postPatch = ''
+4
pkgs/os-specific/linux/kernel/common-config.nix
··· 558 PERSISTENT_KEYRINGS = yes; 559 # enable temporary caching of the last request_key() result 560 KEYS_REQUEST_CACHE = whenAtLeast "5.3" yes; 561 } // optionalAttrs stdenv.hostPlatform.isx86_64 { 562 # Enable Intel SGX 563 X86_SGX = whenAtLeast "5.11" yes; ··· 572 KVM_AMD_SEV = yes; 573 # AMD SEV-SNP 574 SEV_GUEST = whenAtLeast "5.19" module; 575 }; 576 577 microcode = {
··· 558 PERSISTENT_KEYRINGS = yes; 559 # enable temporary caching of the last request_key() result 560 KEYS_REQUEST_CACHE = whenAtLeast "5.3" yes; 561 + # randomized slab caches 562 + RANDOM_KMALLOC_CACHES = whenAtLeast "6.6" yes; 563 } // optionalAttrs stdenv.hostPlatform.isx86_64 { 564 # Enable Intel SGX 565 X86_SGX = whenAtLeast "5.11" yes; ··· 574 KVM_AMD_SEV = yes; 575 # AMD SEV-SNP 576 SEV_GUEST = whenAtLeast "5.19" module; 577 + # Shadow stacks 578 + X86_USER_SHADOW_STACK = whenAtLeast "6.6" yes; 579 }; 580 581 microcode = {
+4
pkgs/os-specific/linux/kernel/kernels-org.json
··· 30 "4.14": { 31 "version": "4.14.328", 32 "hash": "sha256:1igcpvnhwwrczfdsafmszvi0456k7f6j4cgpfw6v6afw09p95d8x" 33 } 34 }
··· 30 "4.14": { 31 "version": "4.14.328", 32 "hash": "sha256:1igcpvnhwwrczfdsafmszvi0456k7f6j4cgpfw6v6afw09p95d8x" 33 + }, 34 + "6.6": { 35 + "version": "6.6", 36 + "hash": "sha256:1l2nisx9lf2vdgkq910n5ldbi8z25ky1zvl67zgwg2nxcdna09nr" 37 } 38 }
+4 -4
pkgs/os-specific/linux/kernel/xanmod-kernels.nix
··· 6 # NOTE: When updating these, please also take a look at the changes done to 7 # kernel config in the xanmod version commit 8 ltsVariant = { 9 - version = "6.1.58"; 10 - hash = "sha256-Lnp1CSh1jLbIkEx9hLfxhdIA12iQZmywhOec9uZ7UjI="; 11 variant = "lts"; 12 }; 13 14 mainVariant = { 15 - version = "6.5.8"; 16 - hash = "sha256-lHi+O7RE6YdiqPmuxHajGkc7jS9F5cB89+JbTVKkB/c="; 17 variant = "main"; 18 }; 19
··· 6 # NOTE: When updating these, please also take a look at the changes done to 7 # kernel config in the xanmod version commit 8 ltsVariant = { 9 + version = "6.1.60"; 10 + hash = "sha256-KYCeONJxyFPee4pvBLRw/MBTzPU7D2oZCrAVr3t/yPM="; 11 variant = "lts"; 12 }; 13 14 mainVariant = { 15 + version = "6.5.9"; 16 + hash = "sha256-5SFPBsDTmq7tA6pyM7rbIjBPAtPbqhUl6VfA2z5baPA="; 17 variant = "main"; 18 }; 19
+2 -4
pkgs/servers/calibre-web/default.nix
··· 25 in 26 python.pkgs.buildPythonApplication rec { 27 pname = "calibre-web"; 28 - version = "0.6.20"; 29 30 src = fetchFromGitHub { 31 owner = "janeczku"; 32 repo = "calibre-web"; 33 rev = version; 34 - hash = "sha256-0lArY1aTpO4sgIVDSqClYMGlip92f9hE/L2UouTLK8Q="; 35 }; 36 37 propagatedBuildInputs = with python.pkgs; [ ··· 64 # and exit. This is gonna be used to configure calibre-web declaratively, as most of its configuration parameters 65 # are stored in the DB. 66 ./db-migrations.patch 67 - # environ in tornado.wsgi.WSGIContainer no longer a static method from 6.3 version 68 - ./static_environ.patch 69 ]; 70 71 # calibre-web doesn't follow setuptools directory structure. The following is taken from the script
··· 25 in 26 python.pkgs.buildPythonApplication rec { 27 pname = "calibre-web"; 28 + version = "0.6.21"; 29 30 src = fetchFromGitHub { 31 owner = "janeczku"; 32 repo = "calibre-web"; 33 rev = version; 34 + hash = "sha256-tRrOquetn3P2NmrXq7DQHRGP1sWnLR7bV2Lw0W/lUPQ="; 35 }; 36 37 propagatedBuildInputs = with python.pkgs; [ ··· 64 # and exit. This is gonna be used to configure calibre-web declaratively, as most of its configuration parameters 65 # are stored in the DB. 66 ./db-migrations.patch 67 ]; 68 69 # calibre-web doesn't follow setuptools directory structure. The following is taken from the script
-25
pkgs/servers/calibre-web/static_environ.patch
··· 1 - diff --git a/cps/tornado_wsgi.py b/cps/tornado_wsgi.py 2 - index af93219c..cf302042 100644 3 - --- a/cps/tornado_wsgi.py 4 - +++ b/cps/tornado_wsgi.py 5 - @@ -53,7 +53,7 @@ class MyWSGIContainer(WSGIContainer): 6 - return response.append 7 - 8 - app_response = self.wsgi_application( 9 - - MyWSGIContainer.environ(request), start_response 10 - + self.environ(request), start_response 11 - ) 12 - try: 13 - response.extend(app_response) 14 - @@ -86,9 +86,8 @@ class MyWSGIContainer(WSGIContainer): 15 - request.connection.finish() 16 - self._log(status_code, request) 17 - 18 - - @staticmethod 19 - - def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]: 20 - - environ = WSGIContainer.environ(request) 21 - + def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]: 22 - + environ = super().environ(request) 23 - environ['RAW_URI'] = request.path 24 - return environ 25 -
···
+5 -5
pkgs/servers/pr-tracker/default.nix
··· 1 { rustPlatform 2 , lib 3 - , fetchurl 4 , openssl 5 , pkg-config 6 , systemd ··· 8 9 rustPlatform.buildRustPackage rec { 10 pname = "pr-tracker"; 11 - version = "1.2.0"; 12 13 - src = fetchurl { 14 url = "https://git.qyliss.net/pr-tracker/snapshot/pr-tracker-${version}.tar.xz"; 15 - sha256 = "sha256-Tru9DsitRQLiO4Ln70J9LvkEqcj2i4A+eArBvIhd/ls="; 16 }; 17 18 - cargoSha256 = "0q3ibxnzw8gngvrgfkv4m64dr411c511xkvb6j9k63vhy9vwarz7"; 19 20 nativeBuildInputs = [ pkg-config ]; 21 buildInputs = [ openssl systemd ];
··· 1 { rustPlatform 2 , lib 3 + , fetchzip 4 , openssl 5 , pkg-config 6 , systemd ··· 8 9 rustPlatform.buildRustPackage rec { 10 pname = "pr-tracker"; 11 + version = "1.3.0"; 12 13 + src = fetchzip { 14 url = "https://git.qyliss.net/pr-tracker/snapshot/pr-tracker-${version}.tar.xz"; 15 + hash = "sha256-JetfcA7Pn6nsCxCkgxP4jS6tijx89any/0GrmLa+DR0="; 16 }; 17 18 + cargoSha256 = "sha256-QUr0IHmzbhFNd6rBDEX8RZul/d1TLv0t+ySCQYMlpmE="; 19 20 nativeBuildInputs = [ pkg-config ]; 21 buildInputs = [ openssl systemd ];
+16
pkgs/test/nixpkgs-check-by-name/Cargo.lock
··· 163 checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" 164 165 [[package]] 166 name = "errno" 167 version = "0.3.2" 168 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 219 ] 220 221 [[package]] 222 name = "itoa" 223 version = "1.0.9" 224 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 274 "anyhow", 275 "clap", 276 "colored", 277 "lazy_static", 278 "regex", 279 "rnix",
··· 163 checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" 164 165 [[package]] 166 + name = "either" 167 + version = "1.9.0" 168 + source = "registry+https://github.com/rust-lang/crates.io-index" 169 + checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" 170 + 171 + [[package]] 172 name = "errno" 173 version = "0.3.2" 174 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 225 ] 226 227 [[package]] 228 + name = "itertools" 229 + version = "0.11.0" 230 + source = "registry+https://github.com/rust-lang/crates.io-index" 231 + checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" 232 + dependencies = [ 233 + "either", 234 + ] 235 + 236 + [[package]] 237 name = "itoa" 238 version = "1.0.9" 239 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 289 "anyhow", 290 "clap", 291 "colored", 292 + "itertools", 293 "lazy_static", 294 "regex", 295 "rnix",
+1
pkgs/test/nixpkgs-check-by-name/Cargo.toml
··· 13 anyhow = "1.0" 14 lazy_static = "1.4.0" 15 colored = "2.0.4" 16 17 [dev-dependencies] 18 temp-env = "0.3.5"
··· 13 anyhow = "1.0" 14 lazy_static = "1.4.0" 15 colored = "2.0.4" 16 + itertools = "0.11.0" 17 18 [dev-dependencies] 19 temp-env = "0.3.5"
+2 -2
pkgs/test/nixpkgs-check-by-name/README.md
··· 1 # Nixpkgs pkgs/by-name checker 2 3 - This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory once introduced. 4 It is being used by [this GitHub Actions workflow](../../../.github/workflows/check-by-name.yml). 5 This is part of the implementation of [RFC 140](https://github.com/NixOS/rfcs/pull/140). 6 ··· 24 - `2`: If an unexpected I/O error occurs 25 - Standard error: 26 - Informative messages 27 - - Error messages if validation is not successful 28 29 ## Validity checks 30
··· 1 # Nixpkgs pkgs/by-name checker 2 3 + This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory. 4 It is being used by [this GitHub Actions workflow](../../../.github/workflows/check-by-name.yml). 5 This is part of the implementation of [RFC 140](https://github.com/NixOS/rfcs/pull/140). 6 ··· 24 - `2`: If an unexpected I/O error occurs 25 - Standard error: 26 - Informative messages 27 + - Detected problems if validation is not successful 28 29 ## Validity checks 30
+56 -54
pkgs/test/nixpkgs-check-by-name/src/eval.rs
··· 1 use crate::structure; 2 - use crate::utils::ErrorWriter; 3 use crate::Version; 4 use std::path::Path; 5 6 use anyhow::Context; 7 use serde::Deserialize; 8 use std::collections::HashMap; 9 - use std::io; 10 use std::path::PathBuf; 11 use std::process; 12 use tempfile::NamedTempFile; ··· 40 /// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are 41 /// of the form `callPackage <package_file> { ... }`. 42 /// See the `eval.nix` file for how this is achieved on the Nix side 43 - pub fn check_values<W: io::Write>( 44 version: Version, 45 - error_writer: &mut ErrorWriter<W>, 46 - nixpkgs: &structure::Nixpkgs, 47 eval_accessible_paths: Vec<&Path>, 48 - ) -> anyhow::Result<()> { 49 // Write the list of packages we need to check into a temporary JSON file. 50 // This can then get read by the Nix evaluation. 51 let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?; ··· 55 // entry is needed. 56 let attrs_file_path = attrs_file.path().canonicalize()?; 57 58 - serde_json::to_writer(&attrs_file, &nixpkgs.package_names).context(format!( 59 "Failed to serialise the package names to the temporary path {}", 60 attrs_file_path.display() 61 ))?; ··· 87 .arg(&attrs_file_path) 88 // Same for the nixpkgs to test 89 .args(["--arg", "nixpkgsPath"]) 90 - .arg(&nixpkgs.path) 91 .arg("-I") 92 - .arg(&nixpkgs.path); 93 94 // Also add extra paths that need to be accessible 95 for path in eval_accessible_paths { ··· 111 String::from_utf8_lossy(&result.stdout) 112 ))?; 113 114 - for package_name in &nixpkgs.package_names { 115 - let relative_package_file = structure::Nixpkgs::relative_file_for_package(package_name); 116 - let absolute_package_file = nixpkgs.path.join(&relative_package_file); 117 118 - if let Some(attribute_info) = actual_files.get(package_name) { 119 - let valid = match &attribute_info.variant { 120 - AttributeVariant::AutoCalled => true, 121 - AttributeVariant::CallPackage { path, empty_arg } => { 122 - 
let correct_file = if let Some(call_package_path) = path { 123 - absolute_package_file == *call_package_path 124 - } else { 125 - false 126 - }; 127 - // Only check for the argument to be non-empty if the version is V1 or 128 - // higher 129 - let non_empty = if version >= Version::V1 { 130 - !empty_arg 131 - } else { 132 - true 133 - }; 134 - correct_file && non_empty 135 - } 136 - AttributeVariant::Other => false, 137 - }; 138 - 139 - if !valid { 140 - error_writer.write(&format!( 141 - "pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.", 142 - relative_package_file.display() 143 - ))?; 144 - continue; 145 - } 146 147 - if !attribute_info.is_derivation { 148 - error_writer.write(&format!( 149 - "pkgs.{package_name}: This attribute defined by {} is not a derivation", 150 - relative_package_file.display() 151 - ))?; 152 } 153 - } else { 154 - error_writer.write(&format!( 155 - "pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}", 156 - relative_package_file.display() 157 - ))?; 158 - continue; 159 - } 160 - } 161 - Ok(()) 162 }
··· 1 + use crate::nixpkgs_problem::NixpkgsProblem; 2 use crate::structure; 3 + use crate::validation::{self, Validation::Success}; 4 use crate::Version; 5 use std::path::Path; 6 7 use anyhow::Context; 8 use serde::Deserialize; 9 use std::collections::HashMap; 10 use std::path::PathBuf; 11 use std::process; 12 use tempfile::NamedTempFile; ··· 40 /// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are 41 /// of the form `callPackage <package_file> { ... }`. 42 /// See the `eval.nix` file for how this is achieved on the Nix side 43 + pub fn check_values( 44 version: Version, 45 + nixpkgs_path: &Path, 46 + package_names: Vec<String>, 47 eval_accessible_paths: Vec<&Path>, 48 + ) -> validation::Result<()> { 49 // Write the list of packages we need to check into a temporary JSON file. 50 // This can then get read by the Nix evaluation. 51 let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?; ··· 55 // entry is needed. 56 let attrs_file_path = attrs_file.path().canonicalize()?; 57 58 + serde_json::to_writer(&attrs_file, &package_names).context(format!( 59 "Failed to serialise the package names to the temporary path {}", 60 attrs_file_path.display() 61 ))?; ··· 87 .arg(&attrs_file_path) 88 // Same for the nixpkgs to test 89 .args(["--arg", "nixpkgsPath"]) 90 + .arg(nixpkgs_path) 91 .arg("-I") 92 + .arg(nixpkgs_path); 93 94 // Also add extra paths that need to be accessible 95 for path in eval_accessible_paths { ··· 111 String::from_utf8_lossy(&result.stdout) 112 ))?; 113 114 + Ok(validation::sequence_(package_names.iter().map( 115 + |package_name| { 116 + let relative_package_file = structure::relative_file_for_package(package_name); 117 + let absolute_package_file = nixpkgs_path.join(&relative_package_file); 118 119 + if let Some(attribute_info) = actual_files.get(package_name) { 120 + let valid = match &attribute_info.variant { 121 + AttributeVariant::AutoCalled => true, 122 + 
AttributeVariant::CallPackage { path, empty_arg } => { 123 + let correct_file = if let Some(call_package_path) = path { 124 + absolute_package_file == *call_package_path 125 + } else { 126 + false 127 + }; 128 + // Only check for the argument to be non-empty if the version is V1 or 129 + // higher 130 + let non_empty = if version >= Version::V1 { 131 + !empty_arg 132 + } else { 133 + true 134 + }; 135 + correct_file && non_empty 136 + } 137 + AttributeVariant::Other => false, 138 + }; 139 140 + if !valid { 141 + NixpkgsProblem::WrongCallPackage { 142 + relative_package_file: relative_package_file.clone(), 143 + package_name: package_name.clone(), 144 + } 145 + .into() 146 + } else if !attribute_info.is_derivation { 147 + NixpkgsProblem::NonDerivation { 148 + relative_package_file: relative_package_file.clone(), 149 + package_name: package_name.clone(), 150 + } 151 + .into() 152 + } else { 153 + Success(()) 154 + } 155 + } else { 156 + NixpkgsProblem::UndefinedAttr { 157 + relative_package_file: relative_package_file.clone(), 158 + package_name: package_name.clone(), 159 + } 160 + .into() 161 } 162 + }, 163 + ))) 164 }
+28 -18
pkgs/test/nixpkgs-check-by-name/src/main.rs
··· 1 mod eval; 2 mod references; 3 mod structure; 4 mod utils; 5 6 use anyhow::Context; 7 use clap::{Parser, ValueEnum}; 8 use colored::Colorize; 9 use std::io; 10 use std::path::{Path, PathBuf}; 11 use std::process::ExitCode; 12 - use structure::Nixpkgs; 13 - use utils::ErrorWriter; 14 15 /// Program to check the validity of pkgs/by-name 16 #[derive(Parser, Debug)] ··· 63 /// 64 /// # Return value 65 /// - `Err(e)` if an I/O-related error `e` occurred. 66 - /// - `Ok(false)` if the structure is invalid, all the structural errors have been written to `error_writer`. 67 - /// - `Ok(true)` if the structure is valid, nothing will have been written to `error_writer`. 68 pub fn check_nixpkgs<W: io::Write>( 69 nixpkgs_path: &Path, 70 version: Version, ··· 76 nixpkgs_path.display() 77 ))?; 78 79 - // Wraps the error_writer to print everything in red, and tracks whether anything was printed 80 - // at all. Later used to figure out if the structure was valid or not. 81 - let mut error_writer = ErrorWriter::new(error_writer); 82 - 83 - if !nixpkgs_path.join(structure::BASE_SUBPATH).exists() { 84 eprintln!( 85 "Given Nixpkgs path does not contain a {} subdirectory, no check necessary.", 86 - structure::BASE_SUBPATH 87 ); 88 } else { 89 - let nixpkgs = Nixpkgs::new(&nixpkgs_path, &mut error_writer)?; 90 91 - if error_writer.empty { 92 - // Only if we could successfully parse the structure, we do the semantic checks 93 - eval::check_values(version, &mut error_writer, &nixpkgs, eval_accessible_paths)?; 94 - references::check_references(&mut error_writer, &nixpkgs)?; 95 } 96 } 97 - Ok(error_writer.empty) 98 } 99 100 #[cfg(test)] 101 mod tests { 102 use crate::check_nixpkgs; 103 - use crate::structure; 104 use crate::Version; 105 use anyhow::Context; 106 use std::fs; ··· 145 return Ok(()); 146 } 147 148 - let base = path.join(structure::BASE_SUBPATH); 149 150 fs::create_dir_all(base.join("fo/foo"))?; 151 fs::write(base.join("fo/foo/package.nix"), "{ someDrv }: someDrv")?;
··· 1 mod eval; 2 + mod nixpkgs_problem; 3 mod references; 4 mod structure; 5 mod utils; 6 + mod validation; 7 8 + use crate::structure::check_structure; 9 + use crate::validation::Validation::Failure; 10 + use crate::validation::Validation::Success; 11 use anyhow::Context; 12 use clap::{Parser, ValueEnum}; 13 use colored::Colorize; 14 use std::io; 15 use std::path::{Path, PathBuf}; 16 use std::process::ExitCode; 17 18 /// Program to check the validity of pkgs/by-name 19 #[derive(Parser, Debug)] ··· 66 /// 67 /// # Return value 68 /// - `Err(e)` if an I/O-related error `e` occurred. 69 + /// - `Ok(false)` if there are problems, all of which will be written to `error_writer`. 70 + /// - `Ok(true)` if there are no problems 71 pub fn check_nixpkgs<W: io::Write>( 72 nixpkgs_path: &Path, 73 version: Version, ··· 79 nixpkgs_path.display() 80 ))?; 81 82 + let check_result = if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() { 83 eprintln!( 84 "Given Nixpkgs path does not contain a {} subdirectory, no check necessary.", 85 + utils::BASE_SUBPATH 86 ); 87 + Success(()) 88 } else { 89 + match check_structure(&nixpkgs_path)? { 90 + Failure(errors) => Failure(errors), 91 + Success(package_names) => 92 + // Only if we could successfully parse the structure, we do the evaluation checks 93 + { 94 + eval::check_values(version, &nixpkgs_path, package_names, eval_accessible_paths)? 95 + } 96 + } 97 + }; 98 99 + match check_result { 100 + Failure(errors) => { 101 + for error in errors { 102 + writeln!(error_writer, "{}", error.to_string().red())? 103 + } 104 + Ok(false) 105 } 106 + Success(_) => Ok(true), 107 } 108 } 109 110 #[cfg(test)] 111 mod tests { 112 use crate::check_nixpkgs; 113 + use crate::utils; 114 use crate::Version; 115 use anyhow::Context; 116 use std::fs; ··· 155 return Ok(()); 156 } 157 158 + let base = path.join(utils::BASE_SUBPATH); 159 160 fs::create_dir_all(base.join("fo/foo"))?; 161 fs::write(base.join("fo/foo/package.nix"), "{ someDrv }: someDrv")?;
+218
pkgs/test/nixpkgs-check-by-name/src/nixpkgs_problem.rs
···
··· 1 + use crate::utils::PACKAGE_NIX_FILENAME; 2 + use rnix::parser::ParseError; 3 + use std::ffi::OsString; 4 + use std::fmt; 5 + use std::io; 6 + use std::path::PathBuf; 7 + 8 + /// Any problem that can occur when checking Nixpkgs 9 + pub enum NixpkgsProblem { 10 + ShardNonDir { 11 + relative_shard_path: PathBuf, 12 + }, 13 + InvalidShardName { 14 + relative_shard_path: PathBuf, 15 + shard_name: String, 16 + }, 17 + PackageNonDir { 18 + relative_package_dir: PathBuf, 19 + }, 20 + CaseSensitiveDuplicate { 21 + relative_shard_path: PathBuf, 22 + first: OsString, 23 + second: OsString, 24 + }, 25 + InvalidPackageName { 26 + relative_package_dir: PathBuf, 27 + package_name: String, 28 + }, 29 + IncorrectShard { 30 + relative_package_dir: PathBuf, 31 + correct_relative_package_dir: PathBuf, 32 + }, 33 + PackageNixNonExistent { 34 + relative_package_dir: PathBuf, 35 + }, 36 + PackageNixDir { 37 + relative_package_dir: PathBuf, 38 + }, 39 + UndefinedAttr { 40 + relative_package_file: PathBuf, 41 + package_name: String, 42 + }, 43 + WrongCallPackage { 44 + relative_package_file: PathBuf, 45 + package_name: String, 46 + }, 47 + NonDerivation { 48 + relative_package_file: PathBuf, 49 + package_name: String, 50 + }, 51 + OutsideSymlink { 52 + relative_package_dir: PathBuf, 53 + subpath: PathBuf, 54 + }, 55 + UnresolvableSymlink { 56 + relative_package_dir: PathBuf, 57 + subpath: PathBuf, 58 + io_error: io::Error, 59 + }, 60 + CouldNotParseNix { 61 + relative_package_dir: PathBuf, 62 + subpath: PathBuf, 63 + error: ParseError, 64 + }, 65 + PathInterpolation { 66 + relative_package_dir: PathBuf, 67 + subpath: PathBuf, 68 + line: usize, 69 + text: String, 70 + }, 71 + SearchPath { 72 + relative_package_dir: PathBuf, 73 + subpath: PathBuf, 74 + line: usize, 75 + text: String, 76 + }, 77 + OutsidePathReference { 78 + relative_package_dir: PathBuf, 79 + subpath: PathBuf, 80 + line: usize, 81 + text: String, 82 + }, 83 + UnresolvablePathReference { 84 + relative_package_dir: 
PathBuf, 85 + subpath: PathBuf, 86 + line: usize, 87 + text: String, 88 + io_error: io::Error, 89 + }, 90 + } 91 + 92 + impl fmt::Display for NixpkgsProblem { 93 + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 94 + match self { 95 + NixpkgsProblem::ShardNonDir { relative_shard_path } => 96 + write!( 97 + f, 98 + "{}: This is a file, but it should be a directory.", 99 + relative_shard_path.display(), 100 + ), 101 + NixpkgsProblem::InvalidShardName { relative_shard_path, shard_name } => 102 + write!( 103 + f, 104 + "{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".", 105 + relative_shard_path.display() 106 + ), 107 + NixpkgsProblem::PackageNonDir { relative_package_dir } => 108 + write!( 109 + f, 110 + "{}: This path is a file, but it should be a directory.", 111 + relative_package_dir.display(), 112 + ), 113 + NixpkgsProblem::CaseSensitiveDuplicate { relative_shard_path, first, second } => 114 + write!( 115 + f, 116 + "{}: Duplicate case-sensitive package directories {first:?} and {second:?}.", 117 + relative_shard_path.display(), 118 + ), 119 + NixpkgsProblem::InvalidPackageName { relative_package_dir, package_name } => 120 + write!( 121 + f, 122 + "{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".", 123 + relative_package_dir.display(), 124 + ), 125 + NixpkgsProblem::IncorrectShard { relative_package_dir, correct_relative_package_dir } => 126 + write!( 127 + f, 128 + "{}: Incorrect directory location, should be {} instead.", 129 + relative_package_dir.display(), 130 + correct_relative_package_dir.display(), 131 + ), 132 + NixpkgsProblem::PackageNixNonExistent { relative_package_dir } => 133 + write!( 134 + f, 135 + "{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.", 136 + relative_package_dir.display(), 137 + ), 138 + NixpkgsProblem::PackageNixDir { relative_package_dir } => 139 + write!( 140 + f, 141 + "{}: 
\"{PACKAGE_NIX_FILENAME}\" must be a file.", 142 + relative_package_dir.display(), 143 + ), 144 + NixpkgsProblem::UndefinedAttr { relative_package_file, package_name } => 145 + write!( 146 + f, 147 + "pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}", 148 + relative_package_file.display() 149 + ), 150 + NixpkgsProblem::WrongCallPackage { relative_package_file, package_name } => 151 + write!( 152 + f, 153 + "pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.", 154 + relative_package_file.display() 155 + ), 156 + NixpkgsProblem::NonDerivation { relative_package_file, package_name } => 157 + write!( 158 + f, 159 + "pkgs.{package_name}: This attribute defined by {} is not a derivation", 160 + relative_package_file.display() 161 + ), 162 + NixpkgsProblem::OutsideSymlink { relative_package_dir, subpath } => 163 + write!( 164 + f, 165 + "{}: Path {} is a symlink pointing to a path outside the directory of that package.", 166 + relative_package_dir.display(), 167 + subpath.display(), 168 + ), 169 + NixpkgsProblem::UnresolvableSymlink { relative_package_dir, subpath, io_error } => 170 + write!( 171 + f, 172 + "{}: Path {} is a symlink which cannot be resolved: {io_error}.", 173 + relative_package_dir.display(), 174 + subpath.display(), 175 + ), 176 + NixpkgsProblem::CouldNotParseNix { relative_package_dir, subpath, error } => 177 + write!( 178 + f, 179 + "{}: File {} could not be parsed by rnix: {}", 180 + relative_package_dir.display(), 181 + subpath.display(), 182 + error, 183 + ), 184 + NixpkgsProblem::PathInterpolation { relative_package_dir, subpath, line, text } => 185 + write!( 186 + f, 187 + "{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.", 188 + 
relative_package_dir.display(), 189 + subpath.display(), 190 + text 191 + ), 192 + NixpkgsProblem::SearchPath { relative_package_dir, subpath, line, text } => 193 + write!( 194 + f, 195 + "{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.", 196 + relative_package_dir.display(), 197 + subpath.display(), 198 + text 199 + ), 200 + NixpkgsProblem::OutsidePathReference { relative_package_dir, subpath, line, text } => 201 + write!( 202 + f, 203 + "{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.", 204 + relative_package_dir.display(), 205 + subpath.display(), 206 + text, 207 + ), 208 + NixpkgsProblem::UnresolvablePathReference { relative_package_dir, subpath, line, text, io_error } => 209 + write!( 210 + f, 211 + "{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {io_error}.", 212 + relative_package_dir.display(), 213 + subpath.display(), 214 + text, 215 + ), 216 + } 217 + } 218 + }
+118 -127
pkgs/test/nixpkgs-check-by-name/src/references.rs
··· 1 - use crate::structure::Nixpkgs; 2 use crate::utils; 3 - use crate::utils::{ErrorWriter, LineIndex}; 4 5 use anyhow::Context; 6 use rnix::{Root, SyntaxKind::NODE_PATH}; 7 use std::ffi::OsStr; 8 use std::fs::read_to_string; 9 - use std::io; 10 - use std::path::{Path, PathBuf}; 11 - 12 - /// Small helper so we don't need to pass in the same arguments to all functions 13 - struct PackageContext<'a, W: io::Write> { 14 - error_writer: &'a mut ErrorWriter<W>, 15 - /// The package directory relative to Nixpkgs, such as `pkgs/by-name/fo/foo` 16 - relative_package_dir: &'a PathBuf, 17 - /// The absolute package directory 18 - absolute_package_dir: &'a PathBuf, 19 - } 20 21 /// Check that every package directory in pkgs/by-name doesn't link to outside that directory. 22 /// Both symlinks and Nix path expressions are checked. 23 - pub fn check_references<W: io::Write>( 24 - error_writer: &mut ErrorWriter<W>, 25 - nixpkgs: &Nixpkgs, 26 - ) -> anyhow::Result<()> { 27 - // Check the directories for each package separately 28 - for package_name in &nixpkgs.package_names { 29 - let relative_package_dir = Nixpkgs::relative_dir_for_package(package_name); 30 - let mut context = PackageContext { 31 - error_writer, 32 - relative_package_dir: &relative_package_dir, 33 - absolute_package_dir: &nixpkgs.path.join(&relative_package_dir), 34 - }; 35 - 36 - // The empty argument here is the subpath under the package directory to check 37 - // An empty one means the package directory itself 38 - check_path(&mut context, Path::new("")).context(format!( 39 - "While checking the references in package directory {}", 40 - relative_package_dir.display() 41 - ))?; 42 - } 43 - Ok(()) 44 } 45 46 /// Checks for a specific path to not have references outside 47 - fn check_path<W: io::Write>(context: &mut PackageContext<W>, subpath: &Path) -> anyhow::Result<()> { 48 - let path = context.absolute_package_dir.join(subpath); 49 50 - if path.is_symlink() { 51 // Check whether the symlink resolves to 
outside the package directory 52 match path.canonicalize() { 53 Ok(target) => { 54 // No need to handle the case of it being inside the directory, since we scan through the 55 // entire directory recursively anyways 56 - if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) { 57 - context.error_writer.write(&format!( 58 - "{}: Path {} is a symlink pointing to a path outside the directory of that package.", 59 - context.relative_package_dir.display(), 60 - subpath.display(), 61 - ))?; 62 } 63 } 64 - Err(e) => { 65 - context.error_writer.write(&format!( 66 - "{}: Path {} is a symlink which cannot be resolved: {e}.", 67 - context.relative_package_dir.display(), 68 - subpath.display(), 69 - ))?; 70 } 71 } 72 } else if path.is_dir() { 73 // Recursively check each entry 74 - for entry in utils::read_dir_sorted(&path)? { 75 - let entry_subpath = subpath.join(entry.file_name()); 76 - check_path(context, &entry_subpath) 77 - .context(format!("Error while recursing into {}", subpath.display()))? 78 - } 79 } else if path.is_file() { 80 // Only check Nix files 81 if let Some(ext) = path.extension() { 82 if ext == OsStr::new("nix") { 83 - check_nix_file(context, subpath).context(format!( 84 - "Error while checking Nix file {}", 85 - subpath.display() 86 - ))? 
87 } 88 } 89 } else { 90 // This should never happen, git doesn't support other file types 91 anyhow::bail!("Unsupported file type for path {}", subpath.display()); 92 - } 93 - Ok(()) 94 } 95 96 /// Check whether a nix file contains path expression references pointing outside the package 97 /// directory 98 - fn check_nix_file<W: io::Write>( 99 - context: &mut PackageContext<W>, 100 subpath: &Path, 101 - ) -> anyhow::Result<()> { 102 - let path = context.absolute_package_dir.join(subpath); 103 let parent_dir = path.parent().context(format!( 104 "Could not get parent of path {}", 105 subpath.display() ··· 110 111 let root = Root::parse(&contents); 112 if let Some(error) = root.errors().first() { 113 - context.error_writer.write(&format!( 114 - "{}: File {} could not be parsed by rnix: {}", 115 - context.relative_package_dir.display(), 116 - subpath.display(), 117 - error, 118 - ))?; 119 - return Ok(()); 120 } 121 122 let line_index = LineIndex::new(&contents); 123 124 - for node in root.syntax().descendants() { 125 - // We're only interested in Path expressions 126 - if node.kind() != NODE_PATH { 127 - continue; 128 - } 129 130 - let text = node.text().to_string(); 131 - let line = line_index.line(node.text_range().start().into()); 132 - 133 - // Filters out ./foo/${bar}/baz 134 - // TODO: We can just check ./foo 135 - if node.children().count() != 0 { 136 - context.error_writer.write(&format!( 137 - "{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.", 138 - context.relative_package_dir.display(), 139 - subpath.display(), 140 - text 141 - ))?; 142 - continue; 143 - } 144 - 145 - // Filters out search paths like <nixpkgs> 146 - if text.starts_with('<') { 147 - context.error_writer.write(&format!( 148 - "{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.", 149 - 
context.relative_package_dir.display(), 150 - subpath.display(), 151 - text 152 - ))?; 153 - continue; 154 - } 155 - 156 - // Resolves the reference of the Nix path 157 - // turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz` 158 - match parent_dir.join(Path::new(&text)).canonicalize() { 159 - Ok(target) => { 160 - // Then checking if it's still in the package directory 161 - // No need to handle the case of it being inside the directory, since we scan through the 162 - // entire directory recursively anyways 163 - if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) { 164 - context.error_writer.write(&format!( 165 - "{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.", 166 - context.relative_package_dir.display(), 167 - subpath.display(), 168 - text, 169 - ))?; 170 } 171 - } 172 - Err(e) => { 173 - context.error_writer.write(&format!( 174 - "{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {e}.", 175 - context.relative_package_dir.display(), 176 - subpath.display(), 177 text, 178 - ))?; 179 } 180 - }; 181 - } 182 - 183 - Ok(()) 184 }
··· 1 + use crate::nixpkgs_problem::NixpkgsProblem; 2 use crate::utils; 3 + use crate::utils::LineIndex; 4 + use crate::validation::{self, ResultIteratorExt, Validation::Success}; 5 6 use anyhow::Context; 7 use rnix::{Root, SyntaxKind::NODE_PATH}; 8 use std::ffi::OsStr; 9 use std::fs::read_to_string; 10 + use std::path::Path; 11 12 /// Check that every package directory in pkgs/by-name doesn't link to outside that directory. 13 /// Both symlinks and Nix path expressions are checked. 14 + pub fn check_references( 15 + relative_package_dir: &Path, 16 + absolute_package_dir: &Path, 17 + ) -> validation::Result<()> { 18 + // The empty argument here is the subpath under the package directory to check 19 + // An empty one means the package directory itself 20 + check_path(relative_package_dir, absolute_package_dir, Path::new("")).context(format!( 21 + "While checking the references in package directory {}", 22 + relative_package_dir.display() 23 + )) 24 } 25 26 /// Checks for a specific path to not have references outside 27 + fn check_path( 28 + relative_package_dir: &Path, 29 + absolute_package_dir: &Path, 30 + subpath: &Path, 31 + ) -> validation::Result<()> { 32 + let path = absolute_package_dir.join(subpath); 33 34 + Ok(if path.is_symlink() { 35 // Check whether the symlink resolves to outside the package directory 36 match path.canonicalize() { 37 Ok(target) => { 38 // No need to handle the case of it being inside the directory, since we scan through the 39 // entire directory recursively anyways 40 + if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) { 41 + NixpkgsProblem::OutsideSymlink { 42 + relative_package_dir: relative_package_dir.to_path_buf(), 43 + subpath: subpath.to_path_buf(), 44 + } 45 + .into() 46 + } else { 47 + Success(()) 48 } 49 } 50 + Err(io_error) => NixpkgsProblem::UnresolvableSymlink { 51 + relative_package_dir: relative_package_dir.to_path_buf(), 52 + subpath: subpath.to_path_buf(), 53 + io_error, 54 } 55 + .into(), 56 } 57 
} else if path.is_dir() { 58 // Recursively check each entry 59 + validation::sequence_( 60 + utils::read_dir_sorted(&path)? 61 + .into_iter() 62 + .map(|entry| { 63 + let entry_subpath = subpath.join(entry.file_name()); 64 + check_path(relative_package_dir, absolute_package_dir, &entry_subpath) 65 + .context(format!("Error while recursing into {}", subpath.display())) 66 + }) 67 + .collect_vec()?, 68 + ) 69 } else if path.is_file() { 70 // Only check Nix files 71 if let Some(ext) = path.extension() { 72 if ext == OsStr::new("nix") { 73 + check_nix_file(relative_package_dir, absolute_package_dir, subpath).context( 74 + format!("Error while checking Nix file {}", subpath.display()), 75 + )? 76 + } else { 77 + Success(()) 78 } 79 + } else { 80 + Success(()) 81 } 82 } else { 83 // This should never happen, git doesn't support other file types 84 anyhow::bail!("Unsupported file type for path {}", subpath.display()); 85 + }) 86 } 87 88 /// Check whether a nix file contains path expression references pointing outside the package 89 /// directory 90 + fn check_nix_file( 91 + relative_package_dir: &Path, 92 + absolute_package_dir: &Path, 93 subpath: &Path, 94 + ) -> validation::Result<()> { 95 + let path = absolute_package_dir.join(subpath); 96 let parent_dir = path.parent().context(format!( 97 "Could not get parent of path {}", 98 subpath.display() ··· 103 104 let root = Root::parse(&contents); 105 if let Some(error) = root.errors().first() { 106 + return Ok(NixpkgsProblem::CouldNotParseNix { 107 + relative_package_dir: relative_package_dir.to_path_buf(), 108 + subpath: subpath.to_path_buf(), 109 + error: error.clone(), 110 + } 111 + .into()); 112 } 113 114 let line_index = LineIndex::new(&contents); 115 116 + Ok(validation::sequence_(root.syntax().descendants().map( 117 + |node| { 118 + let text = node.text().to_string(); 119 + let line = line_index.line(node.text_range().start().into()); 120 121 + if node.kind() != NODE_PATH { 122 + // We're only interested in Path 
expressions 123 + Success(()) 124 + } else if node.children().count() != 0 { 125 + // Filters out ./foo/${bar}/baz 126 + // TODO: We can just check ./foo 127 + NixpkgsProblem::PathInterpolation { 128 + relative_package_dir: relative_package_dir.to_path_buf(), 129 + subpath: subpath.to_path_buf(), 130 + line, 131 + text, 132 } 133 + .into() 134 + } else if text.starts_with('<') { 135 + // Filters out search paths like <nixpkgs> 136 + NixpkgsProblem::SearchPath { 137 + relative_package_dir: relative_package_dir.to_path_buf(), 138 + subpath: subpath.to_path_buf(), 139 + line, 140 text, 141 + } 142 + .into() 143 + } else { 144 + // Resolves the reference of the Nix path 145 + // turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz` 146 + match parent_dir.join(Path::new(&text)).canonicalize() { 147 + Ok(target) => { 148 + // Then checking if it's still in the package directory 149 + // No need to handle the case of it being inside the directory, since we scan through the 150 + // entire directory recursively anyways 151 + if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) { 152 + NixpkgsProblem::OutsidePathReference { 153 + relative_package_dir: relative_package_dir.to_path_buf(), 154 + subpath: subpath.to_path_buf(), 155 + line, 156 + text, 157 + } 158 + .into() 159 + } else { 160 + Success(()) 161 + } 162 + } 163 + Err(e) => NixpkgsProblem::UnresolvablePathReference { 164 + relative_package_dir: relative_package_dir.to_path_buf(), 165 + subpath: subpath.to_path_buf(), 166 + line, 167 + text, 168 + io_error: e, 169 + } 170 + .into(), 171 + } 172 } 173 + }, 174 + ))) 175 }
+134 -116
pkgs/test/nixpkgs-check-by-name/src/structure.rs
··· 1 use crate::utils; 2 - use crate::utils::ErrorWriter; 3 use lazy_static::lazy_static; 4 use regex::Regex; 5 - use std::collections::HashMap; 6 - use std::io; 7 use std::path::{Path, PathBuf}; 8 - 9 - pub const BASE_SUBPATH: &str = "pkgs/by-name"; 10 - pub const PACKAGE_NIX_FILENAME: &str = "package.nix"; 11 12 lazy_static! { 13 static ref SHARD_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_-]{1,2}$").unwrap(); 14 static ref PACKAGE_NAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap(); 15 } 16 17 - /// Contains information about the structure of the pkgs/by-name directory of a Nixpkgs 18 - pub struct Nixpkgs { 19 - /// The path to nixpkgs 20 - pub path: PathBuf, 21 - /// The names of all packages declared in pkgs/by-name 22 - pub package_names: Vec<String>, 23 } 24 25 - impl Nixpkgs { 26 - // Some utility functions for the basic structure 27 28 - pub fn shard_for_package(package_name: &str) -> String { 29 - package_name.to_lowercase().chars().take(2).collect() 30 - } 31 32 - pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf { 33 - PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}")) 34 - } 35 - 36 - pub fn relative_dir_for_package(package_name: &str) -> PathBuf { 37 - Nixpkgs::relative_dir_for_shard(&Nixpkgs::shard_for_package(package_name)) 38 - .join(package_name) 39 - } 40 - 41 - pub fn relative_file_for_package(package_name: &str) -> PathBuf { 42 - Nixpkgs::relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME) 43 - } 44 } 45 46 - impl Nixpkgs { 47 - /// Read the structure of a Nixpkgs directory, displaying errors on the writer. 48 - /// May return early with I/O errors. 49 - pub fn new<W: io::Write>( 50 - path: &Path, 51 - error_writer: &mut ErrorWriter<W>, 52 - ) -> anyhow::Result<Nixpkgs> { 53 - let base_dir = path.join(BASE_SUBPATH); 54 55 - let mut package_names = Vec::new(); 56 - 57 - for shard_entry in utils::read_dir_sorted(&base_dir)? 
{ 58 let shard_path = shard_entry.path(); 59 let shard_name = shard_entry.file_name().to_string_lossy().into_owned(); 60 - let relative_shard_path = Nixpkgs::relative_dir_for_shard(&shard_name); 61 62 - if shard_name == "README.md" { 63 // README.md is allowed to be a file and not checked 64 - continue; 65 - } 66 67 - if !shard_path.is_dir() { 68 - error_writer.write(&format!( 69 - "{}: This is a file, but it should be a directory.", 70 - relative_shard_path.display(), 71 - ))?; 72 // we can't check for any other errors if it's a file, since there's no subdirectories to check 73 - continue; 74 - } 75 76 - let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name); 77 - if !shard_name_valid { 78 - error_writer.write(&format!( 79 - "{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".", 80 - relative_shard_path.display() 81 - ))?; 82 - } 83 84 - let mut unique_package_names = HashMap::new(); 85 86 - for package_entry in utils::read_dir_sorted(&shard_path)? 
{ 87 - let package_path = package_entry.path(); 88 - let package_name = package_entry.file_name().to_string_lossy().into_owned(); 89 - let relative_package_dir = 90 - PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}")); 91 92 - if !package_path.is_dir() { 93 - error_writer.write(&format!( 94 - "{}: This path is a file, but it should be a directory.", 95 - relative_package_dir.display(), 96 - ))?; 97 - continue; 98 - } 99 100 - if let Some(duplicate_package_name) = 101 - unique_package_names.insert(package_name.to_lowercase(), package_name.clone()) 102 - { 103 - error_writer.write(&format!( 104 - "{}: Duplicate case-sensitive package directories \"{duplicate_package_name}\" and \"{package_name}\".", 105 - relative_shard_path.display(), 106 - ))?; 107 - } 108 109 - let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name); 110 - if !package_name_valid { 111 - error_writer.write(&format!( 112 - "{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".", 113 - relative_package_dir.display(), 114 - ))?; 115 - } 116 117 - let correct_relative_package_dir = Nixpkgs::relative_dir_for_package(&package_name); 118 - if relative_package_dir != correct_relative_package_dir { 119 - // Only show this error if we have a valid shard and package name 120 - // Because if one of those is wrong, you should fix that first 121 - if shard_name_valid && package_name_valid { 122 - error_writer.write(&format!( 123 - "{}: Incorrect directory location, should be {} instead.", 124 - relative_package_dir.display(), 125 - correct_relative_package_dir.display(), 126 - ))?; 127 - } 128 - } 129 130 - let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME); 131 - if !package_nix_path.exists() { 132 - error_writer.write(&format!( 133 - "{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.", 134 - relative_package_dir.display(), 135 - ))?; 136 - } else if package_nix_path.is_dir() { 137 - 
error_writer.write(&format!( 138 - "{}: \"{PACKAGE_NIX_FILENAME}\" must be a file.", 139 - relative_package_dir.display(), 140 - ))?; 141 } 142 143 - package_names.push(package_name.clone()); 144 } 145 - } 146 147 - Ok(Nixpkgs { 148 - path: path.to_owned(), 149 - package_names, 150 - }) 151 - } 152 }
··· 1 + use crate::nixpkgs_problem::NixpkgsProblem; 2 + use crate::references; 3 use crate::utils; 4 + use crate::utils::{BASE_SUBPATH, PACKAGE_NIX_FILENAME}; 5 + use crate::validation::{self, ResultIteratorExt, Validation::Success}; 6 + use itertools::concat; 7 use lazy_static::lazy_static; 8 use regex::Regex; 9 + use std::fs::DirEntry; 10 use std::path::{Path, PathBuf}; 11 12 lazy_static! { 13 static ref SHARD_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_-]{1,2}$").unwrap(); 14 static ref PACKAGE_NAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap(); 15 } 16 17 + // Some utility functions for the basic structure 18 + 19 + pub fn shard_for_package(package_name: &str) -> String { 20 + package_name.to_lowercase().chars().take(2).collect() 21 } 22 23 + pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf { 24 + PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}")) 25 + } 26 27 + pub fn relative_dir_for_package(package_name: &str) -> PathBuf { 28 + relative_dir_for_shard(&shard_for_package(package_name)).join(package_name) 29 + } 30 31 + pub fn relative_file_for_package(package_name: &str) -> PathBuf { 32 + relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME) 33 } 34 35 + /// Check the structure of Nixpkgs, returning the attribute names that are defined in 36 + /// `pkgs/by-name` 37 + pub fn check_structure(path: &Path) -> validation::Result<Vec<String>> { 38 + let base_dir = path.join(BASE_SUBPATH); 39 40 + let shard_results = utils::read_dir_sorted(&base_dir)? 
41 + .into_iter() 42 + .map(|shard_entry| -> validation::Result<_> { 43 let shard_path = shard_entry.path(); 44 let shard_name = shard_entry.file_name().to_string_lossy().into_owned(); 45 + let relative_shard_path = relative_dir_for_shard(&shard_name); 46 47 + Ok(if shard_name == "README.md" { 48 // README.md is allowed to be a file and not checked 49 50 + Success(vec![]) 51 + } else if !shard_path.is_dir() { 52 + NixpkgsProblem::ShardNonDir { 53 + relative_shard_path: relative_shard_path.clone(), 54 + } 55 + .into() 56 // we can't check for any other errors if it's a file, since there's no subdirectories to check 57 + } else { 58 + let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name); 59 + let result = if !shard_name_valid { 60 + NixpkgsProblem::InvalidShardName { 61 + relative_shard_path: relative_shard_path.clone(), 62 + shard_name: shard_name.clone(), 63 + } 64 + .into() 65 + } else { 66 + Success(()) 67 + }; 68 69 + let entries = utils::read_dir_sorted(&shard_path)?; 70 71 + let duplicate_results = entries 72 + .iter() 73 + .zip(entries.iter().skip(1)) 74 + .filter(|(l, r)| { 75 + l.file_name().to_ascii_lowercase() == r.file_name().to_ascii_lowercase() 76 + }) 77 + .map(|(l, r)| { 78 + NixpkgsProblem::CaseSensitiveDuplicate { 79 + relative_shard_path: relative_shard_path.clone(), 80 + first: l.file_name(), 81 + second: r.file_name(), 82 + } 83 + .into() 84 + }); 85 86 + let result = result.and(validation::sequence_(duplicate_results)); 87 88 + let package_results = entries 89 + .into_iter() 90 + .map(|package_entry| { 91 + check_package(path, &shard_name, shard_name_valid, package_entry) 92 + }) 93 + .collect_vec()?; 94 95 + result.and(validation::sequence(package_results)) 96 + }) 97 + }) 98 + .collect_vec()?; 99 100 + // Combine the package names conatained within each shard into a longer list 101 + Ok(validation::sequence(shard_results).map(concat)) 102 + } 103 + 104 + fn check_package( 105 + path: &Path, 106 + shard_name: &str, 107 + 
shard_name_valid: bool, 108 + package_entry: DirEntry, 109 + ) -> validation::Result<String> { 110 + let package_path = package_entry.path(); 111 + let package_name = package_entry.file_name().to_string_lossy().into_owned(); 112 + let relative_package_dir = PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}")); 113 114 + Ok(if !package_path.is_dir() { 115 + NixpkgsProblem::PackageNonDir { 116 + relative_package_dir: relative_package_dir.clone(), 117 + } 118 + .into() 119 + } else { 120 + let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name); 121 + let result = if !package_name_valid { 122 + NixpkgsProblem::InvalidPackageName { 123 + relative_package_dir: relative_package_dir.clone(), 124 + package_name: package_name.clone(), 125 + } 126 + .into() 127 + } else { 128 + Success(()) 129 + }; 130 131 + let correct_relative_package_dir = relative_dir_for_package(&package_name); 132 + let result = result.and(if relative_package_dir != correct_relative_package_dir { 133 + // Only show this error if we have a valid shard and package name 134 + // Because if one of those is wrong, you should fix that first 135 + if shard_name_valid && package_name_valid { 136 + NixpkgsProblem::IncorrectShard { 137 + relative_package_dir: relative_package_dir.clone(), 138 + correct_relative_package_dir: correct_relative_package_dir.clone(), 139 } 140 + .into() 141 + } else { 142 + Success(()) 143 + } 144 + } else { 145 + Success(()) 146 + }); 147 148 + let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME); 149 + let result = result.and(if !package_nix_path.exists() { 150 + NixpkgsProblem::PackageNixNonExistent { 151 + relative_package_dir: relative_package_dir.clone(), 152 } 153 + .into() 154 + } else if package_nix_path.is_dir() { 155 + NixpkgsProblem::PackageNixDir { 156 + relative_package_dir: relative_package_dir.clone(), 157 + } 158 + .into() 159 + } else { 160 + Success(()) 161 + }); 162 + 163 + let result = result.and(references::check_references( 
164 + &relative_package_dir, 165 + &path.join(&relative_package_dir), 166 + )?); 167 168 + result.map(|_| package_name.clone()) 169 + }) 170 }
+3 -24
pkgs/test/nixpkgs-check-by-name/src/utils.rs
··· 1 use anyhow::Context; 2 - use colored::Colorize; 3 use std::fs; 4 use std::io; 5 use std::path::Path; 6 7 /// Deterministic file listing so that tests are reproducible 8 pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> { ··· 47 } 48 } 49 } 50 - 51 - /// A small wrapper around a generic io::Write specifically for errors: 52 - /// - Print everything in red to signal it's an error 53 - /// - Keep track of whether anything was printed at all, so that 54 - /// it can be queried whether any errors were encountered at all 55 - pub struct ErrorWriter<W> { 56 - pub writer: W, 57 - pub empty: bool, 58 - } 59 - 60 - impl<W: io::Write> ErrorWriter<W> { 61 - pub fn new(writer: W) -> ErrorWriter<W> { 62 - ErrorWriter { 63 - writer, 64 - empty: true, 65 - } 66 - } 67 - 68 - pub fn write(&mut self, string: &str) -> io::Result<()> { 69 - self.empty = false; 70 - writeln!(self.writer, "{}", string.red()) 71 - } 72 - }
··· 1 use anyhow::Context; 2 use std::fs; 3 use std::io; 4 use std::path::Path; 5 + 6 + pub const BASE_SUBPATH: &str = "pkgs/by-name"; 7 + pub const PACKAGE_NIX_FILENAME: &str = "package.nix"; 8 9 /// Deterministic file listing so that tests are reproducible 10 pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> { ··· 49 } 50 } 51 }
+102
pkgs/test/nixpkgs-check-by-name/src/validation.rs
···
··· 1 + use crate::nixpkgs_problem::NixpkgsProblem; 2 + use itertools::concat; 3 + use itertools::{ 4 + Either::{Left, Right}, 5 + Itertools, 6 + }; 7 + use Validation::*; 8 + 9 + /// The validation result of a check. 10 + /// Instead of exiting at the first failure, 11 + /// this type can accumulate multiple failures. 12 + /// This can be achieved using the functions `and`, `sequence` and `sequence_` 13 + /// 14 + /// This leans on https://hackage.haskell.org/package/validation 15 + pub enum Validation<A> { 16 + Failure(Vec<NixpkgsProblem>), 17 + Success(A), 18 + } 19 + 20 + impl<A> From<NixpkgsProblem> for Validation<A> { 21 + /// Create a `Validation<A>` from a single check problem 22 + fn from(value: NixpkgsProblem) -> Self { 23 + Failure(vec![value]) 24 + } 25 + } 26 + 27 + /// A type alias representing the result of a check, either: 28 + /// - Err(anyhow::Error): A fatal failure, typically I/O errors. 29 + /// Such failures are not caused by the files in Nixpkgs. 30 + /// This hints at a bug in the code or a problem with the deployment. 31 + /// - Ok(Failure(Vec<NixpkgsProblem>)): A non-fatal validation problem with the Nixpkgs files. 32 + /// Further checks can be run even with this result type. 33 + /// Such problems can be fixed by changing the Nixpkgs files. 34 + /// - Ok(Success(A)): A successful (potentially intermediate) result with an arbitrary value. 35 + /// No fatal errors have occurred and no validation problems have been found with Nixpkgs. 
36 + pub type Result<A> = anyhow::Result<Validation<A>>; 37 + 38 + pub trait ResultIteratorExt<A, E>: Sized + Iterator<Item = std::result::Result<A, E>> { 39 + fn collect_vec(self) -> std::result::Result<Vec<A>, E>; 40 + } 41 + 42 + impl<I, A, E> ResultIteratorExt<A, E> for I 43 + where 44 + I: Sized + Iterator<Item = std::result::Result<A, E>>, 45 + { 46 + /// A convenience version of `collect` specialised to a vector 47 + fn collect_vec(self) -> std::result::Result<Vec<A>, E> { 48 + self.collect() 49 + } 50 + } 51 + 52 + impl<A> Validation<A> { 53 + /// Map a `Validation<A>` to a `Validation<B>` by applying a function to the 54 + /// potentially contained value in case of success. 55 + pub fn map<B>(self, f: impl FnOnce(A) -> B) -> Validation<B> { 56 + match self { 57 + Failure(err) => Failure(err), 58 + Success(value) => Success(f(value)), 59 + } 60 + } 61 + } 62 + 63 + impl Validation<()> { 64 + /// Combine two validations, both of which need to be successful for the return value to be successful. 65 + /// The `NixpkgsProblem`s of both sides are returned concatenated. 66 + pub fn and<A>(self, other: Validation<A>) -> Validation<A> { 67 + match (self, other) { 68 + (Success(_), Success(right_value)) => Success(right_value), 69 + (Failure(errors), Success(_)) => Failure(errors), 70 + (Success(_), Failure(errors)) => Failure(errors), 71 + (Failure(errors_l), Failure(errors_r)) => Failure(concat([errors_l, errors_r])), 72 + } 73 + } 74 + } 75 + 76 + /// Combine many validations into a single one. 77 + /// All given validations need to be successful in order for the returned validation to be successful, 78 + /// in which case the returned validation value contains a `Vec` of each individual value. 79 + /// Otherwise the `NixpkgsProblem`s of all validations are returned concatenated. 
80 + pub fn sequence<A>(check_results: impl IntoIterator<Item = Validation<A>>) -> Validation<Vec<A>> { 81 + let (errors, values): (Vec<Vec<NixpkgsProblem>>, Vec<A>) = check_results 82 + .into_iter() 83 + .partition_map(|validation| match validation { 84 + Failure(err) => Left(err), 85 + Success(value) => Right(value), 86 + }); 87 + 88 + // To combine the errors from the results we flatten all the error Vec's into a new Vec 89 + // This is not very efficient, but doesn't matter because generally we should have no errors 90 + let flattened_errors = errors.into_iter().concat(); 91 + 92 + if flattened_errors.is_empty() { 93 + Success(values) 94 + } else { 95 + Failure(flattened_errors) 96 + } 97 + } 98 + 99 + /// Like `sequence`, but without any containing value, for convenience 100 + pub fn sequence_(validations: impl IntoIterator<Item = Validation<()>>) -> Validation<()> { 101 + sequence(validations).map(|_| ()) 102 + }
+56 -1
pkgs/tools/admin/pgadmin/default.nix
··· 9 , yarn 10 , fixup_yarn_lock 11 , nodejs 12 , server-mode ? true 13 }: 14 ··· 26 27 # keep the scope, as it is used throughout the derivation and tests 28 # this also makes potential future overrides easier 29 - pythonPackages = python3.pkgs.overrideScope (final: prev: rec { }); 30 31 offlineCache = fetchYarnDeps { 32 yarnLock = ./yarn.lock;
··· 9 , yarn 10 , fixup_yarn_lock 11 , nodejs 12 + , fetchpatch 13 , server-mode ? true 14 }: 15 ··· 27 28 # keep the scope, as it is used throughout the derivation and tests 29 # this also makes potential future overrides easier 30 + pythonPackages = python3.pkgs.overrideScope (final: prev: rec { 31 + # pgadmin 7.8 is incompatible with Flask >= 2.3 32 + flask = prev.flask.overridePythonAttrs (oldAttrs: rec { 33 + version = "2.2.5"; 34 + src = oldAttrs.src.override { 35 + pname = "Flask"; 36 + inherit version; 37 + hash = "sha256-7e6bCn/yZiG9WowQ/0hK4oc3okENmbC7mmhQx/uXeqA="; 38 + }; 39 + format = "setuptools"; 40 + }); 41 + # downgrade needed for older Flask 42 + httpbin = prev.httpbin.overridePythonAttrs (oldAttrs: rec { 43 + version = "0.7.0"; 44 + src = oldAttrs.src.override { 45 + inherit version; 46 + hash = "sha256-y7N3kMkVdfTxV1f0KtQdn3KesifV7b6J5OwXVIbbjfo="; 47 + }; 48 + format = "setuptools"; 49 + patches = [ 50 + (fetchpatch { 51 + # Replaces BaseResponse class with Response class for Werkezug 2.1.0 compatibility 52 + # https://github.com/postmanlabs/httpbin/pull/674 53 + url = "https://github.com/postmanlabs/httpbin/commit/5cc81ce87a3c447a127e4a1a707faf9f3b1c9b6b.patch"; 54 + hash = "sha256-SbEWjiqayMFYrbgAPZtSsXqSyCDUz3z127XgcKOcrkE="; 55 + }) 56 + ]; 57 + pytestFlagsArray = [ 58 + "test_httpbin.py" 59 + ]; 60 + propagatedBuildInputs = oldAttrs.propagatedBuildInputs ++ [ final.pythonPackages.brotlipy ]; 61 + }); 62 + # downgrade needed for older httpbin 63 + werkzeug = prev.werkzeug.overridePythonAttrs (oldAttrs: rec { 64 + version = "2.2.3"; 65 + format = "setuptools"; 66 + src = oldAttrs.src.override { 67 + pname = "Werkzeug"; 68 + inherit version; 69 + hash = "sha256-LhzMlBfU2jWLnebxdOOsCUOR6h1PvvLWZ4ZdgZ39Cv4="; 70 + }; 71 + }); 72 + # Downgrade needed for older Flask 73 + flask-security-too = prev.flask-security-too.overridePythonAttrs (oldAttrs: rec { 74 + version = "5.1.2"; 75 + src = oldAttrs.src.override { 76 + inherit version; 77 + hash = 
"sha256-lZzm43m30y+2qjxNddFEeg9HDlQP9afq5VtuR25zaLc="; 78 + }; 79 + postPatch = '' 80 + # This should be removed after updating to version 5.3.0. 81 + sed -i '/filterwarnings =/a ignore:pkg_resources is deprecated:DeprecationWarning' pytest.ini 82 + ''; 83 + }); 84 + }); 85 86 offlineCache = fetchYarnDeps { 87 yarnLock = ./yarn.lock;
+4
pkgs/tools/networking/openssh/common.nix
··· 18 , zlib 19 , openssl 20 , libedit 21 , pkg-config 22 , pam 23 , libredirect 24 , etcDir ? null 25 , withKerberos ? true 26 , libkrb5 27 , libfido2 28 , hostname ··· 64 buildInputs = [ zlib openssl libedit ] 65 ++ lib.optional withFIDO libfido2 66 ++ lib.optional withKerberos libkrb5 67 ++ lib.optional withPAM pam; 68 69 preConfigure = '' ··· 87 ++ lib.optional withKerberos (assert libkrb5 != null; "--with-kerberos5=${libkrb5}") 88 ++ lib.optional stdenv.isDarwin "--disable-libutil" 89 ++ lib.optional (!linkOpenssl) "--without-openssl" 90 ++ extraConfigureFlags; 91 92 ${if stdenv.hostPlatform.isStatic then "NIX_LDFLAGS" else null}= [ "-laudit" ] ++ lib.optionals withKerberos [ "-lkeyutils" ];
··· 18 , zlib 19 , openssl 20 , libedit 21 + , ldns 22 , pkg-config 23 , pam 24 , libredirect 25 , etcDir ? null 26 , withKerberos ? true 27 + , withLdns ? true 28 , libkrb5 29 , libfido2 30 , hostname ··· 66 buildInputs = [ zlib openssl libedit ] 67 ++ lib.optional withFIDO libfido2 68 ++ lib.optional withKerberos libkrb5 69 + ++ lib.optional withLdns ldns 70 ++ lib.optional withPAM pam; 71 72 preConfigure = '' ··· 90 ++ lib.optional withKerberos (assert libkrb5 != null; "--with-kerberos5=${libkrb5}") 91 ++ lib.optional stdenv.isDarwin "--disable-libutil" 92 ++ lib.optional (!linkOpenssl) "--without-openssl" 93 + ++ lib.optional withLdns "--with-ldns" 94 ++ extraConfigureFlags; 95 96 ${if stdenv.hostPlatform.isStatic then "NIX_LDFLAGS" else null}= [ "-laudit" ] ++ lib.optionals withKerberos [ "-lkeyutils" ];
+10 -5
pkgs/tools/security/cie-middleware-linux/default.nix
··· 20 21 let 22 pname = "cie-middleware-linux"; 23 - version = "1.4.4.0"; 24 25 src = fetchFromGitHub { 26 owner = "M0rf30"; 27 repo = pname; 28 - rev = "${version}-podofo"; 29 - sha256 = "sha256-Kyr9OTiY6roJ/wVJS/1aWfrrzDNQbuRTJQqo0akbMUU="; 30 }; 31 32 gradle = gradle_7; ··· 44 buildPhase = '' 45 # Run the fetchDeps task 46 export GRADLE_USER_HOME=$(mktemp -d) 47 gradle --no-daemon -b cie-java/build.gradle fetchDeps 48 ''; 49 ··· 60 61 outputHashAlgo = "sha256"; 62 outputHashMode = "recursive"; 63 - outputHash = "sha256-WzT5vYF9yCMU2A7EkLZyjgWrN3gD7pnkPXc3hDFqpD8="; 64 }; 65 66 in ··· 84 buildInputs = [ 85 cryptopp 86 fontconfig 87 - podofo 88 openssl 89 pcsclite 90 curl ··· 95 # substitute the cieid command with this $out/bin/cieid 96 substituteInPlace libs/pkcs11/src/CSP/AbilitaCIE.cpp \ 97 --replace 'file = "cieid"' 'file = "'$out'/bin/cieid"' 98 ''; 99 100 # Note: we use pushd/popd to juggle between the
··· 20 21 let 22 pname = "cie-middleware-linux"; 23 + version = "1.5.0"; 24 25 src = fetchFromGitHub { 26 owner = "M0rf30"; 27 repo = pname; 28 + rev = version; 29 + sha256 = "sha256-Z8K2Ibg5bBfSql5HEapKgdfiCf/EIKTTD15oVeysQGk="; 30 }; 31 32 gradle = gradle_7; ··· 44 buildPhase = '' 45 # Run the fetchDeps task 46 export GRADLE_USER_HOME=$(mktemp -d) 47 + ls -l 48 gradle --no-daemon -b cie-java/build.gradle fetchDeps 49 ''; 50 ··· 61 62 outputHashAlgo = "sha256"; 63 outputHashMode = "recursive"; 64 + outputHash = "sha256-jtaH8dBpnx8KMJe+jzJfkvcx1NO4nL5jsRO4+GI+d0c="; 65 }; 66 67 in ··· 85 buildInputs = [ 86 cryptopp 87 fontconfig 88 + podofo.dev 89 openssl 90 pcsclite 91 curl ··· 96 # substitute the cieid command with this $out/bin/cieid 97 substituteInPlace libs/pkcs11/src/CSP/AbilitaCIE.cpp \ 98 --replace 'file = "cieid"' 'file = "'$out'/bin/cieid"' 99 + 100 + # revert https://github.com/M0Rf30/cie-middleware-linux/commit/1a389d8 101 + sed -i libs/meson.build \ 102 + -e "s@podofo_dep = .\+@podofo_dep = dependency('libpodofo')@g" 103 ''; 104 105 # Note: we use pushd/popd to juggle between the
+2
pkgs/top-level/aliases.nix
··· 494 linuxPackages_6_3 = linuxKernel.packages.linux_6_3; 495 linuxPackages_6_4 = linuxKernel.packages.linux_6_4; 496 linuxPackages_6_5 = linuxKernel.packages.linux_6_5; 497 linuxPackages_rpi0 = linuxKernel.packages.linux_rpi1; 498 linuxPackages_rpi02w = linuxKernel.packages.linux_rpi3; 499 linuxPackages_rpi1 = linuxKernel.packages.linux_rpi1; ··· 518 linux_6_3 = linuxKernel.kernels.linux_6_3; 519 linux_6_4 = linuxKernel.kernels.linux_6_4; 520 linux_6_5 = linuxKernel.kernels.linux_6_5; 521 linux_rpi0 = linuxKernel.kernels.linux_rpi1; 522 linux_rpi02w = linuxKernel.kernels.linux_rpi3; 523 linux_rpi1 = linuxKernel.kernels.linux_rpi1;
··· 494 linuxPackages_6_3 = linuxKernel.packages.linux_6_3; 495 linuxPackages_6_4 = linuxKernel.packages.linux_6_4; 496 linuxPackages_6_5 = linuxKernel.packages.linux_6_5; 497 + linuxPackages_6_6 = linuxKernel.packages.linux_6_6; 498 linuxPackages_rpi0 = linuxKernel.packages.linux_rpi1; 499 linuxPackages_rpi02w = linuxKernel.packages.linux_rpi3; 500 linuxPackages_rpi1 = linuxKernel.packages.linux_rpi1; ··· 519 linux_6_3 = linuxKernel.kernels.linux_6_3; 520 linux_6_4 = linuxKernel.kernels.linux_6_4; 521 linux_6_5 = linuxKernel.kernels.linux_6_5; 522 + linux_6_6 = linuxKernel.kernels.linux_6_6; 523 linux_rpi0 = linuxKernel.kernels.linux_rpi1; 524 linux_rpi02w = linuxKernel.kernels.linux_rpi3; 525 linux_rpi1 = linuxKernel.kernels.linux_rpi1;
+10 -1
pkgs/top-level/linux-kernels.nix
··· 178 ]; 179 }; 180 181 linux_testing = let 182 testing = callPackage ../os-specific/linux/kernel/mainline.nix { 183 # A special branch that tracks the kernel under the release process ··· 567 linux_5_15 = recurseIntoAttrs (packagesFor kernels.linux_5_15); 568 linux_6_1 = recurseIntoAttrs (packagesFor kernels.linux_6_1); 569 linux_6_5 = recurseIntoAttrs (packagesFor kernels.linux_6_5); 570 } // lib.optionalAttrs config.allowAliases { 571 linux_4_9 = throw "linux 4.9 was removed because it will reach its end of life within 22.11"; # Added 2022-11-08 572 linux_4_14 = throw "linux 4.14 was removed because it will reach its end of life within 23.11"; # Added 2023-10-11 ··· 627 packageAliases = { 628 linux_default = packages.linux_6_1; 629 # Update this when adding the newest kernel major version! 630 - linux_latest = packages.linux_6_5; 631 linux_mptcp = throw "'linux_mptcp' has been moved to https://github.com/teto/mptcp-flake"; 632 linux_rt_default = packages.linux_rt_5_4; 633 linux_rt_latest = packages.linux_rt_6_1;
··· 178 ]; 179 }; 180 181 + linux_6_6 = callPackage ../os-specific/linux/kernel/mainline.nix { 182 + branch = "6.6"; 183 + kernelPatches = [ 184 + kernelPatches.bridge_stp_helper 185 + kernelPatches.request_key_helper 186 + ]; 187 + }; 188 + 189 linux_testing = let 190 testing = callPackage ../os-specific/linux/kernel/mainline.nix { 191 # A special branch that tracks the kernel under the release process ··· 575 linux_5_15 = recurseIntoAttrs (packagesFor kernels.linux_5_15); 576 linux_6_1 = recurseIntoAttrs (packagesFor kernels.linux_6_1); 577 linux_6_5 = recurseIntoAttrs (packagesFor kernels.linux_6_5); 578 + linux_6_6 = recurseIntoAttrs (packagesFor kernels.linux_6_6); 579 } // lib.optionalAttrs config.allowAliases { 580 linux_4_9 = throw "linux 4.9 was removed because it will reach its end of life within 22.11"; # Added 2022-11-08 581 linux_4_14 = throw "linux 4.14 was removed because it will reach its end of life within 23.11"; # Added 2023-10-11 ··· 636 packageAliases = { 637 linux_default = packages.linux_6_1; 638 # Update this when adding the newest kernel major version! 639 + linux_latest = packages.linux_6_6; 640 linux_mptcp = throw "'linux_mptcp' has been moved to https://github.com/teto/mptcp-flake"; 641 linux_rt_default = packages.linux_rt_5_4; 642 linux_rt_latest = packages.linux_rt_6_1;
+2
pkgs/top-level/python-packages.nix
··· 15666 15667 withings-api = callPackage ../development/python-modules/withings-api { }; 15668 15669 wktutils = callPackage ../development/python-modules/wktutils { }; 15670 15671 wled = callPackage ../development/python-modules/wled { };
··· 15666 15667 withings-api = callPackage ../development/python-modules/withings-api { }; 15668 15669 + withings-sync = callPackage ../development/python-modules/withings-sync { }; 15670 + 15671 wktutils = callPackage ../development/python-modules/wktutils { }; 15672 15673 wled = callPackage ../development/python-modules/wled { };