lol

Merge remote-tracking branch 'origin/staging-next' into staging

K900 1bb42223 c26d4adf

+4250 -1253
+2
ci/eval/compare/default.nix
··· 9 beforeResultDir, 10 afterResultDir, 11 touchedFilesJson, 12 }: 13 let 14 /* ··· 119 maintainers = import ./maintainers.nix { 120 changedattrs = lib.attrNames (lib.groupBy (a: a.name) rebuildsPackagePlatformAttrs); 121 changedpathsjson = touchedFilesJson; 122 }; 123 in 124 runCommand "compare"
··· 9 beforeResultDir, 10 afterResultDir, 11 touchedFilesJson, 12 + byName ? false, 13 }: 14 let 15 /* ··· 120 maintainers = import ./maintainers.nix { 121 changedattrs = lib.attrNames (lib.groupBy (a: a.name) rebuildsPackagePlatformAttrs); 122 changedpathsjson = touchedFilesJson; 123 + inherit byName; 124 }; 125 in 126 runCommand "compare"
+19 -3
ci/eval/compare/maintainers.nix
··· 1 # Almost directly vendored from https://github.com/NixOS/ofborg/blob/5a4e743f192fb151915fcbe8789922fa401ecf48/ofborg/src/maintainers.nix 2 - { changedattrs, changedpathsjson }: 3 let 4 pkgs = import ../../.. { 5 system = "x86_64-linux"; ··· 41 ) validPackageAttributes; 42 43 attrsWithMaintainers = builtins.map ( 44 - pkg: pkg // { maintainers = (pkg.package.meta or { }).maintainers or [ ]; } 45 ) attrsWithPackages; 46 47 relevantFilenames = ··· 83 pkg: 84 builtins.map (maintainer: { 85 id = maintainer.githubId; 86 packageName = pkg.name; 87 dueToFiles = pkg.filenames; 88 }) pkg.maintainers 89 ) attrsWithModifiedFiles; 90 91 - byMaintainer = lib.groupBy (ping: toString ping.id) listToPing; 92 93 packagesPerMaintainer = lib.attrsets.mapAttrs ( 94 maintainer: packages: builtins.map (pkg: pkg.packageName) packages
··· 1 # Almost directly vendored from https://github.com/NixOS/ofborg/blob/5a4e743f192fb151915fcbe8789922fa401ecf48/ofborg/src/maintainers.nix 2 + { 3 + changedattrs, 4 + changedpathsjson, 5 + byName ? false, 6 + }: 7 let 8 pkgs = import ../../.. { 9 system = "x86_64-linux"; ··· 45 ) validPackageAttributes; 46 47 attrsWithMaintainers = builtins.map ( 48 + pkg: 49 + let 50 + meta = pkg.package.meta or { }; 51 + in 52 + pkg 53 + // { 54 + # TODO: Refactor this so we can ping entire teams instead of the individual members. 55 + # Note that this will require keeping track of GH team IDs in "maintainers/teams.nix". 56 + maintainers = 57 + meta.maintainers or [ ] 58 + ++ lib.flatten (map (team: team.members or [ ]) (meta.teams or [ ])); 59 + } 60 ) attrsWithPackages; 61 62 relevantFilenames = ··· 98 pkg: 99 builtins.map (maintainer: { 100 id = maintainer.githubId; 101 + inherit (maintainer) github; 102 packageName = pkg.name; 103 dueToFiles = pkg.filenames; 104 }) pkg.maintainers 105 ) attrsWithModifiedFiles; 106 107 + byMaintainer = lib.groupBy (ping: toString ping.${if byName then "github" else "id"}) listToPing; 108 109 packagesPerMaintainer = lib.attrsets.mapAttrs ( 110 maintainer: packages: builtins.map (pkg: pkg.packageName) packages
+1 -1
doc/build-helpers/images/appimagetools.section.md
··· 33 version = "0.6.30"; 34 35 src = fetchurl { 36 - url = "https://github.com/nukeop/nuclear/releases/download/v${version}/${pname}-v${version}.AppImage"; 37 hash = "sha256-he1uGC1M/nFcKpMM9JKY4oeexJcnzV0ZRxhTjtJz6xw="; 38 }; 39 in
··· 33 version = "0.6.30"; 34 35 src = fetchurl { 36 + url = "https://github.com/nukeop/nuclear/releases/download/v${version}/nuclear-v${version}.AppImage"; 37 hash = "sha256-he1uGC1M/nFcKpMM9JKY4oeexJcnzV0ZRxhTjtJz6xw="; 38 }; 39 in
+8
doc/doc-support/package.nix
··· 49 ''; 50 51 buildPhase = '' 52 substituteInPlace ./languages-frameworks/python.section.md \ 53 --subst-var-by python-interpreter-table "$(<"${pythonInterpreterTable}")" 54 ··· 84 --section-toc-depth 1 \ 85 manual.md \ 86 out/index.html 87 ''; 88 89 installPhase = '' 90 dest="$out/share/doc/nixpkgs" 91 mkdir -p "$(dirname "$dest")" 92 mv out "$dest" ··· 97 mkdir -p $out/nix-support/ 98 echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products 99 echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products 100 ''; 101 102 passthru = {
··· 49 ''; 50 51 buildPhase = '' 52 + runHook preBuild 53 + 54 substituteInPlace ./languages-frameworks/python.section.md \ 55 --subst-var-by python-interpreter-table "$(<"${pythonInterpreterTable}")" 56 ··· 86 --section-toc-depth 1 \ 87 manual.md \ 88 out/index.html 89 + 90 + runHook postBuild 91 ''; 92 93 installPhase = '' 94 + runHook preInstall 95 + 96 dest="$out/share/doc/nixpkgs" 97 mkdir -p "$(dirname "$dest")" 98 mv out "$dest" ··· 103 mkdir -p $out/nix-support/ 104 echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products 105 echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products 106 + 107 + runHook postInstall 108 ''; 109 110 passthru = {
+1 -1
doc/functions/generators.section.md
··· 27 } ":"; 28 }; 29 30 - # the INI file can now be given as plain old nix values 31 in 32 customToINI { 33 main = { 34 pushinfo = true;
··· 27 } ":"; 28 }; 29 30 in 31 + # the INI file can now be given as plain old nix values 32 customToINI { 33 main = { 34 pushinfo = true;
+5 -5
doc/hooks/tauri.section.md
··· 23 wrapGAppsHook4, 24 }: 25 26 - rustPlatform.buildRustPackage rec { 27 - # . . . 28 29 useFetchCargoVendor = true; 30 cargoHash = "..."; 31 32 # Assuming our app's frontend uses `npm` as a package manager 33 npmDeps = fetchNpmDeps { 34 - name = "${pname}-npm-deps-${version}"; 35 inherit src; 36 hash = "..."; 37 }; ··· 61 # And make sure we build there too 62 buildAndTestSubdir = cargoRoot; 63 64 - # . . . 65 - } 66 ``` 67 68 ## Variables controlling cargo-tauri {#tauri-hook-variables-controlling}
··· 23 wrapGAppsHook4, 24 }: 25 26 + rustPlatform.buildRustPackage (finalAttrs: { 27 + # ... 28 29 useFetchCargoVendor = true; 30 cargoHash = "..."; 31 32 # Assuming our app's frontend uses `npm` as a package manager 33 npmDeps = fetchNpmDeps { 34 + name = "${finalAttrs.pname}-npm-deps-${finalAttrs.version}"; 35 inherit src; 36 hash = "..."; 37 }; ··· 61 # And make sure we build there too 62 buildAndTestSubdir = cargoRoot; 63 64 + # ... 65 + }) 66 ``` 67 68 ## Variables controlling cargo-tauri {#tauri-hook-variables-controlling}
+4
doc/languages-frameworks/agda.section.md
··· 208 libraryName = "IAL-1.3"; 209 210 buildPhase = '' 211 patchShebangs find-deps.sh 212 make 213 ''; 214 } 215 ```
··· 208 libraryName = "IAL-1.3"; 209 210 buildPhase = '' 211 + runHook preBuild 212 + 213 patchShebangs find-deps.sh 214 make 215 + 216 + runHook postBuild 217 ''; 218 } 219 ```
+8 -1
doc/languages-frameworks/bower.section.md
··· 105 src = myWebApp; 106 }; 107 108 buildPhase = '' 109 cp --reflink=auto --no-preserve=mode -R $bowerComponents/bower_components . # note 2 110 - export HOME=$PWD # note 3 111 ${pkgs.nodePackages.gulp}/bin/gulp build # note 4 112 ''; 113 114 installPhase = "mv gulpdist $out";
··· 105 src = myWebApp; 106 }; 107 108 + nativeBuildInputs = [ 109 + writableTmpDirAsHomeHook # note 3 110 + ]; 111 + 112 buildPhase = '' 113 + runHook preBuild 114 + 115 cp --reflink=auto --no-preserve=mode -R $bowerComponents/bower_components . # note 2 116 ${pkgs.nodePackages.gulp}/bin/gulp build # note 4 117 + 118 + runHook postBuild 119 ''; 120 121 installPhase = "mv gulpdist $out";
+2 -2
doc/languages-frameworks/dart.section.md
··· 42 43 src = fetchFromGitHub { 44 owner = "sass"; 45 - repo = pname; 46 - rev = version; 47 hash = "sha256-U6enz8yJcc4Wf8m54eYIAnVg/jsGi247Wy8lp1r1wg4="; 48 }; 49
··· 42 43 src = fetchFromGitHub { 44 owner = "sass"; 45 + repo = "dart-sass"; 46 + tag = version; 47 hash = "sha256-U6enz8yJcc4Wf8m54eYIAnVg/jsGi247Wy8lp1r1wg4="; 48 }; 49
+1 -1
doc/languages-frameworks/dlang.section.md
··· 22 src = fetchFromGitHub { 23 owner = "CyberShadow"; 24 repo = "btdu"; 25 - rev = "v${version}"; 26 hash = "sha256-3sSZq+5UJH02IO0Y1yL3BLHDb4lk8k6awb5ZysBQciE="; 27 }; 28
··· 22 src = fetchFromGitHub { 23 owner = "CyberShadow"; 24 repo = "btdu"; 25 + tag = "v${version}"; 26 hash = "sha256-3sSZq+5UJH02IO0Y1yL3BLHDb4lk8k6awb5ZysBQciE="; 27 }; 28
+37 -4
doc/languages-frameworks/emscripten.section.md
··· 41 (pkgs.zlib.override { 42 stdenv = pkgs.emscriptenStdenv; 43 }).overrideAttrs 44 - (old: rec { 45 buildInputs = old.buildInputs ++ [ pkg-config ]; 46 # we need to reset this setting! 47 env = (old.env or { }) // { 48 NIX_CFLAGS_COMPILE = ""; 49 }; 50 configurePhase = '' 51 # FIXME: Some tests require writing at $HOME 52 HOME=$TMPDIR ··· 57 58 runHook postConfigure 59 ''; 60 dontStrip = true; 61 outputs = [ "out" ]; 62 buildPhase = '' 63 emmake make 64 ''; 65 installPhase = '' 66 emmake make install 67 ''; 68 checkPhase = '' 69 echo "================= testing zlib using node =================" 70 71 echo "Compiling a custom test" ··· 84 echo "it seems to work! very good." 85 fi 86 echo "================= /testing zlib using node =================" 87 ''; 88 89 postPatch = pkgs.lib.optionalString pkgs.stdenv.hostPlatform.isDarwin '' ··· 102 This `xmlmirror` example features an Emscripten package that is defined completely from this context and no `pkgs.zlib.override` is used. 103 104 ```nix 105 - pkgs.buildEmscriptenPackage rec { 106 - name = "xmlmirror"; 107 108 buildInputs = [ 109 pkg-config ··· 116 openjdk 117 json_c 118 ]; 119 nativeBuildInputs = [ 120 pkg-config 121 zlib 122 ]; 123 ··· 128 }; 129 130 configurePhase = '' 131 rm -f fastXmlLint.js* 132 # a fix for ERROR:root:For asm.js, TOTAL_MEMORY must be a multiple of 16MB, was 234217728 133 # https://gitlab.com/odfplugfest/xmlmirror/issues/8 ··· 137 sed -e "s/\$(JSONC_LDFLAGS) \$(ZLIB_LDFLAGS) \$(LIBXML20_LDFLAGS)/\$(JSONC_LDFLAGS) \$(LIBXML20_LDFLAGS) \$(ZLIB_LDFLAGS) /g" -i Makefile.emEnv 138 # https://gitlab.com/odfplugfest/xmlmirror/issues/11 139 sed -e "s/-o fastXmlLint.js/-s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -o fastXmlLint.js/g" -i Makefile.emEnv 140 ''; 141 142 buildPhase = '' 143 - HOME=$TMPDIR 144 make -f Makefile.emEnv 145 ''; 146 147 outputs = [ ··· 150 ]; 151 152 installPhase = '' 153 mkdir -p $out/share 154 mkdir -p $doc/share/${name} 155 ··· 163 cp *.json $out/share 164 
cp *.rng $out/share 165 cp README.md $doc/share/${name} 166 ''; 167 checkPhase = '' 168 169 ''; 170 } 171 ```
··· 41 (pkgs.zlib.override { 42 stdenv = pkgs.emscriptenStdenv; 43 }).overrideAttrs 44 + (old: { 45 buildInputs = old.buildInputs ++ [ pkg-config ]; 46 # we need to reset this setting! 47 env = (old.env or { }) // { 48 NIX_CFLAGS_COMPILE = ""; 49 }; 50 + 51 configurePhase = '' 52 # FIXME: Some tests require writing at $HOME 53 HOME=$TMPDIR ··· 58 59 runHook postConfigure 60 ''; 61 + 62 dontStrip = true; 63 outputs = [ "out" ]; 64 + 65 buildPhase = '' 66 + runHook preBuild 67 + 68 emmake make 69 + 70 + runHook postBuild 71 ''; 72 + 73 installPhase = '' 74 + runHook preInstall 75 + 76 emmake make install 77 + 78 + runHook postInstall 79 ''; 80 + 81 checkPhase = '' 82 + runHook preCheck 83 + 84 echo "================= testing zlib using node =================" 85 86 echo "Compiling a custom test" ··· 99 echo "it seems to work! very good." 100 fi 101 echo "================= /testing zlib using node =================" 102 + 103 + runHook postCheck 104 ''; 105 106 postPatch = pkgs.lib.optionalString pkgs.stdenv.hostPlatform.isDarwin '' ··· 119 This `xmlmirror` example features an Emscripten package that is defined completely from this context and no `pkgs.zlib.override` is used. 
120 121 ```nix 122 + pkgs.buildEmscriptenPackage { 123 + pname = "xmlmirror"; 124 + version = "1.2.3"; 125 126 buildInputs = [ 127 pkg-config ··· 134 openjdk 135 json_c 136 ]; 137 + 138 nativeBuildInputs = [ 139 pkg-config 140 + writableTmpDirAsHomeHook 141 zlib 142 ]; 143 ··· 148 }; 149 150 configurePhase = '' 151 + runHook preConfigure 152 + 153 rm -f fastXmlLint.js* 154 # a fix for ERROR:root:For asm.js, TOTAL_MEMORY must be a multiple of 16MB, was 234217728 155 # https://gitlab.com/odfplugfest/xmlmirror/issues/8 ··· 159 sed -e "s/\$(JSONC_LDFLAGS) \$(ZLIB_LDFLAGS) \$(LIBXML20_LDFLAGS)/\$(JSONC_LDFLAGS) \$(LIBXML20_LDFLAGS) \$(ZLIB_LDFLAGS) /g" -i Makefile.emEnv 160 # https://gitlab.com/odfplugfest/xmlmirror/issues/11 161 sed -e "s/-o fastXmlLint.js/-s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -o fastXmlLint.js/g" -i Makefile.emEnv 162 + 163 + runHook postConfigure 164 ''; 165 166 buildPhase = '' 167 + runHook preBuild 168 + 169 make -f Makefile.emEnv 170 + 171 + runHook postBuild 172 ''; 173 174 outputs = [ ··· 177 ]; 178 179 installPhase = '' 180 + runHook preInstall 181 + 182 mkdir -p $out/share 183 mkdir -p $doc/share/${name} 184 ··· 192 cp *.json $out/share 193 cp *.rng $out/share 194 cp README.md $doc/share/${name} 195 + runHook postInstall 196 ''; 197 + 198 checkPhase = '' 199 + runHook preCheck 200 201 + runHook postCheck 202 ''; 203 } 204 ```
+3 -3
doc/languages-frameworks/go.section.md
··· 13 14 ```nix 15 { 16 - pet = buildGoModule rec { 17 pname = "pet"; 18 version = "0.3.4"; 19 20 src = fetchFromGitHub { 21 owner = "knqyf263"; 22 repo = "pet"; 23 - rev = "v${version}"; 24 hash = "sha256-Gjw1dRrgM8D3G7v6WIM2+50r4HmTXvx0Xxme2fH9TlQ="; 25 }; 26 ··· 32 license = lib.licenses.mit; 33 maintainers = with lib.maintainers; [ kalbasit ]; 34 }; 35 - }; 36 } 37 ``` 38
··· 13 14 ```nix 15 { 16 + pet = buildGoModule (finalAttrs: { 17 pname = "pet"; 18 version = "0.3.4"; 19 20 src = fetchFromGitHub { 21 owner = "knqyf263"; 22 repo = "pet"; 23 + tag = "v${finalAttrs.version}"; 24 hash = "sha256-Gjw1dRrgM8D3G7v6WIM2+50r4HmTXvx0Xxme2fH9TlQ="; 25 }; 26 ··· 32 license = lib.licenses.mit; 33 maintainers = with lib.maintainers; [ kalbasit ]; 34 }; 35 + }); 36 } 37 ``` 38
+12 -5
doc/languages-frameworks/javascript.section.md
··· 198 fetchFromGitHub, 199 }: 200 201 - buildNpmPackage rec { 202 pname = "flood"; 203 version = "4.7.0"; 204 205 src = fetchFromGitHub { 206 owner = "jesec"; 207 - repo = pname; 208 - rev = "v${version}"; 209 hash = "sha256-BR+ZGkBBfd0dSQqAvujsbgsEPFYw/ThrylxUbOksYxM="; 210 }; 211 ··· 222 license = lib.licenses.gpl3Only; 223 maintainers = with lib.maintainers; [ winter ]; 224 }; 225 - } 226 ``` 227 228 In the default `installPhase` set by `buildNpmPackage`, it uses `npm pack --json --dry-run` to decide what files to install in `$out/lib/node_modules/$name/`, where `$name` is the `name` string defined in the package's `package.json`. ··· 646 647 ```nix 648 { 649 buildPhase = '' 650 - export HOME=$(mktemp -d) 651 yarn --offline build 652 ''; 653 } 654 ```
··· 198 fetchFromGitHub, 199 }: 200 201 + buildNpmPackage (finalAttrs: { 202 pname = "flood"; 203 version = "4.7.0"; 204 205 src = fetchFromGitHub { 206 owner = "jesec"; 207 + repo = "flood"; 208 + tag = "v${finalAttrs.version}"; 209 hash = "sha256-BR+ZGkBBfd0dSQqAvujsbgsEPFYw/ThrylxUbOksYxM="; 210 }; 211 ··· 222 license = lib.licenses.gpl3Only; 223 maintainers = with lib.maintainers; [ winter ]; 224 }; 225 + }) 226 ``` 227 228 In the default `installPhase` set by `buildNpmPackage`, it uses `npm pack --json --dry-run` to decide what files to install in `$out/lib/node_modules/$name/`, where `$name` is the `name` string defined in the package's `package.json`. ··· 646 647 ```nix 648 { 649 + nativeBuildInputs = [ 650 + writableTmpDirAsHomeHook 651 + ]; 652 + 653 buildPhase = '' 654 + runHook preBuild 655 + 656 yarn --offline build 657 + 658 + runHook postBuild 659 ''; 660 } 661 ```
+2 -3
doc/languages-frameworks/lisp.section.md
··· 135 - names starting with a number have a `_` prepended (`3d-vectors`->`_3d-vectors`) 136 - `_` in names is converted to `__` for reversibility 137 138 - 139 ## Defining packages manually inside Nixpkgs {#lisp-defining-packages-inside} 140 141 Packages that for some reason are not in Quicklisp, and so cannot be ··· 185 domain = "gitlab.common-lisp.net"; 186 owner = "alexandria"; 187 repo = "alexandria"; 188 - rev = "v${version}"; 189 hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ="; 190 }; 191 }; ··· 212 domain = "gitlab.common-lisp.net"; 213 owner = "alexandria"; 214 repo = "alexandria"; 215 - rev = "v${version}"; 216 hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ="; 217 }; 218 })
··· 135 - names starting with a number have a `_` prepended (`3d-vectors`->`_3d-vectors`) 136 - `_` in names is converted to `__` for reversibility 137 138 ## Defining packages manually inside Nixpkgs {#lisp-defining-packages-inside} 139 140 Packages that for some reason are not in Quicklisp, and so cannot be ··· 184 domain = "gitlab.common-lisp.net"; 185 owner = "alexandria"; 186 repo = "alexandria"; 187 + tag = "v${version}"; 188 hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ="; 189 }; 190 }; ··· 211 domain = "gitlab.common-lisp.net"; 212 owner = "alexandria"; 213 repo = "alexandria"; 214 + tag = "v${version}"; 215 hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ="; 216 }; 217 })
+53 -17
doc/languages-frameworks/maven.section.md
··· 23 24 src = fetchFromGitHub { 25 owner = "intoolswetrust"; 26 - repo = pname; 27 - rev = "${pname}-${version}"; 28 hash = "sha256-rRttA5H0A0c44loBzbKH7Waoted3IsOgxGCD2VM0U/Q="; 29 }; 30 ··· 33 nativeBuildInputs = [ makeWrapper ]; 34 35 installPhase = '' 36 mkdir -p $out/bin $out/share/jd-cli 37 install -Dm644 jd-cli/target/jd-cli.jar $out/share/jd-cli 38 39 makeWrapper ${jre}/bin/java $out/bin/jd-cli \ 40 --add-flags "-jar $out/share/jd-cli/jd-cli.jar" 41 ''; 42 43 meta = { ··· 301 buildInputs = [ maven ]; 302 src = ./.; # or fetchFromGitHub, cleanSourceWith, etc 303 buildPhase = '' 304 mvn package -Dmaven.repo.local=$out 305 ''; 306 307 # keep only *.{pom,jar,sha1,nbm} and delete all ephemeral files with lastModified timestamps inside 308 installPhase = '' 309 find $out -type f \ 310 -name \*.lastUpdated -or \ 311 -name resolver-status.properties -or \ 312 -name _remote.repositories \ 313 -delete 314 ''; 315 316 # don't do any fixup ··· 354 maven, 355 callPackage, 356 }: 357 - # pick a repository derivation, here we will use buildMaven 358 let 359 repository = callPackage ./build-maven-repository.nix { }; 360 in 361 - stdenv.mkDerivation rec { 362 pname = "maven-demo"; 363 version = "1.0"; 364 ··· 366 buildInputs = [ maven ]; 367 368 buildPhase = '' 369 echo "Using repository ${repository}" 370 mvn --offline -Dmaven.repo.local=${repository} package; 371 ''; 372 373 installPhase = '' 374 - install -Dm644 target/${pname}-${version}.jar $out/share/java 375 ''; 376 - } 377 ``` 378 379 ::: {.tip} ··· 421 let 422 repository = callPackage ./build-maven-repository.nix { }; 423 in 424 - stdenv.mkDerivation rec { 425 pname = "maven-demo"; 426 version = "1.0"; 427 ··· 430 buildInputs = [ maven ]; 431 432 buildPhase = '' 433 echo "Using repository ${repository}" 434 mvn --offline -Dmaven.repo.local=${repository} package; 435 ''; 436 437 installPhase = '' 438 mkdir -p $out/bin 439 440 classpath=$(find ${repository} -name "*.jar" -printf ':%h/%f'); 441 - install -Dm644 
target/${pname}-${version}.jar $out/share/java 442 # create a wrapper that will automatically set the classpath 443 # this should be the paths from the dependency derivation 444 - makeWrapper ${jre}/bin/java $out/bin/${pname} \ 445 - --add-flags "-classpath $out/share/java/${pname}-${version}.jar:''${classpath#:}" \ 446 --add-flags "Main" 447 ''; 448 - } 449 ``` 450 451 #### MANIFEST file via Maven Plugin {#manifest-file-via-maven-plugin} ··· 502 makeWrapper, 503 jre, 504 }: 505 - # pick a repository derivation, here we will use buildMaven 506 let 507 repository = callPackage ./build-maven-repository.nix { }; 508 in 509 - stdenv.mkDerivation rec { 510 pname = "maven-demo"; 511 version = "1.0"; 512 ··· 515 buildInputs = [ maven ]; 516 517 buildPhase = '' 518 echo "Using repository ${repository}" 519 mvn --offline -Dmaven.repo.local=${repository} package; 520 ''; 521 522 installPhase = '' 523 mkdir -p $out/bin 524 525 # create a symbolic link for the repository directory 526 ln -s ${repository} $out/repository 527 528 - install -Dm644 target/${pname}-${version}.jar $out/share/java 529 # create a wrapper that will automatically set the classpath 530 # this should be the paths from the dependency derivation 531 - makeWrapper ${jre}/bin/java $out/bin/${pname} \ 532 - --add-flags "-jar $out/share/java/${pname}-${version}.jar" 533 ''; 534 - } 535 ``` 536 ::: {.note} 537 Our script produces a dependency on `jre` rather than `jdk` to restrict the runtime closure necessary to run the application.
··· 23 24 src = fetchFromGitHub { 25 owner = "intoolswetrust"; 26 + repo = "jd-cli"; 27 + tag = "jd-cli-${version}"; 28 hash = "sha256-rRttA5H0A0c44loBzbKH7Waoted3IsOgxGCD2VM0U/Q="; 29 }; 30 ··· 33 nativeBuildInputs = [ makeWrapper ]; 34 35 installPhase = '' 36 + runHook preInstall 37 + 38 mkdir -p $out/bin $out/share/jd-cli 39 install -Dm644 jd-cli/target/jd-cli.jar $out/share/jd-cli 40 41 makeWrapper ${jre}/bin/java $out/bin/jd-cli \ 42 --add-flags "-jar $out/share/jd-cli/jd-cli.jar" 43 + 44 + runHook postInstall 45 ''; 46 47 meta = { ··· 305 buildInputs = [ maven ]; 306 src = ./.; # or fetchFromGitHub, cleanSourceWith, etc 307 buildPhase = '' 308 + runHook preBuild 309 + 310 mvn package -Dmaven.repo.local=$out 311 + 312 + runHook postBuild 313 ''; 314 315 # keep only *.{pom,jar,sha1,nbm} and delete all ephemeral files with lastModified timestamps inside 316 installPhase = '' 317 + runHook preInstall 318 + 319 find $out -type f \ 320 -name \*.lastUpdated -or \ 321 -name resolver-status.properties -or \ 322 -name _remote.repositories \ 323 -delete 324 + 325 + runHook postInstall 326 ''; 327 328 # don't do any fixup ··· 366 maven, 367 callPackage, 368 }: 369 let 370 + # pick a repository derivation, here we will use buildMaven 371 repository = callPackage ./build-maven-repository.nix { }; 372 in 373 + stdenv.mkDerivation (finalAttrs: { 374 pname = "maven-demo"; 375 version = "1.0"; 376 ··· 378 buildInputs = [ maven ]; 379 380 buildPhase = '' 381 + runHook preBuild 382 + 383 echo "Using repository ${repository}" 384 mvn --offline -Dmaven.repo.local=${repository} package; 385 + 386 + runHook postBuild 387 ''; 388 389 installPhase = '' 390 + runHook preInstall 391 + 392 + install -Dm644 target/${finalAttrs.pname}-${finalAttrs.version}.jar $out/share/java 393 + 394 + runHook postInstall 395 ''; 396 + }) 397 ``` 398 399 ::: {.tip} ··· 441 let 442 repository = callPackage ./build-maven-repository.nix { }; 443 in 444 + stdenv.mkDerivation (finalAttrs: { 445 pname = 
"maven-demo"; 446 version = "1.0"; 447 ··· 450 buildInputs = [ maven ]; 451 452 buildPhase = '' 453 + runHook preBuild 454 + 455 echo "Using repository ${repository}" 456 mvn --offline -Dmaven.repo.local=${repository} package; 457 + 458 + runHook postBuild 459 ''; 460 461 installPhase = '' 462 + runHook preInstall 463 + 464 mkdir -p $out/bin 465 466 classpath=$(find ${repository} -name "*.jar" -printf ':%h/%f'); 467 + install -Dm644 target/maven-demo-${finalAttrs.version}.jar $out/share/java 468 # create a wrapper that will automatically set the classpath 469 # this should be the paths from the dependency derivation 470 + makeWrapper ${jre}/bin/java $out/bin/maven-demo \ 471 + --add-flags "-classpath $out/share/java/maven-demo-${finalAttrs.version}.jar:''${classpath#:}" \ 472 --add-flags "Main" 473 + 474 + runHook postInstall 475 ''; 476 + }) 477 ``` 478 479 #### MANIFEST file via Maven Plugin {#manifest-file-via-maven-plugin} ··· 530 makeWrapper, 531 jre, 532 }: 533 let 534 + # pick a repository derivation, here we will use buildMaven 535 repository = callPackage ./build-maven-repository.nix { }; 536 in 537 + stdenv.mkDerivation (finalAttrs: { 538 pname = "maven-demo"; 539 version = "1.0"; 540 ··· 543 buildInputs = [ maven ]; 544 545 buildPhase = '' 546 + runHook preBuild 547 + 548 echo "Using repository ${repository}" 549 mvn --offline -Dmaven.repo.local=${repository} package; 550 + 551 + runHook postBuild 552 ''; 553 554 installPhase = '' 555 + runHook preInstall 556 + 557 mkdir -p $out/bin 558 559 # create a symbolic link for the repository directory 560 ln -s ${repository} $out/repository 561 562 + install -Dm644 target/maven-demo-${finalAttrs.version}.jar $out/share/java 563 # create a wrapper that will automatically set the classpath 564 # this should be the paths from the dependency derivation 565 + makeWrapper ${jre}/bin/java $out/bin/maven-demo \ 566 + --add-flags "-jar $out/share/java/maven-demo-${finalAttrs.version}.jar" 567 + 568 + runHook postInstall 
569 ''; 570 + }) 571 ``` 572 ::: {.note} 573 Our script produces a dependency on `jre` rather than `jdk` to restrict the runtime closure necessary to run the application.
+3 -3
doc/languages-frameworks/ocaml.section.md
··· 83 84 src = fetchFromGitHub { 85 owner = "inhabitedtype"; 86 - repo = pname; 87 - rev = version; 88 hash = "sha256-MK8o+iPGANEhrrTc1Kz9LBilx2bDPQt7Pp5P2libucI="; 89 }; 90 ··· 124 minimalOCamlVersion = "4.02"; 125 126 src = fetchurl { 127 - url = "https://github.com/flowtype/ocaml-${pname}/releases/download/v${version}/${pname}-v${version}.tbz"; 128 hash = "sha256-d5/3KUBAWRj8tntr4RkJ74KWW7wvn/B/m1nx0npnzyc="; 129 }; 130
··· 83 84 src = fetchFromGitHub { 85 owner = "inhabitedtype"; 86 + repo = "angstrom"; 87 + tag = version; 88 hash = "sha256-MK8o+iPGANEhrrTc1Kz9LBilx2bDPQt7Pp5P2libucI="; 89 }; 90 ··· 124 minimalOCamlVersion = "4.02"; 125 126 src = fetchurl { 127 + url = "https://github.com/flowtype/ocaml-wtf8/releases/download/v${version}/wtf8-v${version}.tbz"; 128 hash = "sha256-d5/3KUBAWRj8tntr4RkJ74KWW7wvn/B/m1nx0npnzyc="; 129 }; 130
+4 -4
doc/languages-frameworks/perl.section.md
··· 39 pname = "Class-C3"; 40 version = "0.21"; 41 src = fetchurl { 42 - url = "mirror://cpan/authors/id/F/FL/FLORA/${pname}-${version}.tar.gz"; 43 hash = "sha256-/5GE5xHT0uYGOQxroqj6LMU7CtKn2s6vMVoSXxL4iK4="; 44 }; 45 }; ··· 88 version = "0.36"; 89 90 src = fetchurl { 91 - url = "mirror://cpan/authors/id/P/PM/PMQS/${pname}-${version}.tar.gz"; 92 hash = "sha256-4Y+HGgGQqcOfdiKcFIyMrWBEccVNVAMDBWZlFTMorh8="; 93 }; 94 ··· 107 pname = "Class-C3-Componentised"; 108 version = "1.0004"; 109 src = fetchurl { 110 - url = "mirror://cpan/authors/id/A/AS/ASH/${pname}-${version}.tar.gz"; 111 hash = "sha256-ASO9rV/FzJYZ0BH572Fxm2ZrFLMZLFATJng1NuU4FHc="; 112 }; 113 propagatedBuildInputs = [ ··· 137 version = "12.50"; 138 139 src = fetchurl { 140 - url = "https://exiftool.org/${pname}-${version}.tar.gz"; 141 hash = "sha256-vOhB/FwQMC8PPvdnjDvxRpU6jAZcC6GMQfc0AH4uwKg="; 142 }; 143
··· 39 pname = "Class-C3"; 40 version = "0.21"; 41 src = fetchurl { 42 + url = "mirror://cpan/authors/id/F/FL/FLORA/Class-C3-${version}.tar.gz"; 43 hash = "sha256-/5GE5xHT0uYGOQxroqj6LMU7CtKn2s6vMVoSXxL4iK4="; 44 }; 45 }; ··· 88 version = "0.36"; 89 90 src = fetchurl { 91 + url = "mirror://cpan/authors/id/P/PM/PMQS/BerkeleyDB-${version}.tar.gz"; 92 hash = "sha256-4Y+HGgGQqcOfdiKcFIyMrWBEccVNVAMDBWZlFTMorh8="; 93 }; 94 ··· 107 pname = "Class-C3-Componentised"; 108 version = "1.0004"; 109 src = fetchurl { 110 + url = "mirror://cpan/authors/id/A/AS/ASH/Class-C3-Componentised-${version}.tar.gz"; 111 hash = "sha256-ASO9rV/FzJYZ0BH572Fxm2ZrFLMZLFATJng1NuU4FHc="; 112 }; 113 propagatedBuildInputs = [ ··· 137 version = "12.50"; 138 139 src = fetchurl { 140 + url = "https://exiftool.org/Image-ExifTool-${version}.tar.gz"; 141 hash = "sha256-vOhB/FwQMC8PPvdnjDvxRpU6jAZcC6GMQfc0AH4uwKg="; 142 }; 143
+3 -2
doc/languages-frameworks/python.section.md
··· 2135 2136 #### Common issues {#common-issues} 2137 2138 - * Tests that attempt to access `$HOME` can be fixed by using the following 2139 - work-around before running tests (e.g. `preCheck`): `export HOME=$(mktemp -d)` 2140 * Compiling with Cython causes tests to fail with a `ModuleNotLoadedError`. 2141 This can be fixed with two changes in the derivation: 1) replacing `pytest` with 2142 `pytestCheckHook` and 2) adding a `preCheck` containing `cd $out` to run
··· 2135 2136 #### Common issues {#common-issues} 2137 2138 + * Tests that attempt to access `$HOME` can be fixed by using `writableTmpDirAsHomeHook` in 2139 + `nativeCheckInputs`, which sets up a writable temporary directory as the home directory. Alternatively, 2140 + you can achieve the same effect manually (e.g. in `preCheck`) with: `export HOME=$(mktemp -d)`. 2141 * Compiling with Cython causes tests to fail with a `ModuleNotLoadedError`. 2142 This can be fixed with two changes in the derivation: 1) replacing `pytest` with 2143 `pytestCheckHook` and 2) adding a `preCheck` containing `cd $out` to run
+23 -22
doc/languages-frameworks/rust.section.md
··· 28 rustPlatform, 29 }: 30 31 - rustPlatform.buildRustPackage rec { 32 pname = "ripgrep"; 33 version = "14.1.1"; 34 35 src = fetchFromGitHub { 36 owner = "BurntSushi"; 37 - repo = pname; 38 - rev = version; 39 hash = "sha256-gyWnahj1A+iXUQlQ1O1H1u7K5euYQOld9qWm99Vjaeg="; 40 }; 41 ··· 48 license = lib.licenses.unlicense; 49 maintainers = [ ]; 50 }; 51 - } 52 ``` 53 54 `buildRustPackage` requires a `cargoHash` attribute, computed over all crate sources of this package. ··· 104 `pname`: 105 106 ```nix 107 - rustPlatform.buildRustPackage rec { 108 pname = "broot"; 109 version = "1.2.0"; 110 111 src = fetchCrate { 112 - inherit pname version; 113 hash = "sha256-aDQA4A5mScX9or3Lyiv/5GyAehidnpKKE0grhbP1Ctc="; 114 }; 115 116 useFetchCargoVendor = true; 117 cargoHash = "sha256-iDYh52rj1M5Uupvbx2WeDd/jvQZ+2A50V5rp5e2t7q4="; 118 - cargoDepsName = pname; 119 120 # ... 121 - } 122 ``` 123 124 ### Importing a `Cargo.lock` file {#importing-a-cargo.lock-file} ··· 184 specified in the `outputHashes` attribute. For example: 185 186 ```nix 187 - rustPlatform.buildRustPackage rec { 188 pname = "myproject"; 189 version = "1.0.0"; 190 ··· 209 avoid having to specify `outputHashes`. For example: 210 211 ```nix 212 - rustPlatform.buildRustPackage rec { 213 pname = "myproject"; 214 version = "1.0.0"; 215 ··· 235 For example: 236 237 ```nix 238 - rustPlatform.buildRustPackage rec { 239 pname = "myproject"; 240 version = "1.0.0"; 241 ··· 427 the `cargoPatches` attribute to update or add it. 428 429 ```nix 430 - rustPlatform.buildRustPackage rec { 431 # ... 
432 cargoPatches = [ 433 # a patch file to add/update Cargo.lock in the source code ··· 575 576 src = fetchFromGitHub { 577 owner = "huggingface"; 578 - repo = pname; 579 - rev = "python-v${version}"; 580 hash = "sha256-rQ2hRV52naEf6PvRsWVCTN7B1oXAQGmnpJw4iIdhamw="; 581 }; 582 ··· 665 src = fetchFromGitHub { 666 owner = "Qiskit"; 667 repo = "retworkx"; 668 - rev = version; 669 hash = "sha256-11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20="; 670 }; 671 ··· 705 tinysparql, 706 }: 707 708 - stdenv.mkDerivation rec { 709 pname = "health"; 710 version = "0.95.0"; 711 ··· 713 domain = "gitlab.gnome.org"; 714 owner = "World"; 715 repo = "health"; 716 - rev = version; 717 hash = "sha256-PrNPprSS98yN8b8yw2G6hzTSaoE65VbsM3q7FVB4mds="; 718 }; 719 720 cargoDeps = rustPlatform.fetchCargoVendor { 721 - inherit pname version src; 722 hash = "sha256-eR1ZGtTZQNhofFUEjI7IX16sMKPJmAl7aIFfPJukecg="; 723 }; 724 ··· 740 ]; 741 742 # ... 743 - } 744 ``` 745 746 ## `buildRustCrate`: Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo} ··· 1000 }; 1001 in 1002 1003 - rustPlatform.buildRustPackage rec { 1004 pname = "ripgrep"; 1005 version = "14.1.1"; 1006 1007 src = fetchFromGitHub { 1008 owner = "BurntSushi"; 1009 repo = "ripgrep"; 1010 - rev = version; 1011 hash = "sha256-gyWnahj1A+iXUQlQ1O1H1u7K5euYQOld9qWm99Vjaeg="; 1012 }; 1013 1014 useFetchCargoVendor = true; 1015 cargoHash = "sha256-9atn5qyBDy4P6iUoHFhg+TV6Ur71fiah4oTJbBMeEy4="; 1016 1017 doCheck = false; 1018 1019 meta = { ··· 1025 ]; 1026 maintainers = with lib.maintainers; [ ]; 1027 }; 1028 - } 1029 ``` 1030 1031 Follow the below steps to try that snippet.
··· 28 rustPlatform, 29 }: 30 31 + rustPlatform.buildRustPackage (finalAttrs: { 32 pname = "ripgrep"; 33 version = "14.1.1"; 34 35 src = fetchFromGitHub { 36 owner = "BurntSushi"; 37 + repo = "ripgrep"; 38 + tag = finalAttrs.version; 39 hash = "sha256-gyWnahj1A+iXUQlQ1O1H1u7K5euYQOld9qWm99Vjaeg="; 40 }; 41 ··· 48 license = lib.licenses.unlicense; 49 maintainers = [ ]; 50 }; 51 + }) 52 ``` 53 54 `buildRustPackage` requires a `cargoHash` attribute, computed over all crate sources of this package. ··· 104 `pname`: 105 106 ```nix 107 + rustPlatform.buildRustPackage (finalAttrs: { 108 pname = "broot"; 109 version = "1.2.0"; 110 111 src = fetchCrate { 112 + inherit (finalAttrs) pname version; 113 hash = "sha256-aDQA4A5mScX9or3Lyiv/5GyAehidnpKKE0grhbP1Ctc="; 114 }; 115 116 useFetchCargoVendor = true; 117 cargoHash = "sha256-iDYh52rj1M5Uupvbx2WeDd/jvQZ+2A50V5rp5e2t7q4="; 118 + cargoDepsName = finalAttrs.pname; 119 120 # ... 121 + }) 122 ``` 123 124 ### Importing a `Cargo.lock` file {#importing-a-cargo.lock-file} ··· 184 specified in the `outputHashes` attribute. For example: 185 186 ```nix 187 + rustPlatform.buildRustPackage { 188 pname = "myproject"; 189 version = "1.0.0"; 190 ··· 209 avoid having to specify `outputHashes`. For example: 210 211 ```nix 212 + rustPlatform.buildRustPackage { 213 pname = "myproject"; 214 version = "1.0.0"; 215 ··· 235 For example: 236 237 ```nix 238 + rustPlatform.buildRustPackage { 239 pname = "myproject"; 240 version = "1.0.0"; 241 ··· 427 the `cargoPatches` attribute to update or add it. 428 429 ```nix 430 + rustPlatform.buildRustPackage { 431 # ... 
432 cargoPatches = [ 433 # a patch file to add/update Cargo.lock in the source code ··· 575 576 src = fetchFromGitHub { 577 owner = "huggingface"; 578 + repo = "tokenizers"; 579 + tag = "python-v${version}"; 580 hash = "sha256-rQ2hRV52naEf6PvRsWVCTN7B1oXAQGmnpJw4iIdhamw="; 581 }; 582 ··· 665 src = fetchFromGitHub { 666 owner = "Qiskit"; 667 repo = "retworkx"; 668 + tag = version; 669 hash = "sha256-11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20="; 670 }; 671 ··· 705 tinysparql, 706 }: 707 708 + stdenv.mkDerivation (finalAttrs: { 709 pname = "health"; 710 version = "0.95.0"; 711 ··· 713 domain = "gitlab.gnome.org"; 714 owner = "World"; 715 repo = "health"; 716 + tag = finalAttrs.version; 717 hash = "sha256-PrNPprSS98yN8b8yw2G6hzTSaoE65VbsM3q7FVB4mds="; 718 }; 719 720 cargoDeps = rustPlatform.fetchCargoVendor { 721 + inherit (finalAttrs) pname version src; 722 hash = "sha256-eR1ZGtTZQNhofFUEjI7IX16sMKPJmAl7aIFfPJukecg="; 723 }; 724 ··· 740 ]; 741 742 # ... 743 + }) 744 ``` 745 746 ## `buildRustCrate`: Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo} ··· 1000 }; 1001 in 1002 1003 + rustPlatform.buildRustPackage (finalAttrs: { 1004 pname = "ripgrep"; 1005 version = "14.1.1"; 1006 1007 src = fetchFromGitHub { 1008 owner = "BurntSushi"; 1009 repo = "ripgrep"; 1010 + tag = finalAttrs.version; 1011 hash = "sha256-gyWnahj1A+iXUQlQ1O1H1u7K5euYQOld9qWm99Vjaeg="; 1012 }; 1013 1014 useFetchCargoVendor = true; 1015 cargoHash = "sha256-9atn5qyBDy4P6iUoHFhg+TV6Ur71fiah4oTJbBMeEy4="; 1016 1017 + # Tests require network access. Skipping. 1018 doCheck = false; 1019 1020 meta = { ··· 1026 ]; 1027 maintainers = with lib.maintainers; [ ]; 1028 }; 1029 + }) 1030 ``` 1031 1032 Follow the below steps to try that snippet.
+9 -5
doc/languages-frameworks/swift.section.md
··· 82 generated = swiftpm2nix.helpers ./nix; 83 in 84 85 - stdenv.mkDerivation rec { 86 pname = "myproject"; 87 version = "0.0.0"; 88 89 src = fetchFromGitHub { 90 owner = "nixos"; 91 - repo = pname; 92 - rev = version; 93 - hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; 94 }; 95 96 # Including SwiftPM as a nativeBuildInput provides a buildPhase for you. ··· 106 configurePhase = generated.configure; 107 108 installPhase = '' 109 # This is a special function that invokes swiftpm to find the location 110 # of the binaries it produced. 111 binPath="$(swiftpmBinPath)" 112 # Now perform any installation steps. 113 mkdir -p $out/bin 114 cp $binPath/myproject $out/bin/ 115 ''; 116 - } 117 ``` 118 119 ### Custom build flags {#ssec-swiftpm-custom-build-flags}
··· 82 generated = swiftpm2nix.helpers ./nix; 83 in 84 85 + stdenv.mkDerivation (finalAttrs: { 86 pname = "myproject"; 87 version = "0.0.0"; 88 89 src = fetchFromGitHub { 90 owner = "nixos"; 91 + repo = "myproject"; 92 + tag = finalAttrs.version; 93 + hash = ""; 94 }; 95 96 # Including SwiftPM as a nativeBuildInput provides a buildPhase for you. ··· 106 configurePhase = generated.configure; 107 108 installPhase = '' 109 + runHook preInstall 110 + 111 # This is a special function that invokes swiftpm to find the location 112 # of the binaries it produced. 113 binPath="$(swiftpmBinPath)" 114 # Now perform any installation steps. 115 mkdir -p $out/bin 116 cp $binPath/myproject $out/bin/ 117 + 118 + runHook postInstall 119 ''; 120 + }) 121 ``` 122 123 ### Custom build flags {#ssec-swiftpm-custom-build-flags}
+4 -4
doc/languages-frameworks/texlive.section.md
··· 94 - TeX Live packages are also available under `texlive.pkgs` as derivations with outputs `out`, `tex`, `texdoc`, `texsource`, `tlpkg`, `man`, `info`. They cannot be installed outside of `texlive.combine` but are available for other uses. To repackage a font, for instance, use 95 96 ```nix 97 - stdenvNoCC.mkDerivation rec { 98 src = texlive.pkgs.iwona; 99 dontUnpack = true; 100 101 - inherit (src) pname version; 102 103 installPhase = '' 104 runHook preInstall 105 install -Dm644 $src/fonts/opentype/nowacki/iwona/*.otf -t $out/share/fonts/opentype 106 runHook postInstall 107 ''; 108 - } 109 ``` 110 111 See `biber`, `iwona` for complete examples. ··· 173 (writeShellScript "force-tex-output.sh" '' 174 out="''${tex-}" 175 '') 176 ]; 177 178 dontConfigure = true; ··· 184 latex foiltex.ins 185 186 # Generate the documentation 187 - export HOME=. 188 latexmk -pdf foiltex.dtx 189 190 runHook postBuild
··· 94 - TeX Live packages are also available under `texlive.pkgs` as derivations with outputs `out`, `tex`, `texdoc`, `texsource`, `tlpkg`, `man`, `info`. They cannot be installed outside of `texlive.combine` but are available for other uses. To repackage a font, for instance, use 95 96 ```nix 97 + stdenvNoCC.mkDerivation (finalAttrs: { 98 src = texlive.pkgs.iwona; 99 dontUnpack = true; 100 101 + inherit (finalAttrs.src) pname version; 102 103 installPhase = '' 104 runHook preInstall 105 install -Dm644 $src/fonts/opentype/nowacki/iwona/*.otf -t $out/share/fonts/opentype 106 runHook postInstall 107 ''; 108 + }) 109 ``` 110 111 See `biber`, `iwona` for complete examples. ··· 173 (writeShellScript "force-tex-output.sh" '' 174 out="''${tex-}" 175 '') 176 + writableTmpDirAsHomeHook # Need a writable $HOME for latexmk 177 ]; 178 179 dontConfigure = true; ··· 185 latex foiltex.ins 186 187 # Generate the documentation 188 latexmk -pdf foiltex.dtx 189 190 runHook postBuild
+65 -66
doc/packages/emacs.section.md
··· 28 29 ```nix 30 { 31 - packageOverrides = 32 - pkgs: with pkgs; rec { 33 - myEmacsConfig = writeText "default.el" '' 34 - (eval-when-compile 35 - (require 'use-package)) 36 37 - ;; load some packages 38 39 - (use-package company 40 - :bind ("<C-tab>" . company-complete) 41 - :diminish company-mode 42 - :commands (company-mode global-company-mode) 43 - :defer 1 44 - :config 45 - (global-company-mode)) 46 47 - (use-package counsel 48 - :commands (counsel-descbinds) 49 - :bind (([remap execute-extended-command] . counsel-M-x) 50 - ("C-x C-f" . counsel-find-file) 51 - ("C-c g" . counsel-git) 52 - ("C-c j" . counsel-git-grep) 53 - ("C-c k" . counsel-ag) 54 - ("C-x l" . counsel-locate) 55 - ("M-y" . counsel-yank-pop))) 56 57 - (use-package flycheck 58 - :defer 2 59 - :config (global-flycheck-mode)) 60 61 - (use-package ivy 62 - :defer 1 63 - :bind (("C-c C-r" . ivy-resume) 64 - ("C-x C-b" . ivy-switch-buffer) 65 - :map ivy-minibuffer-map 66 - ("C-j" . ivy-call)) 67 - :diminish ivy-mode 68 - :commands ivy-mode 69 - :config 70 - (ivy-mode 1)) 71 72 - (use-package magit 73 - :defer 74 - :if (executable-find "git") 75 - :bind (("C-x g" . magit-status) 76 - ("C-x G" . magit-dispatch-popup)) 77 - :init 78 - (setq magit-completing-read-function 'ivy-completing-read)) 79 80 - (use-package projectile 81 - :commands projectile-mode 82 - :bind-keymap ("C-c p" . projectile-command-map) 83 - :defer 5 84 - :config 85 - (projectile-global-mode)) 86 - ''; 87 88 - myEmacs = emacs.pkgs.withPackages ( 89 - epkgs: 90 - (with epkgs.melpaStablePackages; [ 91 - (runCommand "default.el" { } '' 92 - mkdir -p $out/share/emacs/site-lisp 93 - cp ${myEmacsConfig} $out/share/emacs/site-lisp/default.el 94 - '') 95 - company 96 - counsel 97 - flycheck 98 - ivy 99 - magit 100 - projectile 101 - use-package 102 - ]) 103 - ); 104 - }; 105 } 106 ``` 107
··· 28 29 ```nix 30 { 31 + packageOverrides = pkgs: { 32 + myEmacsConfig = pkgs.writeText "default.el" '' 33 + (eval-when-compile 34 + (require 'use-package)) 35 36 + ;; load some packages 37 38 + (use-package company 39 + :bind ("<C-tab>" . company-complete) 40 + :diminish company-mode 41 + :commands (company-mode global-company-mode) 42 + :defer 1 43 + :config 44 + (global-company-mode)) 45 46 + (use-package counsel 47 + :commands (counsel-descbinds) 48 + :bind (([remap execute-extended-command] . counsel-M-x) 49 + ("C-x C-f" . counsel-find-file) 50 + ("C-c g" . counsel-git) 51 + ("C-c j" . counsel-git-grep) 52 + ("C-c k" . counsel-ag) 53 + ("C-x l" . counsel-locate) 54 + ("M-y" . counsel-yank-pop))) 55 56 + (use-package flycheck 57 + :defer 2 58 + :config (global-flycheck-mode)) 59 60 + (use-package ivy 61 + :defer 1 62 + :bind (("C-c C-r" . ivy-resume) 63 + ("C-x C-b" . ivy-switch-buffer) 64 + :map ivy-minibuffer-map 65 + ("C-j" . ivy-call)) 66 + :diminish ivy-mode 67 + :commands ivy-mode 68 + :config 69 + (ivy-mode 1)) 70 71 + (use-package magit 72 + :defer 73 + :if (executable-find "git") 74 + :bind (("C-x g" . magit-status) 75 + ("C-x G" . magit-dispatch-popup)) 76 + :init 77 + (setq magit-completing-read-function 'ivy-completing-read)) 78 79 + (use-package projectile 80 + :commands projectile-mode 81 + :bind-keymap ("C-c p" . projectile-command-map) 82 + :defer 5 83 + :config 84 + (projectile-global-mode)) 85 + ''; 86 87 + myEmacs = emacs.pkgs.withPackages ( 88 + epkgs: 89 + (with epkgs.melpaStablePackages; [ 90 + (runCommand "default.el" { } '' 91 + mkdir -p $out/share/emacs/site-lisp 92 + cp ${myEmacsConfig} $out/share/emacs/site-lisp/default.el 93 + '') 94 + company 95 + counsel 96 + flycheck 97 + ivy 98 + magit 99 + projectile 100 + use-package 101 + ]) 102 + ); 103 + }; 104 } 105 ``` 106
+3
doc/redirects.json
··· 431 "typst-package-scope-and-usage": [ 432 "index.html#typst-package-scope-and-usage" 433 ], 434 "variables-specifying-dependencies": [ 435 "index.html#variables-specifying-dependencies" 436 ],
··· 431 "typst-package-scope-and-usage": [ 432 "index.html#typst-package-scope-and-usage" 433 ], 434 + "var-meta-teams": [ 435 + "index.html#var-meta-teams" 436 + ], 437 "variables-specifying-dependencies": [ 438 "index.html#variables-specifying-dependencies" 439 ],
+4
doc/stdenv/meta.chapter.md
··· 91 92 A list of the maintainers of this Nix expression. Maintainers are defined in [`nixpkgs/maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix). There is no restriction to becoming a maintainer, just add yourself to that list in a separate commit titled “maintainers: add alice” in the same pull request, and reference maintainers with `maintainers = with lib.maintainers; [ alice bob ]`. 93 94 ### `mainProgram` {#var-meta-mainProgram} 95 96 The name of the main binary for the package. This affects the binary `nix run` executes. Example: `"rg"`
··· 91 92 A list of the maintainers of this Nix expression. Maintainers are defined in [`nixpkgs/maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix). There is no restriction to becoming a maintainer, just add yourself to that list in a separate commit titled “maintainers: add alice” in the same pull request, and reference maintainers with `maintainers = with lib.maintainers; [ alice bob ]`. 93 94 + ### `teams` {#var-meta-teams} 95 + 96 + A list of the teams of this Nix expression. Teams are defined in [`nixpkgs/maintainers/team-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/team-list.nix), and can be defined in a package with `meta.teams = with lib.teams; [ team1 team2 ]`. 97 + 98 ### `mainProgram` {#var-meta-mainProgram} 99 100 The name of the main binary for the package. This affects the binary `nix run` executes. Example: `"rg"`
+19 -7
doc/stdenv/stdenv.chapter.md
··· 20 **Since [RFC 0035](https://github.com/NixOS/rfcs/pull/35), this is preferred for packages in Nixpkgs**, as it allows us to reuse the version easily: 21 22 ```nix 23 - stdenv.mkDerivation rec { 24 pname = "libfoo"; 25 version = "1.2.3"; 26 src = fetchurl { 27 - url = "http://example.org/libfoo-source-${version}.tar.bz2"; 28 hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q="; 29 }; 30 - } 31 ``` 32 33 Many packages have dependencies that are not provided in the standard environment. It’s usually sufficient to specify those dependencies in the `buildInputs` attribute: ··· 53 stdenv.mkDerivation { 54 pname = "fnord"; 55 version = "4.5"; 56 # ... 57 buildPhase = '' 58 gcc foo.c -o foo 59 ''; 60 installPhase = '' 61 mkdir -p $out/bin 62 cp foo $out/bin 63 ''; 64 } 65 ``` ··· 212 213 Consider for example this simplified derivation for `solo5`, a sandboxing tool: 214 ```nix 215 - stdenv.mkDerivation rec { 216 pname = "solo5"; 217 version = "0.7.5"; 218 219 src = fetchurl { 220 - url = "https://github.com/Solo5/solo5/releases/download/v${version}/solo5-v${version}.tar.gz"; 221 hash = "sha256-viwrS9lnaU8sTGuzK/+L/PlMM/xRRtgVuK5pixVeDEw="; 222 }; 223 ··· 225 makeWrapper 226 pkg-config 227 ]; 228 buildInputs = [ libseccomp ]; 229 230 postInstall = '' ··· 249 util-linux 250 qemu 251 ]; 252 - checkPhase = ''[elided] ''; 253 - } 254 ``` 255 256 - `makeWrapper` is a setup hook, i.e., a shell script sourced by the generic builder of `stdenv`.
··· 20 **Since [RFC 0035](https://github.com/NixOS/rfcs/pull/35), this is preferred for packages in Nixpkgs**, as it allows us to reuse the version easily: 21 22 ```nix 23 + stdenv.mkDerivation (finalAttrs: { 24 pname = "libfoo"; 25 version = "1.2.3"; 26 src = fetchurl { 27 + url = "http://example.org/libfoo-source-${finalAttrs.version}.tar.bz2"; 28 hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q="; 29 }; 30 + }) 31 ``` 32 33 Many packages have dependencies that are not provided in the standard environment. It’s usually sufficient to specify those dependencies in the `buildInputs` attribute: ··· 53 stdenv.mkDerivation { 54 pname = "fnord"; 55 version = "4.5"; 56 + 57 # ... 58 + 59 buildPhase = '' 60 + runHook preBuild 61 + 62 gcc foo.c -o foo 63 + 64 + runHook postBuild 65 ''; 66 + 67 installPhase = '' 68 + runHook preInstall 69 + 70 mkdir -p $out/bin 71 cp foo $out/bin 72 + 73 + runHook postInstall 74 ''; 75 } 76 ``` ··· 223 224 Consider for example this simplified derivation for `solo5`, a sandboxing tool: 225 ```nix 226 + stdenv.mkDerivation (finalAttrs: { 227 pname = "solo5"; 228 version = "0.7.5"; 229 230 src = fetchurl { 231 + url = "https://github.com/Solo5/solo5/releases/download/v${finalAttrs.version}/solo5-v${finalAttrs.version}.tar.gz"; 232 hash = "sha256-viwrS9lnaU8sTGuzK/+L/PlMM/xRRtgVuK5pixVeDEw="; 233 }; 234 ··· 236 makeWrapper 237 pkg-config 238 ]; 239 + 240 buildInputs = [ libseccomp ]; 241 242 postInstall = '' ··· 261 util-linux 262 qemu 263 ]; 264 + checkPhase = ''[elided]''; 265 + }) 266 ``` 267 268 - `makeWrapper` is a setup hook, i.e., a shell script sourced by the generic builder of `stdenv`.
+79 -81
doc/using/configuration.chapter.md
··· 300 301 ```nix 302 { 303 - packageOverrides = 304 - pkgs: with pkgs; rec { 305 - myProfile = writeText "my-profile" '' 306 - export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin 307 - export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man 308 - ''; 309 - myPackages = pkgs.buildEnv { 310 - name = "my-packages"; 311 - paths = [ 312 - (runCommand "profile" { } '' 313 - mkdir -p $out/etc/profile.d 314 - cp ${myProfile} $out/etc/profile.d/my-profile.sh 315 - '') 316 - aspell 317 - bc 318 - coreutils 319 - ffmpeg 320 - man 321 - nix 322 - emscripten 323 - jq 324 - nox 325 - silver-searcher 326 - ]; 327 - pathsToLink = [ 328 - "/share/man" 329 - "/share/doc" 330 - "/bin" 331 - "/etc" 332 - ]; 333 - extraOutputsToInstall = [ 334 - "man" 335 - "doc" 336 - ]; 337 - }; 338 }; 339 } 340 ``` 341 ··· 360 361 ```nix 362 { 363 - packageOverrides = 364 - pkgs: with pkgs; rec { 365 - myProfile = writeText "my-profile" '' 366 - export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin 367 - export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man 368 - export INFOPATH=$HOME/.nix-profile/share/info:/nix/var/nix/profiles/default/share/info:/usr/share/info 369 ''; 370 - myPackages = pkgs.buildEnv { 371 - name = "my-packages"; 372 - paths = [ 373 - (runCommand "profile" { } '' 374 - mkdir -p $out/etc/profile.d 375 - cp ${myProfile} $out/etc/profile.d/my-profile.sh 376 - '') 377 - aspell 378 - bc 379 - coreutils 380 - ffmpeg 381 - man 382 - nix 383 - emscripten 384 - jq 385 - nox 386 - silver-searcher 387 - texinfoInteractive 388 - ]; 389 - pathsToLink = [ 390 - "/share/man" 391 - "/share/doc" 392 - "/share/info" 393 - "/bin" 394 - "/etc" 395 - ]; 396 - extraOutputsToInstall = [ 397 - "man" 398 - "doc" 399 - "info" 400 - ]; 401 - postBuild = '' 402 - if [ -x $out/bin/install-info -a -w $out/share/info ]; then 403 - 
shopt -s nullglob 404 - for i in $out/share/info/*.info $out/share/info/*.info.gz; do 405 - $out/bin/install-info $i $out/share/info/dir 406 - done 407 - fi 408 - ''; 409 - }; 410 }; 411 } 412 ``` 413
··· 300 301 ```nix 302 { 303 + packageOverrides = pkgs: { 304 + myProfile = pkgs.writeText "my-profile" '' 305 + export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin 306 + export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man 307 + ''; 308 + myPackages = pkgs.buildEnv { 309 + name = "my-packages"; 310 + paths = with pkgs; [ 311 + (runCommand "profile" { } '' 312 + mkdir -p $out/etc/profile.d 313 + cp ${myProfile} $out/etc/profile.d/my-profile.sh 314 + '') 315 + aspell 316 + bc 317 + coreutils 318 + ffmpeg 319 + man 320 + nix 321 + emscripten 322 + jq 323 + nox 324 + silver-searcher 325 + ]; 326 + pathsToLink = [ 327 + "/share/man" 328 + "/share/doc" 329 + "/bin" 330 + "/etc" 331 + ]; 332 + extraOutputsToInstall = [ 333 + "man" 334 + "doc" 335 + ]; 336 }; 337 + }; 338 } 339 ``` 340 ··· 359 360 ```nix 361 { 362 + packageOverrides = pkgs: { 363 + myProfile = pkgs.writeText "my-profile" '' 364 + export PATH=$HOME/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/sbin:/bin:/usr/sbin:/usr/bin 365 + export MANPATH=$HOME/.nix-profile/share/man:/nix/var/nix/profiles/default/share/man:/usr/share/man 366 + export INFOPATH=$HOME/.nix-profile/share/info:/nix/var/nix/profiles/default/share/info:/usr/share/info 367 + ''; 368 + myPackages = pkgs.buildEnv { 369 + name = "my-packages"; 370 + paths = with pkgs; [ 371 + (runCommand "profile" { } '' 372 + mkdir -p $out/etc/profile.d 373 + cp ${myProfile} $out/etc/profile.d/my-profile.sh 374 + '') 375 + aspell 376 + bc 377 + coreutils 378 + ffmpeg 379 + man 380 + nix 381 + emscripten 382 + jq 383 + nox 384 + silver-searcher 385 + texinfoInteractive 386 + ]; 387 + pathsToLink = [ 388 + "/share/man" 389 + "/share/doc" 390 + "/share/info" 391 + "/bin" 392 + "/etc" 393 + ]; 394 + extraOutputsToInstall = [ 395 + "man" 396 + "doc" 397 + "info" 398 + ]; 399 + postBuild = '' 400 + if [ -x $out/bin/install-info -a -w $out/share/info ]; then 401 + shopt 
-s nullglob 402 + for i in $out/share/info/*.info $out/share/info/*.info.gz; do 403 + $out/bin/install-info $i $out/share/info/dir 404 + done 405 + fi 406 ''; 407 }; 408 + }; 409 } 410 ``` 411
+6 -6
maintainers/maintainer-list.nix
··· 22870 githubId = 10437171; 22871 keys = [ { fingerprint = "75F0 AB7C FE01 D077 AEE6 CAFD 353E 4A18 EE0F AB72"; } ]; 22872 }; 22873 spacefrogg = { 22874 email = "spacefrogg-nixos@meterriblecrew.net"; 22875 github = "spacefrogg"; ··· 23407 github = "svend"; 23408 githubId = 306190; 23409 name = "Svend Sorensen"; 23410 - }; 23411 - sven-of-cord = { 23412 - email = "sven@cord.com"; 23413 - github = "sven-of-cord"; 23414 - githubId = 98333944; 23415 - name = "Sven Over"; 23416 }; 23417 Svenum = { 23418 email = "s.ziegler@holypenguin.net";
··· 22870 githubId = 10437171; 22871 keys = [ { fingerprint = "75F0 AB7C FE01 D077 AEE6 CAFD 353E 4A18 EE0F AB72"; } ]; 22872 }; 22873 + spacedentist = { 22874 + email = "sp@cedenti.st"; 22875 + github = "spacedentist"; 22876 + githubId = 1536420; 22877 + name = "Sven Over"; 22878 + }; 22879 spacefrogg = { 22880 email = "spacefrogg-nixos@meterriblecrew.net"; 22881 github = "spacefrogg"; ··· 23413 github = "svend"; 23414 githubId = 306190; 23415 name = "Svend Sorensen"; 23416 }; 23417 Svenum = { 23418 email = "s.ziegler@holypenguin.net";
+11
maintainers/scripts/README.md
··· 57 58 [`maintainer-list.nix`]: ../maintainer-list.nix 59 60 61 ## Conventions 62
··· 57 58 [`maintainer-list.nix`]: ../maintainer-list.nix 59 60 + ### `get-maintainer-pings-between.sh` 61 + 62 + Gets which maintainers would be pinged between two Nixpkgs revisions. 63 + Outputs a JSON object on stdout mapping GitHub usernames to the attributes 64 + that they would be getting pinged for. 65 + 66 + Example: 67 + 68 + ```sh 69 + maintainers/scripts/get-maintainer-pings-between.sh HEAD^ HEAD 70 + ``` 71 72 ## Conventions 73
+78
maintainers/scripts/get-maintainer-pings-between.sh
···
··· 1 + #!/usr/bin/env nix-shell 2 + #!nix-shell -i bash -p git jq 3 + 4 + # Outputs a list of maintainers that would be pinged across two nixpkgs revisions. 5 + # Authors: 6 + # Morgan Jones (@numinit) 7 + # Tristan Ross (@RossComputerGuy) 8 + 9 + set -euo pipefail 10 + 11 + if [ $# -lt 2 ]; then 12 + echo "Usage: $0 <rev-from> <rev-to>" >&2 13 + exit 1 14 + fi 15 + 16 + repo="$(git rev-parse --show-toplevel)" 17 + system="$(nix-instantiate --eval --expr builtins.currentSystem)" 18 + rev1="$(git -C "$repo" rev-parse "$1")" 19 + rev2="$(git -C "$repo" rev-parse "$2")" 20 + 21 + echo "Touched files:" >&2 22 + git -C "$repo" diff --name-only "$rev1" "$rev2" \ 23 + | jq --raw-input --slurp 'split("\n")[:-1]' | tee "$TMPDIR/touched-files.json" >&2 24 + 25 + # Runs an eval in the given worktree, outputting the path to $TMPDIR/$1.path. 26 + # $1: The revision SHA. 27 + eval_in_worktree() ( 28 + mkdir -p .worktree 29 + local rev="$1" 30 + local tree=".worktree/$rev" 31 + if [ ! -d "$tree" ]; then 32 + git -C "$repo" worktree add -f -d "$tree" "$rev" >&2 33 + fi 34 + cd "$tree" 35 + 36 + local workdir="$TMPDIR/$rev" 37 + rm -rf "$workdir" 38 + mkdir -p "$workdir" 39 + 40 + nix-build ci -A eval.attrpathsSuperset -o "$workdir/paths" >&2 41 + mkdir -p "$workdir/intermediates" 42 + nix-build ci -A eval.singleSystem \ 43 + --arg evalSystem "$system" \ 44 + --arg attrpathFile "$workdir/paths/paths.json" \ 45 + --arg chunkSize ${CHUNK_SIZE:-10000} \ 46 + -o "$workdir/intermediates/.intermediate-1" >&2 47 + 48 + # eval.combine nix-build needs a directory, not a symlink 49 + cp -RL "$workdir/intermediates/.intermediate-1" "$workdir/intermediates/intermediate-1" 50 + chmod -R +w "$workdir/intermediates/intermediate-1" 51 + rm -rf "$workdir/intermediates/.intermediate-1" 52 + 53 + nix-build ci -A eval.combine \ 54 + --arg resultsDir "$workdir/intermediates" \ 55 + -o "$workdir/result" >&2 56 + ) 57 + 58 + eval_in_worktree "$rev1" & 59 + pid1=$! 
60 + eval_in_worktree "$rev2" & 61 + pid2=$! 62 + 63 + wait $pid1 64 + wait $pid2 65 + 66 + path1="$TMPDIR/$rev1" 67 + path2="$TMPDIR/$rev2" 68 + 69 + # Use the repo this script was executed in to get accurate maintainer info 70 + nix-build "$repo/ci" -A eval.compare \ 71 + --arg beforeResultDir "$path1/result" \ 72 + --arg afterResultDir "$path2/result" \ 73 + --arg touchedFilesJson "$TMPDIR/touched-files.json" \ 74 + --arg byName true \ 75 + -o comparison 76 + 77 + echo "Pinged maintainers (check $repo/comparison for more details)" >&2 78 + jq < comparison/maintainers.json
+1
nixos/doc/manual/development/nixos-tests.chapter.md
··· 10 running-nixos-tests.section.md 11 running-nixos-tests-interactively.section.md 12 linking-nixos-tests-to-packages.section.md 13 ```
··· 10 running-nixos-tests.section.md 11 running-nixos-tests-interactively.section.md 12 linking-nixos-tests-to-packages.section.md 13 + testing-hardware-features.section.md 14 ```
+152
nixos/doc/manual/development/testing-hardware-features.section.md
···
··· 1 + # Testing Hardware Features {#sec-nixos-test-testing-hardware-features} 2 + 3 + This section covers how to test various features using NixOS tests that would 4 + normally only be possible with hardware. It is designed to showcase the NixOS test 5 + framework's flexibility when combined with various hardware simulation libraries 6 + or kernel modules. 7 + 8 + ## Wi-Fi {#sec-nixos-test-wifi} 9 + 10 + Use `services.vwifi` to set up a virtual Wi-Fi physical layer. Create at least two nodes 11 + for this kind of test: one with vwifi active, and either a station or an access point. 12 + Give each a static IP address on the test network so they will never collide. 13 + This module likely supports other topologies too; document them if you make one. 14 + 15 + This NixOS module leverages [vwifi](https://github.com/Raizo62/vwifi). Read the 16 + upstream repository's documentation for more information. 17 + 18 + ### vwifi server {#sec-nixos-test-wifi-vwifi-server} 19 + 20 + This node runs the vwifi server, and otherwise does not interact with the network. 21 + You can run `vwifi-ctrl` on this node to control characteristics of the simulated 22 + physical layer. 23 + 24 + ```nix 25 + airgap = 26 + { config, ... }: 27 + { 28 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 29 + { 30 + address = "192.168.1.2"; 31 + prefixLength = 24; 32 + } 33 + ]; 34 + services.vwifi = { 35 + server = { 36 + enable = true; 37 + ports.tcp = 8212; 38 + # uncomment if you want to enable monitor mode on another node 39 + # ports.spy = 8213; 40 + openFirewall = true; 41 + }; 42 + }; 43 + }; 44 + ``` 45 + 46 + ### AP {#sec-nixos-test-wifi-ap} 47 + 48 + A node like this will act as a wireless access point in infrastructure mode. 49 + 50 + ```nix 51 + ap = 52 + { config, ... 
}: 53 + { 54 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 55 + { 56 + address = "192.168.1.3"; 57 + prefixLength = 24; 58 + } 59 + ]; 60 + services.hostapd = { 61 + enable = true; 62 + radios.wlan0 = { 63 + channel = 1; 64 + networks.wlan0 = { 65 + ssid = "NixOS Test Wi-Fi Network"; 66 + authentication = { 67 + mode = "wpa3-sae"; 68 + saePasswords = [ { password = "supersecret"; } ]; 69 + enableRecommendedPairwiseCiphers = true; 70 + }; 71 + }; 72 + }; 73 + }; 74 + services.vwifi = { 75 + module = { 76 + enable = true; 77 + macPrefix = "74:F8:F6:00:01"; 78 + }; 79 + client = { 80 + enable = true; 81 + serverAddress = "192.168.1.2"; 82 + }; 83 + }; 84 + }; 85 + ``` 86 + 87 + ### Station {#sec-nixos-test-wifi-station} 88 + 89 + A node like this acts as a wireless client. 90 + 91 + ```nix 92 + station = 93 + { config, ... }: 94 + { 95 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 96 + { 97 + address = "192.168.1.5"; 98 + prefixLength = 24; 99 + } 100 + ]; 101 + networking.wireless = { 102 + # No, really, we want it enabled! 103 + enable = lib.mkOverride 0 true; 104 + interfaces = [ "wlan0" ]; 105 + networks = { 106 + "NixOS Test Wi-Fi Network" = { 107 + psk = "supersecret"; 108 + authProtocols = [ "SAE" ]; 109 + }; 110 + }; 111 + }; 112 + services.vwifi = { 113 + module = { 114 + enable = true; 115 + macPrefix = "74:F8:F6:00:02"; 116 + }; 117 + client = { 118 + enable = true; 119 + serverAddress = "192.168.1.2"; 120 + }; 121 + }; 122 + }; 123 + ``` 124 + 125 + ### Monitor {#sec-nixos-test-wifi-monitor} 126 + 127 + When the monitor mode interface is enabled, this node will receive 128 + all packets broadcast by all other nodes through the spy interface. 129 + 130 + ```nix 131 + monitor = 132 + { config, ...
}: 133 + { 134 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 135 + { 136 + address = "192.168.1.4"; 137 + prefixLength = 24; 138 + } 139 + ]; 140 + 141 + services.vwifi = { 142 + module = { 143 + enable = true; 144 + macPrefix = "74:F8:F6:00:03"; 145 + }; 146 + client = { 147 + enable = true; 148 + spy = true; 149 + serverAddress = "192.168.1.2"; 150 + }; 151 + }; 152 + }; 153 + ```
+18
nixos/doc/manual/redirects.json
··· 77 "sec-mattermost-plugins-build": [ 78 "index.html#sec-mattermost-plugins-build" 79 ], 80 "sec-obtaining": [ 81 "index.html#sec-obtaining" 82 ], ··· 1894 ], 1895 "sec-linking-nixos-tests-to-packages": [ 1896 "index.html#sec-linking-nixos-tests-to-packages" 1897 ], 1898 "chap-developing-the-test-driver": [ 1899 "index.html#chap-developing-the-test-driver"
··· 77 "sec-mattermost-plugins-build": [ 78 "index.html#sec-mattermost-plugins-build" 79 ], 80 + "sec-nixos-test-wifi": [ 81 + "index.html#sec-nixos-test-wifi" 82 + ], 83 + "sec-nixos-test-wifi-ap": [ 84 + "index.html#sec-nixos-test-wifi-ap" 85 + ], 86 + "sec-nixos-test-wifi-monitor": [ 87 + "index.html#sec-nixos-test-wifi-monitor" 88 + ], 89 + "sec-nixos-test-wifi-station": [ 90 + "index.html#sec-nixos-test-wifi-station" 91 + ], 92 + "sec-nixos-test-wifi-vwifi-server": [ 93 + "index.html#sec-nixos-test-wifi-vwifi-server" 94 + ], 95 "sec-obtaining": [ 96 "index.html#sec-obtaining" 97 ], ··· 1909 ], 1910 "sec-linking-nixos-tests-to-packages": [ 1911 "index.html#sec-linking-nixos-tests-to-packages" 1912 + ], 1913 + "sec-nixos-test-testing-hardware-features": [ 1914 + "index.html#sec-nixos-test-testing-hardware-features" 1915 ], 1916 "chap-developing-the-test-driver": [ 1917 "index.html#chap-developing-the-test-driver"
+6 -1
nixos/doc/manual/release-notes/rl-2505.section.md
··· 36 - `services.mattermost.listenAddress` has been split into {option}`services.mattermost.host` and {option}`services.mattermost.port`. If your `listenAddress` contained a port, you will need to edit your configuration. 37 - Mattermost now supports peer authentication on both MySQL and Postgres database backends. Updating {option}`system.stateVersion` to 25.05 or later will result in peer authentication being used by default if the Mattermost server would otherwise be connecting to localhost. This is the recommended configuration. 38 - The Mattermost module will produce eval warnings if a database password would end up in the Nix store, and recommend alternatives such as peer authentication or using the environment file. 39 - - Mattermost's entire test suite is now enabled by default, which will extend build time from sources by up to an hour. A `withoutTests` passthru has been added in case you want to skip it. 40 - We now support `mmctl` for Mattermost administration if both {option}`services.mattermost.socket.enable` and {option}`services.mattermost.socket.export` are set, which export the Mattermost control socket path into the system environment. 41 - A new `pkgs.mattermost.buildPlugin` function has been added, which allows plugins to be built from source, including webapp frontends with a supported package-lock.json. See the Mattermost NixOS test and [manual](https://nixos.org/manual/nixpkgs/unstable/#sec-mattermost-plugins-build) for an example. 42 - Note that the Mattermost module will create an account _without_ a well-known UID if the username differs from the default (`mattermost`). If you used Mattermost with a nonstandard username, you may want to review the module changes before upgrading. ··· 58 - [scanservjs](https://github.com/sbs20/scanservjs/), a web UI for SANE scanners. Available at [services.scanservjs](#opt-services.scanservjs.enable). 59 60 - [Kimai](https://www.kimai.org/), a web-based multi-user time-tracking application. 
Available as [services.kimai](options.html#opt-services.kimai). 61 62 - [Homer](https://homer-demo.netlify.app/), a very simple static homepage for your server. Available as [services.homer](options.html#opt-services.homer). 63 ··· 526 - `bind.cacheNetworks` now only controls access for recursive queries, where it previously controlled access for all queries. 527 528 - [`services.mongodb.enableAuth`](#opt-services.mongodb.enableAuth) now uses the newer [mongosh](https://github.com/mongodb-js/mongosh) shell instead of the legacy shell to configure the initial superuser. You can configure the mongosh package to use through the [`services.mongodb.mongoshPackage`](#opt-services.mongodb.mongoshPackage) option. 529 530 - The paperless module now has an option for regular automatic export of 531 documents data using the integrated document exporter.
··· 36 - `services.mattermost.listenAddress` has been split into {option}`services.mattermost.host` and {option}`services.mattermost.port`. If your `listenAddress` contained a port, you will need to edit your configuration. 37 - Mattermost now supports peer authentication on both MySQL and Postgres database backends. Updating {option}`system.stateVersion` to 25.05 or later will result in peer authentication being used by default if the Mattermost server would otherwise be connecting to localhost. This is the recommended configuration. 38 - The Mattermost module will produce eval warnings if a database password would end up in the Nix store, and recommend alternatives such as peer authentication or using the environment file. 39 - We now support `mmctl` for Mattermost administration if both {option}`services.mattermost.socket.enable` and {option}`services.mattermost.socket.export` are set, which export the Mattermost control socket path into the system environment. 40 - A new `pkgs.mattermost.buildPlugin` function has been added, which allows plugins to be built from source, including webapp frontends with a supported package-lock.json. See the Mattermost NixOS test and [manual](https://nixos.org/manual/nixpkgs/unstable/#sec-mattermost-plugins-build) for an example. 41 - Note that the Mattermost module will create an account _without_ a well-known UID if the username differs from the default (`mattermost`). If you used Mattermost with a nonstandard username, you may want to review the module changes before upgrading. ··· 57 - [scanservjs](https://github.com/sbs20/scanservjs/), a web UI for SANE scanners. Available at [services.scanservjs](#opt-services.scanservjs.enable). 58 59 - [Kimai](https://www.kimai.org/), a web-based multi-user time-tracking application. Available as [services.kimai](options.html#opt-services.kimai). 60 + 61 + - [Kismet](https://www.kismetwireless.net/), a Wi-Fi, Bluetooth, and RF monitoring application supporting a wide range of hardware. 
Available as {option}`services.kismet`. 62 + 63 + - [vwifi](https://github.com/Raizo62/vwifi), a Wi-Fi simulator daemon leveraging the `mac80211_hwsim` and `vhost_vsock` kernel modules for efficient simulation of multi-node Wi-Fi networks. Available as {option}`services.vwifi`. 64 65 - [Homer](https://homer-demo.netlify.app/), a very simple static homepage for your server. Available as [services.homer](options.html#opt-services.homer). 66 ··· 529 - `bind.cacheNetworks` now only controls access for recursive queries, where it previously controlled access for all queries. 530 531 - [`services.mongodb.enableAuth`](#opt-services.mongodb.enableAuth) now uses the newer [mongosh](https://github.com/mongodb-js/mongosh) shell instead of the legacy shell to configure the initial superuser. You can configure the mongosh package to use through the [`services.mongodb.mongoshPackage`](#opt-services.mongodb.mongoshPackage) option. 532 + 533 + - There is a new set of NixOS test tools for testing virtual Wi-Fi networks in many different topologies. See the {option}`services.vwifi` module, {option}`services.kismet` NixOS test, and [manual](https://nixos.org/manual/nixpkgs/unstable/#sec-nixos-test-wifi) for documentation and examples. 534 535 - The paperless module now has an option for regular automatic export of 536 documents data using the integrated document exporter.
+2
nixos/modules/module-list.nix
··· 1175 ./services/networking/kea.nix 1176 ./services/networking/keepalived/default.nix 1177 ./services/networking/keybase.nix 1178 ./services/networking/knot.nix 1179 ./services/networking/kresd.nix 1180 ./services/networking/lambdabot.nix ··· 1352 ./services/networking/veilid.nix 1353 ./services/networking/vdirsyncer.nix 1354 ./services/networking/vsftpd.nix 1355 ./services/networking/wasabibackend.nix 1356 ./services/networking/websockify.nix 1357 ./services/networking/wg-access-server.nix
··· 1175 ./services/networking/kea.nix 1176 ./services/networking/keepalived/default.nix 1177 ./services/networking/keybase.nix 1178 + ./services/networking/kismet.nix 1179 ./services/networking/knot.nix 1180 ./services/networking/kresd.nix 1181 ./services/networking/lambdabot.nix ··· 1353 ./services/networking/veilid.nix 1354 ./services/networking/vdirsyncer.nix 1355 ./services/networking/vsftpd.nix 1356 + ./services/networking/vwifi.nix 1357 ./services/networking/wasabibackend.nix 1358 ./services/networking/websockify.nix 1359 ./services/networking/wg-access-server.nix
+459
nixos/modules/services/networking/kismet.nix
···
··· 1 + { 2 + config, 3 + lib, 4 + pkgs, 5 + ... 6 + }: 7 + 8 + let 9 + inherit (lib.trivial) isFloat isInt isBool; 10 + inherit (lib.modules) mkIf; 11 + inherit (lib.options) 12 + literalExpression 13 + mkOption 14 + mkPackageOption 15 + mkEnableOption 16 + ; 17 + inherit (lib.strings) 18 + isString 19 + escapeShellArg 20 + escapeShellArgs 21 + concatMapStringsSep 22 + concatMapAttrsStringSep 23 + replaceStrings 24 + substring 25 + stringLength 26 + hasInfix 27 + hasSuffix 28 + typeOf 29 + match 30 + ; 31 + inherit (lib.lists) all isList flatten; 32 + inherit (lib.attrsets) 33 + attrsToList 34 + filterAttrs 35 + optionalAttrs 36 + mapAttrs' 37 + mapAttrsToList 38 + nameValuePair 39 + ; 40 + inherit (lib.generators) toKeyValue; 41 + inherit (lib) types; 42 + 43 + # Deeply checks types for a given type function. Calls `override` with type and value. 44 + deep = 45 + func: override: type: 46 + let 47 + prev = func type; 48 + in 49 + prev 50 + // { 51 + check = value: prev.check value && (override type value); 52 + }; 53 + 54 + # Deep listOf. 55 + listOf' = deep types.listOf (type: value: all type.check value); 56 + 57 + # Deep attrsOf. 58 + attrsOf' = deep types.attrsOf (type: value: all (item: type.check item.value) (attrsToList value)); 59 + 60 + # Kismet config atoms. 61 + atom = 62 + with types; 63 + oneOf [ 64 + number 65 + bool 66 + str 67 + ]; 68 + 69 + # Composite types. 70 + listOfAtom = listOf' atom; 71 + atomOrList = with types; either atom listOfAtom; 72 + lists = listOf' atomOrList; 73 + kvPair = attrsOf' atomOrList; 74 + kvPairs = listOf' kvPair; 75 + 76 + # Options that eval to a string with a header (foo:key=value) 77 + headerKvPair = attrsOf' (attrsOf' atomOrList); 78 + headerKvPairs = attrsOf' (listOf' (attrsOf' atomOrList)); 79 + 80 + # Toplevel config type. 
81 + topLevel = 82 + let 83 + topLevel' = 84 + with types; 85 + oneOf [ 86 + headerKvPairs 87 + headerKvPair 88 + kvPairs 89 + kvPair 90 + listOfAtom 91 + lists 92 + atom 93 + ]; 94 + in 95 + topLevel' 96 + // { 97 + description = "Kismet config stanza"; 98 + }; 99 + 100 + # Throws invalid. 101 + invalid = atom: throw "invalid value '${toString atom}' of type '${typeOf atom}'"; 102 + 103 + # Converts an atom. 104 + mkAtom = 105 + atom: 106 + if isString atom then 107 + if hasInfix "\"" atom || hasInfix "," atom then 108 + ''"${replaceStrings [ ''"'' ] [ ''\"'' ] atom}"'' 109 + else 110 + atom 111 + else if isFloat atom || isInt atom || isBool atom then 112 + toString atom 113 + else 114 + invalid atom; 115 + 116 + # Converts an inline atom or list to a string. 117 + mkAtomOrListInline = 118 + atomOrList: 119 + if isList atomOrList then 120 + mkAtom "${concatMapStringsSep "," mkAtom atomOrList}" 121 + else 122 + mkAtom atomOrList; 123 + 124 + # Converts an out of line atom or list to a string. 125 + mkAtomOrList = 126 + atomOrList: 127 + if isList atomOrList then 128 + "${concatMapStringsSep "," mkAtomOrListInline atomOrList}" 129 + else 130 + mkAtom atomOrList; 131 + 132 + # Throws if the string matches the given regex. 133 + deny = 134 + regex: str: 135 + assert (match regex str) == null; 136 + str; 137 + 138 + # Converts a set of k/v pairs. 139 + convertKv = concatMapAttrsStringSep "," ( 140 + name: value: "${mkAtom (deny "=" name)}=${mkAtomOrListInline value}" 141 + ); 142 + 143 + # Converts k/v pairs with a header. 144 + convertKvWithHeader = header: attrs: "${mkAtom (deny ":" header)}:${convertKv attrs}"; 145 + 146 + # Converts the entire config. 147 + convertConfig = mapAttrs' ( 148 + name: value: 149 + let 150 + # Convert foo' into 'foo+' for support for '+=' syntax. 151 + newName = if hasSuffix "'" name then substring 0 (stringLength name - 1) name + "+" else name; 152 + 153 + # Get the stringified value. 
154 + newValue = 155 + if headerKvPairs.check value then 156 + flatten ( 157 + mapAttrsToList (header: values: (map (value: convertKvWithHeader header value) values)) value 158 + ) 159 + else if headerKvPair.check value then 160 + mapAttrsToList convertKvWithHeader value 161 + else if kvPairs.check value then 162 + map convertKv value 163 + else if kvPair.check value then 164 + convertKv value 165 + else if listOfAtom.check value then 166 + mkAtomOrList value 167 + else if lists.check value then 168 + map mkAtomOrList value 169 + else if atom.check value then 170 + mkAtom value 171 + else 172 + invalid value; 173 + in 174 + nameValuePair newName newValue 175 + ); 176 + 177 + mkKismetConf = 178 + options: 179 + (toKeyValue { listsAsDuplicateKeys = true; }) ( 180 + filterAttrs (_: value: value != null) (convertConfig options) 181 + ); 182 + 183 + cfg = config.services.kismet; 184 + in 185 + { 186 + options.services.kismet = { 187 + enable = mkEnableOption "kismet"; 188 + package = mkPackageOption pkgs "kismet" { }; 189 + user = mkOption { 190 + description = "The user to run Kismet as."; 191 + type = types.str; 192 + default = "kismet"; 193 + }; 194 + group = mkOption { 195 + description = "The group to run Kismet as."; 196 + type = types.str; 197 + default = "kismet"; 198 + }; 199 + serverName = mkOption { 200 + description = "The name of the server."; 201 + type = types.str; 202 + default = "Kismet"; 203 + }; 204 + serverDescription = mkOption { 205 + description = "The description of the server."; 206 + type = types.str; 207 + default = "NixOS Kismet server"; 208 + }; 209 + logTypes = mkOption { 210 + description = "The log types."; 211 + type = with types; listOf str; 212 + default = [ "kismet" ]; 213 + }; 214 + dataDir = mkOption { 215 + description = "The Kismet data directory."; 216 + type = types.path; 217 + default = "/var/lib/kismet"; 218 + }; 219 + httpd = { 220 + enable = mkOption { 221 + description = "True to enable the HTTP server."; 222 + type = 
types.bool; 223 + default = false; 224 + }; 225 + address = mkOption { 226 + description = "The address to listen on. Note that this cannot be a hostname or Kismet will not start."; 227 + type = types.str; 228 + default = "127.0.0.1"; 229 + }; 230 + port = mkOption { 231 + description = "The port to listen on."; 232 + type = types.port; 233 + default = 2501; 234 + }; 235 + }; 236 + settings = mkOption { 237 + description = '' 238 + Options for Kismet. See: 239 + https://www.kismetwireless.net/docs/readme/configuring/configfiles/ 240 + ''; 241 + default = { }; 242 + type = with types; attrsOf topLevel; 243 + example = literalExpression '' 244 + { 245 + /* Examples for atoms */ 246 + # dot11_link_bssts=false 247 + dot11_link_bssts = false; # Boolean 248 + 249 + # dot11_related_bss_window=10000000 250 + dot11_related_bss_window = 10000000; # Integer 251 + 252 + # devicefound=00:11:22:33:44:55 253 + devicefound = "00:11:22:33:44:55"; # String 254 + 255 + # log_types+=wiglecsv 256 + log_types' = "wiglecsv"; 257 + 258 + /* Examples for lists of atoms */ 259 + # wepkey=00:DE:AD:C0:DE:00,FEEDFACE42 260 + wepkey = [ "00:DE:AD:C0:DE:00" "FEEDFACE42" ]; 261 + 262 + # alert=ADHOCCONFLICT,5/min,1/sec 263 + # alert=ADVCRYPTCHANGE,5/min,1/sec 264 + alert = [ 265 + [ "ADHOCCONFLICT" "5/min" "1/sec" ] 266 + [ "ADVCRYPTCHANGE" "5/min" "1/sec" ] 267 + ]; 268 + 269 + /* Examples for sets of atoms */ 270 + # source=wlan0:name=ath11k 271 + source.wlan0 = { name = "ath11k"; }; 272 + 273 + /* Examples with colon-suffixed headers */ 274 + # gps=gpsd:host=localhost,port=2947 275 + gps.gpsd = { 276 + host = "localhost"; 277 + port = 2947; 278 + }; 279 + 280 + # apspoof=Foo1:ssid=Bar1,validmacs="00:11:22:33:44:55,aa:bb:cc:dd:ee:ff" 281 + # apspoof=Foo1:ssid=Bar2,validmacs="01:12:23:34:45:56,ab:bc:cd:de:ef:f0" 282 + # apspoof=Foo2:ssid=Baz1,validmacs="11:22:33:44:55:66,bb:cc:dd:ee:ff:00" 283 + apspoof.Foo1 = [ 284 + { ssid = "Bar1"; validmacs = [ "00:11:22:33:44:55" "aa:bb:cc:dd:ee:ff" ]; } 
285 + { ssid = "Bar2"; validmacs = [ "01:12:23:34:45:56" "ab:bc:cd:de:ef:f0" ]; } 286 + ]; 287 + 288 + # because Foo1 is a list, Foo2 needs to be as well 289 + apspoof.Foo2 = [ 290 + { 291 + ssid = "Bar2"; 292 + validmacs = [ "00:11:22:33:44:55" "aa:bb:cc:dd:ee:ff" ]; 293 + }; 294 + ]; 295 + } 296 + ''; 297 + }; 298 + extraConfig = mkOption { 299 + description = '' 300 + Literal Kismet config lines appended to the site config. 301 + Note that `services.kismet.settings` allows you to define 302 + all options here using Nix attribute sets. 303 + ''; 304 + default = ""; 305 + type = types.str; 306 + example = '' 307 + # Looks like the following in `services.kismet.settings`: 308 + # wepkey = [ "00:DE:AD:C0:DE:00" "FEEDFACE42" ]; 309 + wepkey=00:DE:AD:C0:DE:00,FEEDFACE42 310 + ''; 311 + }; 312 + }; 313 + 314 + config = 315 + let 316 + configDir = "${cfg.dataDir}/.kismet"; 317 + settings = 318 + cfg.settings 319 + // { 320 + server_name = cfg.serverName; 321 + server_description = cfg.serverDescription; 322 + logging_enabled = cfg.logTypes != [ ]; 323 + log_types = cfg.logTypes; 324 + } 325 + // optionalAttrs cfg.httpd.enable { 326 + httpd_bind_address = cfg.httpd.address; 327 + httpd_port = cfg.httpd.port; 328 + httpd_auth_file = "${configDir}/kismet_httpd.conf"; 329 + httpd_home = "${cfg.package}/share/kismet/httpd"; 330 + }; 331 + in 332 + mkIf cfg.enable { 333 + systemd.tmpfiles.settings = { 334 + "10-kismet" = { 335 + ${cfg.dataDir} = { 336 + d = { 337 + inherit (cfg) user group; 338 + mode = "0750"; 339 + }; 340 + }; 341 + ${configDir} = { 342 + d = { 343 + inherit (cfg) user group; 344 + mode = "0750"; 345 + }; 346 + }; 347 + }; 348 + }; 349 + systemd.services.kismet = 350 + let 351 + kismetConf = pkgs.writeText "kismet.conf" '' 352 + ${mkKismetConf settings} 353 + ${cfg.extraConfig} 354 + ''; 355 + in 356 + { 357 + description = "Kismet monitoring service"; 358 + wants = [ "basic.target" ]; 359 + after = [ 360 + "basic.target" 361 + "network.target" 362 + ]; 363 
+ wantedBy = [ "multi-user.target" ]; 364 + serviceConfig = 365 + let 366 + capabilities = [ 367 + "CAP_NET_ADMIN" 368 + "CAP_NET_RAW" 369 + ]; 370 + kismetPreStart = pkgs.writeShellScript "kismet-pre-start" '' 371 + owner=${escapeShellArg "${cfg.user}:${cfg.group}"} 372 + mkdir -p ~/.kismet 373 + 374 + # Ensure permissions on directories Kismet uses. 375 + chown "$owner" ~/ ~/.kismet 376 + cd ~/.kismet 377 + 378 + package=${cfg.package} 379 + if [ -d "$package/etc" ]; then 380 + for file in "$package/etc"/*.conf; do 381 + # Symlink the config files if they exist or are already a link. 382 + base="''${file##*/}" 383 + if [ ! -f "$base" ] || [ -L "$base" ]; then 384 + ln -sf "$file" "$base" 385 + fi 386 + done 387 + fi 388 + 389 + for file in kismet_httpd.conf; do 390 + # Un-symlink these files. 391 + if [ -L "$file" ]; then 392 + cp "$file" ".$file" 393 + rm -f "$file" 394 + mv ".$file" "$file" 395 + chmod 0640 "$file" 396 + chown "$owner" "$file" 397 + fi 398 + done 399 + 400 + # Link the site config. 
401 + ln -sf ${kismetConf} kismet_site.conf 402 + ''; 403 + in 404 + { 405 + Type = "simple"; 406 + ExecStart = escapeShellArgs [ 407 + "${cfg.package}/bin/kismet" 408 + "--homedir" 409 + cfg.dataDir 410 + "--confdir" 411 + configDir 412 + "--datadir" 413 + "${cfg.package}/share" 414 + "--no-ncurses" 415 + "-f" 416 + "${configDir}/kismet.conf" 417 + ]; 418 + WorkingDirectory = cfg.dataDir; 419 + ExecStartPre = "+${kismetPreStart}"; 420 + Restart = "always"; 421 + KillMode = "control-group"; 422 + CapabilityBoundingSet = capabilities; 423 + AmbientCapabilities = capabilities; 424 + LockPersonality = true; 425 + NoNewPrivileges = true; 426 + PrivateDevices = false; 427 + PrivateTmp = true; 428 + PrivateUsers = false; 429 + ProtectClock = true; 430 + ProtectControlGroups = true; 431 + ProtectHome = true; 432 + ProtectHostname = true; 433 + ProtectKernelLogs = true; 434 + ProtectKernelModules = true; 435 + ProtectKernelTunables = true; 436 + ProtectProc = "invisible"; 437 + ProtectSystem = "full"; 438 + RestrictNamespaces = true; 439 + RestrictSUIDSGID = true; 440 + User = cfg.user; 441 + Group = cfg.group; 442 + UMask = "0007"; 443 + TimeoutStopSec = 30; 444 + }; 445 + 446 + # Allow it to restart if the wifi interface is not up 447 + unitConfig.StartLimitIntervalSec = 5; 448 + }; 449 + users.groups.${cfg.group} = { }; 450 + users.users.${cfg.user} = { 451 + inherit (cfg) group; 452 + description = "User for running Kismet"; 453 + isSystemUser = true; 454 + home = cfg.dataDir; 455 + }; 456 + }; 457 + 458 + meta.maintainers = with lib.maintainers; [ numinit ]; 459 + }
+200
nixos/modules/services/networking/vwifi.nix
···
··· 1 + { 2 + config, 3 + lib, 4 + pkgs, 5 + ... 6 + }: 7 + 8 + let 9 + inherit (lib.modules) mkIf mkMerge; 10 + inherit (lib.options) mkOption mkPackageOption mkEnableOption; 11 + inherit (lib.lists) optional optionals; 12 + inherit (lib.strings) 13 + hasSuffix 14 + escapeShellArgs 15 + ; 16 + inherit (lib) types; 17 + cfg = config.services.vwifi; 18 + in 19 + { 20 + options = { 21 + services.vwifi = 22 + let 23 + mkOptionalPort = 24 + name: 25 + mkOption { 26 + description = '' 27 + The ${name} port. Set to null if we should leave it unset. 28 + ''; 29 + type = with types; nullOr port; 30 + default = null; 31 + }; 32 + in 33 + { 34 + package = mkPackageOption pkgs "vwifi" { }; 35 + module = { 36 + enable = mkEnableOption "mac80211_hwsim module"; 37 + numRadios = mkOption { 38 + description = "The number of virtual radio interfaces to create."; 39 + type = types.int; 40 + default = 1; 41 + }; 42 + macPrefix = mkOption { 43 + description = '' 44 + The prefix for MAC addresses to use, without the trailing ':'. 45 + If one radio is created, you can specify the whole MAC address here. 46 + The default is defined in vwifi/src/config.h. 47 + ''; 48 + type = types.strMatching "^(([0-9A-Fa-f]{2}:){0,5}[0-9A-Fa-f]{2})$"; 49 + default = "74:F8:F6"; 50 + }; 51 + }; 52 + client = { 53 + enable = mkEnableOption "vwifi client"; 54 + spy = mkEnableOption "spy mode, useful for wireless monitors"; 55 + serverAddress = mkOption { 56 + description = '' 57 + The address of the server. If set to null, will try to use the vsock protocol. 58 + Note that this assumes that the server is spawned on the host and passed through to 59 + QEMU, with something like: 60 + 61 + -device vhost-vsock-pci,id=vwifi0,guest-cid=42 62 + ''; 63 + type = with types; nullOr str; 64 + default = null; 65 + }; 66 + serverPort = mkOptionalPort "server port"; 67 + extraArgs = mkOption { 68 + description = '' 69 + Extra arguments to pass to vwifi-client. 
You can use this if you want to bring 70 + the radios up using vwifi-client instead of at boot. 71 + ''; 72 + type = with types; listOf str; 73 + default = [ ]; 74 + example = [ 75 + "--number" 76 + "3" 77 + ]; 78 + }; 79 + }; 80 + server = { 81 + enable = mkEnableOption "vwifi server"; 82 + vsock.enable = mkEnableOption "vsock kernel module"; 83 + ports = { 84 + vhost = mkOptionalPort "vhost"; 85 + tcp = mkOptionalPort "TCP server"; 86 + spy = mkOptionalPort "spy interface"; 87 + control = mkOptionalPort "control interface"; 88 + }; 89 + openFirewall = mkEnableOption "opening the firewall for the TCP and spy ports"; 90 + extraArgs = mkOption { 91 + description = '' 92 + Extra arguments to pass to vwifi-server. You can use this for things including 93 + changing the ports or inducing packet loss. 94 + ''; 95 + type = with types; listOf str; 96 + default = [ ]; 97 + example = [ "--lost-packets" ]; 98 + }; 99 + }; 100 + }; 101 + }; 102 + 103 + config = mkMerge [ 104 + (mkIf cfg.module.enable { 105 + boot.kernelModules = [ 106 + "mac80211_hwsim" 107 + ]; 108 + boot.extraModprobeConfig = '' 109 + # We'll add more radios using vwifi-add-interfaces in the systemd unit. 110 + options mac80211_hwsim radios=0 111 + ''; 112 + systemd.services.vwifi-add-interfaces = mkIf (cfg.module.numRadios > 0) { 113 + description = "vwifi interface bringup"; 114 + wantedBy = [ "network-pre.target" ]; 115 + serviceConfig = { 116 + Type = "oneshot"; 117 + ExecStart = 118 + let 119 + args = [ 120 + (toString cfg.module.numRadios) 121 + cfg.module.macPrefix 122 + ]; 123 + in 124 + "${cfg.package}/bin/vwifi-add-interfaces ${escapeShellArgs args}"; 125 + }; 126 + }; 127 + assertions = [ 128 + { 129 + assertion = !(hasSuffix ":" cfg.module.macPrefix); 130 + message = '' 131 + services.vwifi.module.macPrefix should not have a trailing ":". 
132 + ''; 133 + } 134 + ]; 135 + }) 136 + (mkIf cfg.client.enable { 137 + systemd.services.vwifi-client = 138 + let 139 + clientArgs = 140 + optional cfg.client.spy "--spy" 141 + ++ optional (cfg.client.serverAddress != null) cfg.client.serverAddress 142 + ++ optionals (cfg.client.serverPort != null) [ 143 + "--port" 144 + cfg.client.serverPort 145 + ] 146 + ++ cfg.client.extraArgs; 147 + in 148 + rec { 149 + description = "vwifi client"; 150 + wantedBy = [ "multi-user.target" ]; 151 + after = [ "network.target" ]; 152 + requires = after; 153 + serviceConfig = { 154 + ExecStart = "${cfg.package}/bin/vwifi-client ${escapeShellArgs clientArgs}"; 155 + }; 156 + }; 157 + }) 158 + (mkIf cfg.server.enable { 159 + boot.kernelModules = mkIf cfg.server.vsock.enable [ 160 + "vhost_vsock" 161 + ]; 162 + networking.firewall.allowedTCPPorts = mkIf cfg.server.openFirewall ( 163 + optional (cfg.server.ports.tcp != null) cfg.server.ports.tcp 164 + ++ optional (cfg.server.ports.spy != null) cfg.server.ports.spy 165 + ); 166 + systemd.services.vwifi-server = 167 + let 168 + serverArgs = 169 + optionals (cfg.server.ports.vhost != null) [ 170 + "--port-vhost" 171 + (toString cfg.server.ports.vhost) 172 + ] 173 + ++ optionals (cfg.server.ports.tcp != null) [ 174 + "--port-tcp" 175 + (toString cfg.server.ports.tcp) 176 + ] 177 + ++ optionals (cfg.server.ports.spy != null) [ 178 + "--port-spy" 179 + (toString cfg.server.ports.spy) 180 + ] 181 + ++ optionals (cfg.server.ports.control != null) [ 182 + "--port-ctrl" 183 + (toString cfg.server.ports.control) 184 + ] 185 + ++ cfg.server.extraArgs; 186 + in 187 + rec { 188 + description = "vwifi server"; 189 + wantedBy = [ "multi-user.target" ]; 190 + after = [ "network.target" ]; 191 + requires = after; 192 + serviceConfig = { 193 + ExecStart = "${cfg.package}/bin/vwifi-server ${escapeShellArgs serverArgs}"; 194 + }; 195 + }; 196 + }) 197 + ]; 198 + 199 + meta.maintainers = with lib.maintainers; [ numinit ]; 200 + }
+21 -11
nixos/modules/services/web-apps/mattermost.nix
··· 41 # The directory to store mutable data within dataDir. 42 mutableDataDir = "${cfg.dataDir}/data"; 43 44 - # The plugin directory. Note that this is the *post-unpack* plugin directory, 45 - # since Mattermost unpacks plugins to put them there. (Hence, mutable data.) 46 - pluginDir = "${mutableDataDir}/plugins"; 47 48 # Mattermost uses this as a staging directory to unpack plugins, among possibly other things. 49 # Ensure that it's inside mutableDataDir since it can get rather large. ··· 232 services.mattermost.environmentFile = "<your environment file>"; 233 services.mattermost.database.fromEnvironment = true; 234 '' database; 235 - FileSettings.Directory = cfg.dataDir; 236 - PluginSettings.Directory = "${pluginDir}/server"; 237 - PluginSettings.ClientDirectory = "${pluginDir}/client"; 238 LogSettings = { 239 FileLocation = cfg.logDir; 240 ··· 800 "R- ${tempDir} - - - - -" 801 "d= ${tempDir} 0750 ${cfg.user} ${cfg.group} - -" 802 803 - # Ensure that pluginDir is a directory, as it could be a symlink on prior versions. 804 # Don't remove or clean it out since it should be persistent, as this is where plugins are unpacked. 805 - "d= ${pluginDir} 0750 ${cfg.user} ${cfg.group} - -" 806 807 # Ensure that the plugin directories exist. 808 "d= ${mattermostConf.PluginSettings.Directory} 0750 ${cfg.user} ${cfg.group} - -" ··· 819 if cfg.pluginsBundle == null then 820 # Create the plugin tarball directory to allow plugin uploads. 821 [ 822 - "d= ${cfg.dataDir}/plugins 0750 ${cfg.user} ${cfg.group} - -" 823 ] 824 else 825 # Symlink the plugin tarball directory, removing anything existing, since it's managed by Nix. 826 - [ "L+ ${cfg.dataDir}/plugins - - - - ${cfg.pluginsBundle}" ] 827 ); 828 829 systemd.services.mattermost = rec { ··· 867 # Logs too. 868 oldLogs="$dataDir/logs" 869 newLogs="$logDir" 870 - if [ "$oldLogs" != "$newLogs" ] && [ -d "$oldLogs" ]; then 871 # Migrate the legacy log location to the new log location. 
872 # Allow this to fail if there aren't any logs to move. 873 echo "Moving legacy logs at $oldLogs to $newLogs" >&2 874 mkdir -p "$newLogs" 875 mv "$oldLogs"/* "$newLogs" || true 876 fi 877 '' 878 + optionalString (!cfg.mutableConfig) ''
··· 41 # The directory to store mutable data within dataDir. 42 mutableDataDir = "${cfg.dataDir}/data"; 43 44 + # The plugin directory. Note that this is the *pre-unpack* plugin directory, 45 + # since Mattermost looks in mutableDataDir for a directory called "plugins". 46 + # If Mattermost is installed with plugins defined in a Nix configuration, the plugins 47 + # are symlinked here. Otherwise, this is a real directory and the tarballs are uploaded here. 48 + pluginTarballDir = "${mutableDataDir}/plugins"; 49 + 50 + # We need a different unpack directory for Mattermost to sync things to at launch, 51 + # since the above may be a symlink to the store. 52 + pluginUnpackDir = "${mutableDataDir}/.plugins"; 53 54 # Mattermost uses this as a staging directory to unpack plugins, among possibly other things. 55 # Ensure that it's inside mutableDataDir since it can get rather large. ··· 238 services.mattermost.environmentFile = "<your environment file>"; 239 services.mattermost.database.fromEnvironment = true; 240 '' database; 241 + 242 + # Note that the plugin tarball directory is not configurable, and is expected to be in FileSettings.Directory/plugins. 243 + FileSettings.Directory = mutableDataDir; 244 + PluginSettings.Directory = "${pluginUnpackDir}/server"; 245 + PluginSettings.ClientDirectory = "${pluginUnpackDir}/client"; 246 + 247 LogSettings = { 248 FileLocation = cfg.logDir; 249 ··· 809 "R- ${tempDir} - - - - -" 810 "d= ${tempDir} 0750 ${cfg.user} ${cfg.group} - -" 811 812 + # Ensure that pluginUnpackDir is a directory. 813 # Don't remove or clean it out since it should be persistent, as this is where plugins are unpacked. 814 + "d= ${pluginUnpackDir} 0750 ${cfg.user} ${cfg.group} - -" 815 816 # Ensure that the plugin directories exist. 817 "d= ${mattermostConf.PluginSettings.Directory} 0750 ${cfg.user} ${cfg.group} - -" ··· 828 if cfg.pluginsBundle == null then 829 # Create the plugin tarball directory to allow plugin uploads. 
830 [ 831 + "d= ${pluginTarballDir} 0750 ${cfg.user} ${cfg.group} - -" 832 ] 833 else 834 # Symlink the plugin tarball directory, removing anything existing, since it's managed by Nix. 835 + [ "L+ ${pluginTarballDir} - - - - ${cfg.pluginsBundle}" ] 836 ); 837 838 systemd.services.mattermost = rec { ··· 876 # Logs too. 877 oldLogs="$dataDir/logs" 878 newLogs="$logDir" 879 + if [ "$oldLogs" != "$newLogs" ] && [ -d "$oldLogs" ] && [ ! -f "$newLogs/.initial-created" ]; then 880 # Migrate the legacy log location to the new log location. 881 # Allow this to fail if there aren't any logs to move. 882 echo "Moving legacy logs at $oldLogs to $newLogs" >&2 883 mkdir -p "$newLogs" 884 mv "$oldLogs"/* "$newLogs" || true 885 + touch "$newLogs/.initial-created" 886 fi 887 '' 888 + optionalString (!cfg.mutableConfig) ''
+1
nixos/tests/all-tests.nix
··· 702 keyd = handleTest ./keyd.nix { }; 703 keymap = handleTest ./keymap.nix { }; 704 kimai = runTest ./kimai.nix; 705 kmonad = runTest ./kmonad.nix; 706 knot = runTest ./knot.nix; 707 komga = handleTest ./komga.nix { };
··· 702 keyd = handleTest ./keyd.nix { }; 703 keymap = handleTest ./keymap.nix { }; 704 kimai = runTest ./kimai.nix; 705 + kismet = runTest ./kismet.nix; 706 kmonad = runTest ./kmonad.nix; 707 knot = runTest ./knot.nix; 708 komga = handleTest ./komga.nix { };
+266
nixos/tests/kismet.nix
···
··· 1 + { pkgs, lib, ... }: 2 + 3 + let 4 + ssid = "Hydra SmokeNet"; 5 + psk = "stayoffmywifi"; 6 + wlanInterface = "wlan0"; 7 + in 8 + { 9 + name = "kismet"; 10 + 11 + nodes = 12 + let 13 + hostAddress = id: "192.168.1.${toString (id + 1)}"; 14 + serverAddress = hostAddress 1; 15 + in 16 + { 17 + airgap = 18 + { config, ... }: 19 + { 20 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 21 + { 22 + address = serverAddress; 23 + prefixLength = 24; 24 + } 25 + ]; 26 + services.vwifi = { 27 + server = { 28 + enable = true; 29 + ports.tcp = 8212; 30 + ports.spy = 8213; 31 + openFirewall = true; 32 + }; 33 + }; 34 + }; 35 + 36 + ap = 37 + { config, ... }: 38 + { 39 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 40 + { 41 + address = hostAddress 2; 42 + prefixLength = 24; 43 + } 44 + ]; 45 + services.hostapd = { 46 + enable = true; 47 + radios.${wlanInterface} = { 48 + channel = 1; 49 + networks.${wlanInterface} = { 50 + inherit ssid; 51 + authentication = { 52 + mode = "wpa3-sae"; 53 + saePasswords = [ { password = psk; } ]; 54 + enableRecommendedPairwiseCiphers = true; 55 + }; 56 + }; 57 + }; 58 + }; 59 + services.vwifi = { 60 + module = { 61 + enable = true; 62 + macPrefix = "74:F8:F6:00:01"; 63 + }; 64 + client = { 65 + enable = true; 66 + inherit serverAddress; 67 + }; 68 + }; 69 + }; 70 + 71 + station = 72 + { config, ... }: 73 + { 74 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 75 + { 76 + address = hostAddress 3; 77 + prefixLength = 24; 78 + } 79 + ]; 80 + networking.wireless = { 81 + # No, really, we want it enabled! 
82 + enable = lib.mkOverride 0 true; 83 + interfaces = [ wlanInterface ]; 84 + networks = { 85 + ${ssid} = { 86 + inherit psk; 87 + authProtocols = [ "SAE" ]; 88 + }; 89 + }; 90 + }; 91 + services.vwifi = { 92 + module = { 93 + enable = true; 94 + macPrefix = "74:F8:F6:00:02"; 95 + }; 96 + client = { 97 + enable = true; 98 + inherit serverAddress; 99 + }; 100 + }; 101 + }; 102 + 103 + monitor = 104 + { config, ... }: 105 + { 106 + networking.interfaces.eth1.ipv4.addresses = lib.mkForce [ 107 + { 108 + address = hostAddress 4; 109 + prefixLength = 24; 110 + } 111 + ]; 112 + 113 + services.kismet = { 114 + enable = true; 115 + serverName = "NixOS Kismet Smoke Test"; 116 + serverDescription = "Server testing virtual wifi devices running on Hydra"; 117 + httpd.enable = true; 118 + # Check that the settings all eval correctly 119 + settings = { 120 + # Should append to log_types 121 + log_types' = "wiglecsv"; 122 + 123 + # Should all generate correctly 124 + wepkey = [ 125 + "00:DE:AD:C0:DE:00" 126 + "FEEDFACE42" 127 + ]; 128 + alert = [ 129 + [ 130 + "ADHOCCONFLICT" 131 + "5/min" 132 + "1/sec" 133 + ] 134 + [ 135 + "ADVCRYPTCHANGE" 136 + "5/min" 137 + "1/sec" 138 + ] 139 + ]; 140 + gps.gpsd = { 141 + host = "localhost"; 142 + port = 2947; 143 + }; 144 + apspoof.Foo1 = [ 145 + { 146 + ssid = "Bar1"; 147 + validmacs = [ 148 + "00:11:22:33:44:55" 149 + "aa:bb:cc:dd:ee:ff" 150 + ]; 151 + } 152 + { 153 + ssid = "Bar2"; 154 + validmacs = [ 155 + "01:12:23:34:45:56" 156 + "ab:bc:cd:de:ef:f0" 157 + ]; 158 + } 159 + ]; 160 + apspoof.Foo2 = [ 161 + { 162 + ssid = "Bar2"; 163 + validmacs = [ 164 + "00:11:22:33:44:55" 165 + "aa:bb:cc:dd:ee:ff" 166 + ]; 167 + } 168 + ]; 169 + 170 + # The actual source 171 + source.${wlanInterface} = { 172 + name = "Virtual Wifi"; 173 + }; 174 + }; 175 + extraConfig = '' 176 + # this comment should be ignored 177 + ''; 178 + }; 179 + 180 + services.vwifi = { 181 + module = { 182 + enable = true; 183 + macPrefix = "74:F8:F6:00:03"; 184 + }; 185 + 
client = { 186 + enable = true; 187 + spy = true; 188 + inherit serverAddress; 189 + }; 190 + }; 191 + 192 + environment.systemPackages = with pkgs; [ 193 + config.services.kismet.package 194 + config.services.vwifi.package 195 + jq 196 + ]; 197 + }; 198 + }; 199 + 200 + testScript = 201 + { nodes, ... }: 202 + '' 203 + import shlex 204 + 205 + # Wait for the vwifi server to come up 206 + airgap.start() 207 + airgap.wait_for_unit("vwifi-server.service") 208 + airgap.wait_for_open_port(${toString nodes.airgap.services.vwifi.server.ports.tcp}) 209 + 210 + httpd_port = ${toString nodes.monitor.services.kismet.httpd.port} 211 + server_name = "${nodes.monitor.services.kismet.serverName}" 212 + server_description = "${nodes.monitor.services.kismet.serverDescription}" 213 + wlan_interface = "${wlanInterface}" 214 + ap_essid = "${ssid}" 215 + ap_mac_prefix = "${nodes.ap.services.vwifi.module.macPrefix}" 216 + station_mac_prefix = "${nodes.station.services.vwifi.module.macPrefix}" 217 + 218 + # Spawn the other nodes. 219 + monitor.start() 220 + 221 + # Wait for the monitor to come up 222 + monitor.wait_for_unit("kismet.service") 223 + monitor.wait_for_open_port(httpd_port) 224 + 225 + # Should be up but require authentication. 226 + url = f"http://localhost:{httpd_port}" 227 + monitor.succeed(f"curl {url} | tee /dev/stderr | grep '<title>Kismet</title>'") 228 + 229 + # Have to set the password now. 230 + monitor.succeed("echo httpd_username=nixos >> ~kismet/.kismet/kismet_httpd.conf") 231 + monitor.succeed("echo httpd_password=hydra >> ~kismet/.kismet/kismet_httpd.conf") 232 + monitor.systemctl("restart kismet.service") 233 + monitor.wait_for_unit("kismet.service") 234 + monitor.wait_for_open_port(httpd_port) 235 + 236 + # Authentication should now work. 
237 + url = f"http://nixos:hydra@localhost:{httpd_port}" 238 + monitor.succeed(f"curl {url}/system/status.json | tee /dev/stderr | jq -e --arg serverName {shlex.quote(server_name)} --arg serverDescription {shlex.quote(server_description)} '.\"kismet.system.server_name\" == $serverName and .\"kismet.system.server_description\" == $serverDescription'") 239 + 240 + # Wait for the station to connect to the AP while Kismet is monitoring 241 + ap.start() 242 + station.start() 243 + 244 + unit = f"wpa_supplicant-{wlan_interface}" 245 + 246 + # Generate handshakes until we detect both devices 247 + success = False 248 + for i in range(100): 249 + station.wait_for_unit(f"wpa_supplicant-{wlan_interface}.service") 250 + station.succeed(f"ifconfig {wlan_interface} down && ifconfig {wlan_interface} up") 251 + station.wait_until_succeeds(f"journalctl -u {shlex.quote(unit)} -e | grep -Eqi {shlex.quote(wlan_interface + ': CTRL-EVENT-CONNECTED - Connection to ' + ap_mac_prefix + '[0-9a-f:]* completed')}") 252 + station.succeed(f"journalctl --rotate --unit={shlex.quote(unit)}") 253 + station.succeed(f"sleep 3 && journalctl --vacuum-time=1s --unit={shlex.quote(unit)}") 254 + 255 + # We're connected, make sure Kismet sees both of our devices 256 + status, stdout = monitor.execute(f"curl {url}/devices/views/all/last-time/0/devices.json | tee /dev/stderr | jq -e --arg macPrefix {shlex.quote(ap_mac_prefix)} --arg ssid {shlex.quote(ap_essid)} '. | (map(select((.\"kismet.device.base.macaddr\"? | startswith($macPrefix)) and .\"dot11.device\"?.\"dot11.device.last_beaconed_ssid_record\"?.\"dot11.advertisedssid.ssid\" == $ssid)) | length) == 1'") 257 + if status != 0: 258 + continue 259 + status, stdout = monitor.execute(f"curl {url}/devices/views/all/last-time/0/devices.json | tee /dev/stderr | jq -e --arg macPrefix {shlex.quote(station_mac_prefix)} '. | (map(select((.\"kismet.device.base.macaddr\"? 
| startswith($macPrefix)))) | length) == 1'") 260 + if status == 0: 261 + success = True 262 + break 263 + 264 + assert success 265 + ''; 266 + }
+15 -3
nixos/tests/mattermost/default.nix
··· 335 if [ "$actualPostAttachmentHash" != "$postAttachmentHash" ]; then 336 echo "Post attachment hash mismatched!" >&2 337 exit 1 338 - else 339 echo "Post attachment hash was OK!" >&2 340 exit 0 341 fi 342 else 343 echo "Post didn't exist when it should have!" >&2 ··· 454 # Switch to the newer config and make sure the plugins directory is replaced with a directory, 455 # since it could have been a symlink on previous versions. 456 mostlyMutable.systemctl("stop mattermost.service") 457 - mostlyMutable.succeed(f"[ ! -L /var/lib/mattermost/data/plugins ] && rm -rf /var/lib/mattermost/data/plugins && ln -s {mostlyMutablePlugins} /var/lib/mattermost/data/plugins || true") 458 mostlyMutable.succeed('[ -L /var/lib/mattermost/data/plugins ] && [ -d /var/lib/mattermost/data/plugins ]') 459 switch_to_specialisation(mostlyMutable, mostlyMutableToplevel, "upgrade") 460 wait_mattermost_up(mostlyMutable) 461 - mostlyMutable.succeed('[ ! -L /var/lib/mattermost/data/plugins ] && [ -d /var/lib/mattermost/data/plugins ]') 462 463 # HelpLink should be changed, still, and the post should still exist 464 expect_config(mostlyMutable, esr, '.AboutLink == "https://nixos.org" and .HelpLink == "https://nixos.org/nixos/manual"')
··· 335 if [ "$actualPostAttachmentHash" != "$postAttachmentHash" ]; then 336 echo "Post attachment hash mismatched!" >&2 337 exit 1 338 + fi 339 + 340 + # Make sure it's on the filesystem in the expected place 341 + fsPath="$(find /var/lib/mattermost/data -name "$(basename -- "$postAttachment")" -print -quit)" 342 + if [ -z "$fsPath" ] || [ ! -f "$fsPath" ]; then 343 + echo "Attachment didn't exist on the filesystem!" >&2 344 + exit 1 345 + fi 346 + 347 + # And that the hash matches. 348 + actualFsAttachmentHash="$(sha256sum "$fsPath" | awk '{print $1}')" 349 + if [ "$actualFsAttachmentHash" == "$postAttachmentHash" ]; then 350 echo "Post attachment hash was OK!" >&2 351 exit 0 352 + else 353 + echo "Attachment hash mismatched on disk!" >&2 354 + exit 1 355 fi 356 else 357 echo "Post didn't exist when it should have!" >&2 ··· 468 # Switch to the newer config and make sure the plugins directory is replaced with a directory, 469 # since it could have been a symlink on previous versions. 470 mostlyMutable.systemctl("stop mattermost.service") 471 mostlyMutable.succeed('[ -L /var/lib/mattermost/data/plugins ] && [ -d /var/lib/mattermost/data/plugins ]') 472 switch_to_specialisation(mostlyMutable, mostlyMutableToplevel, "upgrade") 473 wait_mattermost_up(mostlyMutable) 474 475 # HelpLink should be changed, still, and the post should still exist 476 expect_config(mostlyMutable, esr, '.AboutLink == "https://nixos.org" and .HelpLink == "https://nixos.org/nixos/manual"')
+1
pkgs/applications/audio/puredata/default.nix
··· 62 63 postInstall = '' 64 wrapProgram $out/bin/pd --prefix PATH : ${lib.makeBinPath [ tk ]} 65 ''; 66 67 meta = with lib; {
··· 62 63 postInstall = '' 64 wrapProgram $out/bin/pd --prefix PATH : ${lib.makeBinPath [ tk ]} 65 + wrapProgram $out/bin/pd-gui --prefix PATH : ${lib.makeBinPath [ tk ]} 66 ''; 67 68 meta = with lib; {
+4 -3
pkgs/applications/misc/goldendict-ng/default.nix
··· 30 31 stdenv.mkDerivation { 32 pname = "goldendict-ng"; 33 - version = "25.02.0"; 34 35 src = fetchFromGitHub { 36 owner = "xiaoyifang"; 37 repo = "goldendict-ng"; 38 - tag = "v25.02.0-Release.e895b18c"; 39 - hash = "sha256-k8pGzrSFbAUP7DG3lSAYBa5WeeSUbjZMvMqmxPqdT3E="; 40 }; 41 42 nativeBuildInputs = [ ··· 92 maintainers = with maintainers; [ 93 slbtty 94 michojel 95 ]; 96 license = licenses.gpl3Plus; 97 };
··· 30 31 stdenv.mkDerivation { 32 pname = "goldendict-ng"; 33 + version = "25.05.0"; 34 35 src = fetchFromGitHub { 36 owner = "xiaoyifang"; 37 repo = "goldendict-ng"; 38 + tag = "v25.05.0-Release.2a2b0e16"; 39 + hash = "sha256-PBqkVac867xE4ZcvwTysAK6rQSoEZelKrV9USvFsaLk="; 40 }; 41 42 nativeBuildInputs = [ ··· 92 maintainers = with maintainers; [ 93 slbtty 94 michojel 95 + linsui 96 ]; 97 license = licenses.gpl3Plus; 98 };
+25 -11
pkgs/applications/networking/charles/default.nix
··· 4 makeWrapper, 5 makeDesktopItem, 6 fetchurl, 7 jdk11, 8 jdk8, 9 }: ··· 12 generic = 13 { 14 version, 15 - sha256, 16 platform ? "", 17 jdk, 18 ... ··· 52 "--user-agent" 53 "Mozilla/5.0" 54 ]; # HTTP 104 otherwise 55 - inherit sha256; 56 }; 57 nativeBuildInputs = [ makeWrapper ]; 58 59 installPhase = '' 60 makeWrapper ${jdk}/bin/java $out/bin/charles \ 61 - --add-flags "-Xmx1024M -Dcharles.config='~/.charles.config' -jar $out/share/java/charles.jar" 62 63 for fn in lib/*.jar; do 64 install -D -m644 $fn $out/share/java/$(basename $fn) ··· 69 70 mkdir -p $out/share/icons 71 cp -r icon $out/share/icons/hicolor 72 ''; 73 74 - meta = with lib; { 75 description = "Web Debugging Proxy"; 76 homepage = "https://www.charlesproxy.com/"; 77 - maintainers = with maintainers; [ 78 kalbasit 79 kashw2 80 ]; 81 - sourceProvenance = with sourceTypes; [ binaryBytecode ]; 82 - license = licenses.unfree; 83 - platforms = platforms.unix; 84 }; 85 }; 86 87 in 88 { 89 charles4 = ( 90 generic { 91 - version = "4.6.4"; 92 - sha256 = "KEQYb90kt41dS3TJLZqdaV9P3mQA9UPsEyiFb/knm3w="; 93 platform = "_amd64"; 94 jdk = jdk11; 95 } ··· 97 charles3 = ( 98 generic { 99 version = "3.12.3"; 100 - sha256 = "13zk82ny1w5zd9qcs9qkq0kdb22ni5byzajyshpxdfm4zv6p32ss"; 101 jdk = jdk8.jre; 102 mainProgram = "charles"; 103 }
··· 4 makeWrapper, 5 makeDesktopItem, 6 fetchurl, 7 + openjdk17-bootstrap, 8 jdk11, 9 jdk8, 10 }: ··· 13 generic = 14 { 15 version, 16 + hash, 17 platform ? "", 18 jdk, 19 ... ··· 53 "--user-agent" 54 "Mozilla/5.0" 55 ]; # HTTP 104 otherwise 56 + inherit hash; 57 }; 58 + 59 nativeBuildInputs = [ makeWrapper ]; 60 61 installPhase = '' 62 + runHook preInstall 63 + 64 makeWrapper ${jdk}/bin/java $out/bin/charles \ 65 + --add-flags "-Xmx1024M -Dcharles.config='~/.charles.config' ${lib.optionalString (lib.versionOlder version "5.0") "-jar $out/share/java/charles.jar"} ${lib.optionalString (lib.versionAtLeast version "5.0") "-XX:+UseZGC -Djava.library.path='$out/share/java' --add-opens java.base/sun.security.ssl=com.charlesproxy --add-opens java.desktop/java.awt.event=com.charlesproxy --add-opens java.base/java.io=com.charlesproxy --add-modules com.jthemedetector,com.formdev.flatlaf --module-path '$out/share/java' -m com.charlesproxy"}" 66 67 for fn in lib/*.jar; do 68 install -D -m644 $fn $out/share/java/$(basename $fn) ··· 73 74 mkdir -p $out/share/icons 75 cp -r icon $out/share/icons/hicolor 76 + 77 + runHook postInstall 78 ''; 79 80 + meta = { 81 description = "Web Debugging Proxy"; 82 homepage = "https://www.charlesproxy.com/"; 83 + maintainers = with lib.maintainers; [ 84 kalbasit 85 kashw2 86 ]; 87 + sourceProvenance = with lib.sourceTypes; [ binaryBytecode ]; 88 + license = lib.licenses.unfree; 89 + platforms = lib.platforms.unix; 90 }; 91 }; 92 93 in 94 { 95 + charles5 = ( 96 + generic { 97 + version = "5.0"; 98 + hash = "sha256-gvspRI3uF7bjE4UBuTGS5+n2h0nKudLtW3sqs2GZIyM="; 99 + platform = "_x86_64"; 100 + jdk = openjdk17-bootstrap; 101 + } 102 + ); 103 charles4 = ( 104 generic { 105 + version = "4.6.8"; 106 + hash = "sha256-AaS+zmQTWsGoLEhyGHA/UojmctE7IV0N9fnygNhEPls="; 107 platform = "_amd64"; 108 jdk = jdk11; 109 } ··· 111 charles3 = ( 112 generic { 113 version = "3.12.3"; 114 + hash = "sha256-Wotxzf6kutYv1F6q71eJVojVJsATJ81war/w4K1A848="; 115 jdk = 
jdk8.jre; 116 mainProgram = "charles"; 117 }
+4 -13
pkgs/applications/networking/mailreaders/notmuch/default.nix
··· 36 37 stdenv.mkDerivation (finalAttrs: { 38 pname = "notmuch"; 39 - version = "0.38.3"; 40 41 src = fetchurl { 42 url = "https://notmuchmail.org/releases/notmuch-${finalAttrs.version}.tar.xz"; 43 - hash = "sha256-mvRsyA2li0MByiuu/MJaQNES0DFVB+YywPPw8IMo0FQ="; 44 }; 45 46 nativeBuildInputs = ··· 68 ++ lib.optional withRuby ruby 69 ++ lib.optional withSfsexp sfsexp; 70 71 - patches = [ 72 - (fetchpatch { 73 - name = "add-workaround-for-Emacs-30-pp-changes.patch"; 74 - url = "https://git.notmuchmail.org/git?p=notmuch;a=patch;h=e3d4721b1ba4836c7646e057b50123fe994652eb"; 75 - hash = "sha256-phfNSOlTajTmaf+DjtdmBAWSm+2tUbrQEChInUlwn5k="; 76 - }) 77 - ]; 78 - 79 postPatch = 80 '' 81 patchShebangs configure test/ ··· 142 ln -s ${test-database} test/test-databases/database-v1.tar.xz 143 '' 144 + '' 145 - # Issues since gnupg: 2.4.0 -> 2.4.1 146 - rm test/{T350-crypto,T357-index-decryption}.sh 147 # Issues since pbr 6.0.0 bump (ModuleNotFoundError: No module named 'notmuch2') 148 rm test/T055-path-config.sh 149 # Flaky, seems to get its paths wrong sometimes (?) 150 # *ERROR*: Opening output file: Permission denied, /nix/store/bzy21v2cd5sq1djzwa9b19q08wpp9mm0-emacs-29.1/bin/OUTPUT 151 rm test/T460-emacs-tree.sh ··· 209 ''; 210 211 passthru = { 212 - pythonSourceRoot = "notmuch-${finalAttrs.version}/bindings/python"; 213 gemEnv = buildEnv { 214 name = "notmuch-vim-gems"; 215 paths = with ruby.gems; [ mail ];
··· 36 37 stdenv.mkDerivation (finalAttrs: { 38 pname = "notmuch"; 39 + version = "0.39"; 40 41 src = fetchurl { 42 url = "https://notmuchmail.org/releases/notmuch-${finalAttrs.version}.tar.xz"; 43 + hash = "sha256-uIuwKnbEa62NMT/Su0+OOSmLUfZvy+swTZ+Aw+73BOM="; 44 }; 45 46 nativeBuildInputs = ··· 68 ++ lib.optional withRuby ruby 69 ++ lib.optional withSfsexp sfsexp; 70 71 postPatch = 72 '' 73 patchShebangs configure test/ ··· 134 ln -s ${test-database} test/test-databases/database-v1.tar.xz 135 '' 136 + '' 137 # Issues since pbr 6.0.0 bump (ModuleNotFoundError: No module named 'notmuch2') 138 rm test/T055-path-config.sh 139 + rm test/T610-message-property.sh 140 # Flaky, seems to get its paths wrong sometimes (?) 141 # *ERROR*: Opening output file: Permission denied, /nix/store/bzy21v2cd5sq1djzwa9b19q08wpp9mm0-emacs-29.1/bin/OUTPUT 142 rm test/T460-emacs-tree.sh ··· 200 ''; 201 202 passthru = { 203 + pythonSourceRoot = "notmuch-${finalAttrs.version}/contrib/python-legacy"; 204 gemEnv = buildEnv { 205 name = "notmuch-vim-gems"; 206 paths = with ruby.gems; [ mail ];
+2 -2
pkgs/applications/office/libreoffice/default.nix
··· 10 libxslt, 11 perl, 12 perlPackages, 13 - box2d, 14 gettext, 15 zlib, 16 libjpeg, ··· 397 ant 398 bluez5 399 boost 400 - box2d 401 cairo 402 clucene_core_2 403 cppunit
··· 10 libxslt, 11 perl, 12 perlPackages, 13 + box2d_2, 14 gettext, 15 zlib, 16 libjpeg, ··· 397 ant 398 bluez5 399 boost 400 + box2d_2 401 cairo 402 clucene_core_2 403 cppunit
+14
pkgs/applications/science/math/mathematica/versions.nix
··· 9 let 10 versions = [ 11 { 12 version = "14.2.0"; 13 lang = "en"; 14 language = "English";
··· 9 let 10 versions = [ 11 { 12 + version = "14.2.1"; 13 + lang = "en"; 14 + language = "English"; 15 + sha256 = "sha256-WNLuq9RviYF3Mj8uOALhmvcxGGHVq/TAGFMTGWoYBcc="; 16 + installer = "Wolfram_14.2.1_LIN.sh"; 17 + } 18 + { 19 + version = "14.2.1"; 20 + lang = "en"; 21 + language = "English"; 22 + sha256 = "sha256-DcZbetr5wO3i/DkchgpsW3RGHfa1PslA4fK+bRQ68Bg="; 23 + installer = "Wolfram_14.2.1_LIN_Bndl.sh"; 24 + } 25 + { 26 version = "14.2.0"; 27 lang = "en"; 28 language = "English";
+2 -2
pkgs/applications/science/misc/openrefine/default.nix
··· 11 }: 12 13 let 14 - version = "3.9.2"; 15 src = fetchFromGitHub { 16 owner = "openrefine"; 17 repo = "openrefine"; 18 rev = version; 19 - hash = "sha256-Z52YfS+caNe7N3xMv85eoBes6TwkSsGk5/rGaQd/6ds="; 20 }; 21 22 npmPkg = buildNpmPackage {
··· 11 }: 12 13 let 14 + version = "3.9.3"; 15 src = fetchFromGitHub { 16 owner = "openrefine"; 17 repo = "openrefine"; 18 rev = version; 19 + hash = "sha256-wV5ur31JEGcMSLRHQq/H6GlsdpEzTH6ZxBkE9Sj6TkU="; 20 }; 21 22 npmPkg = buildNpmPackage {
-2
pkgs/by-name/ao/aonsoku/package.nix
··· 8 pkg-config, 9 wrapGAppsHook3, 10 openssl, 11 - libsoup_2_4, 12 webkitgtk_4_1, 13 glib-networking, 14 nix-update-script, ··· 48 49 buildInputs = [ 50 openssl 51 - libsoup_2_4 52 webkitgtk_4_1 53 glib-networking 54 ];
··· 8 pkg-config, 9 wrapGAppsHook3, 10 openssl, 11 webkitgtk_4_1, 12 glib-networking, 13 nix-update-script, ··· 47 48 buildInputs = [ 49 openssl 50 webkitgtk_4_1 51 glib-networking 52 ];
+2 -2
pkgs/by-name/ap/apt/package.nix
··· 34 35 stdenv.mkDerivation (finalAttrs: { 36 pname = "apt"; 37 - version = "2.9.35"; 38 39 src = fetchFromGitLab { 40 domain = "salsa.debian.org"; 41 owner = "apt-team"; 42 repo = "apt"; 43 rev = finalAttrs.version; 44 - hash = "sha256-B4rFOt4J94/XkFw09sdvfogdY1b5R6QYnNC3HVUV9pc="; 45 }; 46 47 # cycle detection; lib can't be split
··· 34 35 stdenv.mkDerivation (finalAttrs: { 36 pname = "apt"; 37 + version = "3.0.0"; 38 39 src = fetchFromGitLab { 40 domain = "salsa.debian.org"; 41 owner = "apt-team"; 42 repo = "apt"; 43 rev = finalAttrs.version; 44 + hash = "sha256-0jtJ/y8TK/mJeTM5n1WblT9i5OtRg6r5C7kvB+ioMz0="; 45 }; 46 47 # cycle detection; lib can't be split
+1 -1
pkgs/by-name/au/authentik/package.nix
··· 278 pyyaml 279 requests-oauthlib 280 scim2-filter-parser 281 - sentry-sdk_2 282 service-identity 283 setproctitle 284 structlog
··· 278 pyyaml 279 requests-oauthlib 280 scim2-filter-parser 281 + sentry-sdk 282 service-identity 283 setproctitle 284 structlog
+3 -4
pkgs/by-name/ba/base16-schemes/package.nix
··· 4 fetchFromGitHub, 5 ... 6 }: 7 - 8 stdenv.mkDerivation (finalAttrs: { 9 pname = "base16-schemes"; 10 - version = "unstable-2024-11-12"; 11 12 src = fetchFromGitHub { 13 owner = "tinted-theming"; 14 repo = "schemes"; 15 - rev = "61058a8d2e2bd4482b53d57a68feb56cdb991f0b"; 16 - sha256 = "sha256-Tp1BpaF5qRav7O2TsSGjCfgRzhiasu4IuwROR66gz1o="; 17 }; 18 19 installPhase = ''
··· 4 fetchFromGitHub, 5 ... 6 }: 7 stdenv.mkDerivation (finalAttrs: { 8 pname = "base16-schemes"; 9 + version = "unstable-2025-04-18"; 10 11 src = fetchFromGitHub { 12 owner = "tinted-theming"; 13 repo = "schemes"; 14 + rev = "28c26a621123ad4ebd5bbfb34ab39421c0144bdd"; 15 + hash = "sha256-Fg+rdGs5FAgfkYNCs74lnl8vkQmiZVdBsziyPhVqrlY="; 16 }; 17 18 installPhase = ''
+3 -3
pkgs/by-name/ba/bazel-watcher/package.nix
··· 9 10 buildGoModule rec { 11 pname = "bazel-watcher"; 12 - version = "0.25.3"; 13 14 src = fetchFromGitHub { 15 owner = "bazelbuild"; 16 repo = "bazel-watcher"; 17 rev = "v${version}"; 18 - hash = "sha256-5cRj04e5VVG4NSe4LOLkZIrerT4laLEDeXCqTiJj6MM="; 19 }; 20 21 - vendorHash = "sha256-0I/bvuyosN55oNSMuom4C8rVjxneUaqV19l9OMiwWhU="; 22 23 # The dependency github.com/fsnotify/fsevents requires CGO 24 env.CGO_ENABLED = if stdenv.hostPlatform.isDarwin then "1" else "0";
··· 9 10 buildGoModule rec { 11 pname = "bazel-watcher"; 12 + version = "0.26.0"; 13 14 src = fetchFromGitHub { 15 owner = "bazelbuild"; 16 repo = "bazel-watcher"; 17 rev = "v${version}"; 18 + hash = "sha256-FPyOdKJ8HSerFHDIjUkHKXMgoLmVeuyHYmo0v/8C0do="; 19 }; 20 21 + vendorHash = "sha256-H1Bqw5hbOLS4oJeQOdIXQ9c2H8jGtoW1J75BIkTNR9k="; 22 23 # The dependency github.com/fsnotify/fsevents requires CGO 24 env.CGO_ENABLED = if stdenv.hostPlatform.isDarwin then "1" else "0";
+10 -2
pkgs/by-name/bo/borgbackup/package.nix
··· 20 in 21 python.pkgs.buildPythonApplication rec { 22 pname = "borgbackup"; 23 - version = "1.4.0"; 24 pyproject = true; 25 26 src = fetchFromGitHub { 27 owner = "borgbackup"; 28 repo = "borg"; 29 tag = version; 30 - hash = "sha256-n1hCM7Sp0t2bOJEzErEd1PS/Xc7c+KDmJ4PjQuuF140="; 31 }; 32 33 postPatch = '' ··· 45 nativeBuildInputs = with python.pkgs; [ 46 # docs 47 sphinxHook 48 guzzle-sphinx-theme 49 50 # shell completions ··· 77 makeWrapperArgs = [ 78 ''--prefix PATH ':' "${openssh}/bin"'' 79 ]; 80 81 postInstall = '' 82 installShellCompletion --cmd borg \
··· 20 in 21 python.pkgs.buildPythonApplication rec { 22 pname = "borgbackup"; 23 + version = "1.4.1"; 24 pyproject = true; 25 26 src = fetchFromGitHub { 27 owner = "borgbackup"; 28 repo = "borg"; 29 tag = version; 30 + hash = "sha256-1RRizsHY6q1ruofTkRZ4sSN4k6Hoo+sG85w2zz+7yL8="; 31 }; 32 33 postPatch = '' ··· 45 nativeBuildInputs = with python.pkgs; [ 46 # docs 47 sphinxHook 48 + sphinxcontrib-jquery 49 guzzle-sphinx-theme 50 51 # shell completions ··· 78 makeWrapperArgs = [ 79 ''--prefix PATH ':' "${openssh}/bin"'' 80 ]; 81 + 82 + preInstallSphinx = '' 83 + # remove invalid outputs for manpages 84 + rm .sphinx/man/man/_static/jquery.js 85 + rm .sphinx/man/man/_static/_sphinx_javascript_frameworks_compat.js 86 + rmdir .sphinx/man/man/_static/ 87 + ''; 88 89 postInstall = '' 90 installShellCompletion --cmd borg \
+16
pkgs/by-name/bo/box2d/cmake_dont_fetch_enkits.patch
···
··· 1 + diff --git a/CMakeLists.txt b/CMakeLists.txt 2 + index 8c1390e..791d3b7 100644 3 + --- a/CMakeLists.txt 4 + +++ b/CMakeLists.txt 5 + @@ -83,10 +83,7 @@ if(PROJECT_IS_TOP_LEVEL) 6 + # Used in tests and samples 7 + FetchContent_Declare( 8 + enkits 9 + - GIT_REPOSITORY https://github.com/dougbinks/enkiTS.git 10 + - GIT_TAG master 11 + - GIT_SHALLOW TRUE 12 + - GIT_PROGRESS TRUE 13 + + URL @enkits_src@ 14 + ) 15 + FetchContent_MakeAvailable(enkits) 16 + endif()
+54
pkgs/by-name/bo/box2d/cmake_use_system_glfw_and_imgui.patch
···
··· 1 + diff --git a/samples/CMakeLists.txt b/samples/CMakeLists.txt 2 + index 5020345..97af8c6 100644 3 + --- a/samples/CMakeLists.txt 4 + +++ b/samples/CMakeLists.txt 5 + @@ -17,47 +17,12 @@ set(GLFW_BUILD_EXAMPLES OFF CACHE BOOL "GLFW Examples") 6 + set(GLFW_BUILD_TESTS OFF CACHE BOOL "GLFW Tests") 7 + set(GLFW_INSTALL OFF CACHE BOOL "GLFW Install") 8 + 9 + -FetchContent_Declare( 10 + - glfw 11 + - GIT_REPOSITORY https://github.com/glfw/glfw.git 12 + - GIT_TAG 3.4 13 + - GIT_SHALLOW TRUE 14 + - GIT_PROGRESS TRUE 15 + -) 16 + -FetchContent_MakeAvailable(glfw) 17 + +find_package(glfw) 18 + 19 + # imgui and glfw backend for GUI 20 + # https://gist.github.com/jeffamstutz/992723dfabac4e3ffff265eb71a24cd9 21 + # Modified to pin to a specific imgui release 22 + -FetchContent_Populate(imgui 23 + - URL https://github.com/ocornut/imgui/archive/refs/tags/v1.91.3.zip 24 + - SOURCE_DIR ${CMAKE_SOURCE_DIR}/build/imgui 25 + -) 26 + - 27 + -set(IMGUI_DIR ${CMAKE_SOURCE_DIR}/build/imgui) 28 + - 29 + -add_library(imgui STATIC 30 + - ${IMGUI_DIR}/imconfig.h 31 + - ${IMGUI_DIR}/imgui.h 32 + - 33 + - ${IMGUI_DIR}/imgui.cpp 34 + - ${IMGUI_DIR}/imgui_draw.cpp 35 + - ${IMGUI_DIR}/imgui_demo.cpp 36 + - ${IMGUI_DIR}/imgui_tables.cpp 37 + - ${IMGUI_DIR}/imgui_widgets.cpp 38 + - 39 + - ${IMGUI_DIR}/backends/imgui_impl_glfw.cpp 40 + - ${IMGUI_DIR}/backends/imgui_impl_opengl3.cpp 41 + -) 42 + - 43 + -target_link_libraries(imgui PUBLIC glfw glad) 44 + -target_include_directories(imgui PUBLIC ${IMGUI_DIR} ${IMGUI_DIR}/backends) 45 + -target_compile_definitions(imgui PUBLIC IMGUI_DISABLE_OBSOLETE_FUNCTIONS) 46 + -set_target_properties(imgui PROPERTIES 47 + - CXX_STANDARD 20 48 + - CXX_STANDARD_REQUIRED YES 49 + - CXX_EXTENSIONS NO 50 + -) 51 + +find_package(imgui) 52 + 53 + # jsmn for json 54 + set(JSMN_DIR ${CMAKE_SOURCE_DIR}/extern/jsmn)
+101
pkgs/by-name/bo/box2d/package.nix
···
··· 1 + { 2 + lib, 3 + stdenv, 4 + fetchFromGitHub, 5 + 6 + substituteAll, 7 + 8 + # nativeBuildInputs 9 + cmake, 10 + pkg-config, 11 + 12 + # buildInputs 13 + glfw3, 14 + imgui, 15 + libGLU, 16 + libX11, 17 + libXcursor, 18 + libXi, 19 + libXinerama, 20 + libXrandr, 21 + libglut, 22 + xorgproto, 23 + 24 + nix-update-script, 25 + }: 26 + 27 + let 28 + inherit (lib) cmakeBool; 29 + 30 + in 31 + stdenv.mkDerivation (finalAttrs: { 32 + pname = "box2d"; 33 + version = "3.1.0"; 34 + 35 + src = fetchFromGitHub { 36 + owner = "erincatto"; 37 + repo = "box2d"; 38 + tag = "v${finalAttrs.version}"; 39 + hash = "sha256-QTSU1+9x8GoUK3hlTDMh43fc4vbNfFR7syt6xVHIuPs="; 40 + }; 41 + 42 + patches = [ 43 + # prevent CMake from trying to download some libraries from the internet 44 + (substituteAll { 45 + src = ./cmake_dont_fetch_enkits.patch; 46 + enkits_src = fetchFromGitHub { 47 + owner = "dougbinks"; 48 + repo = "enkiTS"; 49 + rev = "686d0ec31829e0d9e5edf9ceb68c40f9b9b20ea9"; 50 + hash = "sha256-CerLj/WY+J3mrMvv7dGmZltjAM9v5C/IY4X+Ph78HVs="; 51 + }; 52 + }) 53 + ./cmake_use_system_glfw_and_imgui.patch 54 + ]; 55 + 56 + env.NIX_CFLAGS_COMPILE = toString ( 57 + lib.optionals stdenv.cc.isGNU [ 58 + # error: '*(float *)((char *)&localPointA + offsetof(b2Vec2, y))' may be used uninitialized 59 + "-Wno-error=maybe-uninitialized" 60 + ] 61 + ); 62 + 63 + nativeBuildInputs = [ 64 + cmake 65 + pkg-config 66 + ]; 67 + 68 + buildInputs = [ 69 + glfw3 70 + (imgui.override { 71 + # GLFW backend is disabled by default on darwin but box2d imports it unconditionally 72 + # https://github.com/erincatto/box2d/blob/v3.1.0/samples/main.cpp#L28 73 + IMGUI_BUILD_GLFW_BINDING = true; 74 + }) 75 + libGLU 76 + libX11 77 + libXcursor 78 + libXi 79 + libXinerama 80 + libXrandr 81 + libglut 82 + xorgproto 83 + ]; 84 + 85 + cmakeFlags = [ 86 + (cmakeBool "BOX2D_BUILD_UNIT_TESTS" finalAttrs.finalPackage.doCheck) 87 + ]; 88 + 89 + passthru = { 90 + updateScript = nix-update-script { }; 91 + }; 92 + 93 + meta = 
{ 94 + description = "2D physics engine"; 95 + homepage = "https://box2d.org/"; 96 + changelog = "https://github.com/erincatto/box2d/releases/tag/v${finalAttrs.version}"; 97 + maintainers = with lib.maintainers; [ raskin ]; 98 + platforms = lib.platforms.unix; 99 + license = lib.licenses.zlib; 100 + }; 101 + })
+68
pkgs/by-name/bo/box2d_2/package.nix
···
··· 1 + { 2 + lib, 3 + stdenv, 4 + fetchFromGitHub, 5 + 6 + # nativeBuildInputs 7 + cmake, 8 + pkg-config, 9 + 10 + # buildInputs 11 + glfw3, 12 + libGLU, 13 + libX11, 14 + libXcursor, 15 + libXi, 16 + libXinerama, 17 + libXrandr, 18 + libglut, 19 + xorgproto, 20 + 21 + nix-update-script, 22 + }: 23 + 24 + let 25 + inherit (lib) cmakeBool; 26 + 27 + in 28 + stdenv.mkDerivation (finalAttrs: { 29 + pname = "box2d"; 30 + version = "2.4.2"; 31 + 32 + src = fetchFromGitHub { 33 + owner = "erincatto"; 34 + repo = "box2d"; 35 + tag = "v${finalAttrs.version}"; 36 + hash = "sha256-yvhpgiZpjTPeSY7Ma1bh4LwIokUUKB10v2WHlamL9D8="; 37 + }; 38 + 39 + nativeBuildInputs = [ 40 + cmake 41 + pkg-config 42 + ]; 43 + 44 + buildInputs = [ 45 + glfw3 46 + libGLU 47 + libX11 48 + libXcursor 49 + libXi 50 + libXinerama 51 + libXrandr 52 + libglut 53 + xorgproto 54 + ]; 55 + 56 + cmakeFlags = [ 57 + (cmakeBool "BOX2D_BUILD_UNIT_TESTS" finalAttrs.finalPackage.doCheck) 58 + ]; 59 + 60 + meta = { 61 + description = "2D physics engine"; 62 + homepage = "https://box2d.org/"; 63 + changelog = "https://github.com/erincatto/box2d/releases/tag/v${finalAttrs.version}"; 64 + maintainers = with lib.maintainers; [ raskin ]; 65 + platforms = lib.platforms.unix; 66 + license = lib.licenses.zlib; 67 + }; 68 + })
+14
pkgs/by-name/ca/catppuccin/package.nix
··· 1 let 2 validThemes = [ 3 "bat" 4 "bottom" 5 "btop" ··· 55 56 selectedSources = map (themeName: builtins.getAttr themeName sources) themeList; 57 sources = { 58 bat = fetchFromGitHub { 59 name = "bat"; 60 owner = "catppuccin"; ··· 221 222 local capitalizedVariant=$(sed 's/^\(.\)/\U\1/' <<< "${variant}") 223 local capitalizedAccent=$(sed 's/^\(.\)/\U\1/' <<< "${accent}") 224 225 '' 226 + lib.optionalString (lib.elem "bat" themeList) ''
··· 1 let 2 validThemes = [ 3 + "alacritty" 4 "bat" 5 "bottom" 6 "btop" ··· 56 57 selectedSources = map (themeName: builtins.getAttr themeName sources) themeList; 58 sources = { 59 + alacritty = fetchFromGitHub { 60 + name = "alacritty"; 61 + owner = "catppuccin"; 62 + repo = "alacritty"; 63 + rev = "f6cb5a5c2b404cdaceaff193b9c52317f62c62f7"; 64 + hash = "sha256-H8bouVCS46h0DgQ+oYY8JitahQDj0V9p2cOoD4cQX+Q="; 65 + }; 66 + 67 bat = fetchFromGitHub { 68 name = "bat"; 69 owner = "catppuccin"; ··· 230 231 local capitalizedVariant=$(sed 's/^\(.\)/\U\1/' <<< "${variant}") 232 local capitalizedAccent=$(sed 's/^\(.\)/\U\1/' <<< "${accent}") 233 + 234 + '' 235 + + lib.optionalString (lib.elem "alacritty" themeList) '' 236 + mkdir -p "$out/alacritty" 237 + cp "${sources.alacritty}/catppuccin-${variant}.toml" "$out/alacritty/" 238 239 '' 240 + lib.optionalString (lib.elem "bat" themeList) ''
+4 -3
pkgs/by-name/ch/chatbox/package.nix
··· 2 lib, 3 appimageTools, 4 fetchurl, 5 }: 6 let 7 pname = "chatbox"; 8 - version = "1.11.3"; 9 10 src = fetchurl { 11 url = "https://download.chatboxai.app/releases/Chatbox-${version}-x86_64.AppImage"; 12 - hash = "sha256-YMZsgbCkG/cZd77DOVZj6jVJDzWMcrt92soxyLNr2QI="; 13 }; 14 15 appimageContents = appimageTools.extract { inherit pname version src; }; ··· 25 --replace-fail 'Icon=xyz.chatboxapp.app' 'Icon=chatbox' 26 ''; 27 28 - passthru.updateScript = '' 29 #!/usr/bin/env nix-shell 30 #!nix-shell -i bash -p curl gnugrep common-updater-scripts 31 version=$(curl -I -X GET https://chatboxai.app/install_chatbox/linux | grep -oP 'Chatbox-\K[0-9]+\.[0-9]+\.[0-9]+')
··· 2 lib, 3 appimageTools, 4 fetchurl, 5 + writeScript, 6 }: 7 let 8 pname = "chatbox"; 9 + version = "1.11.12"; 10 11 src = fetchurl { 12 url = "https://download.chatboxai.app/releases/Chatbox-${version}-x86_64.AppImage"; 13 + hash = "sha256-sfR4fXpREPdEoPoez5ugCpn/ubpB6EhkvpVq6rz0BSg="; 14 }; 15 16 appimageContents = appimageTools.extract { inherit pname version src; }; ··· 26 --replace-fail 'Icon=xyz.chatboxapp.app' 'Icon=chatbox' 27 ''; 28 29 + passthru.updateScript = writeScript "update-chatbox" '' 30 #!/usr/bin/env nix-shell 31 #!nix-shell -i bash -p curl gnugrep common-updater-scripts 32 version=$(curl -I -X GET https://chatboxai.app/install_chatbox/linux | grep -oP 'Chatbox-\K[0-9]+\.[0-9]+\.[0-9]+')
+72
pkgs/by-name/cl/clapper-enhancers/package.nix
···
··· 1 + { 2 + lib, 3 + stdenv, 4 + fetchFromGitHub, 5 + meson, 6 + ninja, 7 + pkg-config, 8 + gobject-introspection, 9 + libpeas2, 10 + json-glib, 11 + libsoup_3, 12 + glib, 13 + clapper-unwrapped, 14 + gst_all_1, 15 + python3Packages, 16 + }: 17 + 18 + stdenv.mkDerivation (finalAttrs: { 19 + pname = "clapper-enhancers"; 20 + version = "0.8.2"; 21 + 22 + src = fetchFromGitHub { 23 + owner = "Rafostar"; 24 + repo = "clapper-enhancers"; 25 + tag = finalAttrs.version; 26 + hash = "sha256-CUfTz+YvliJXiv/nsdrpDVFSblqoSW7js8vAtJ9u5vM="; 27 + }; 28 + 29 + nativeBuildInputs = [ 30 + meson 31 + ninja 32 + pkg-config 33 + gobject-introspection 34 + python3Packages.python 35 + python3Packages.wrapPython 36 + ]; 37 + 38 + buildInputs = [ 39 + libpeas2 40 + json-glib 41 + libsoup_3 42 + glib 43 + clapper-unwrapped 44 + gst_all_1.gstreamer 45 + gst_all_1.gst-plugins-base 46 + gst_all_1.gst-plugins-good 47 + gst_all_1.gst-plugins-bad 48 + gst_all_1.gst-plugins-ugly 49 + ]; 50 + 51 + pythonPath = with python3Packages; [ 52 + yt-dlp 53 + ]; 54 + 55 + postFixup = '' 56 + buildPythonPath "$out $pythonPath" 57 + for yt_plugin in $out/lib/clapper-enhancers/plugins/yt-dlp/*.py; do 58 + patchPythonScript $yt_plugin 59 + done 60 + ''; 61 + 62 + mesonFlags = [ "-Denhancersdir=${placeholder "out"}/${finalAttrs.passthru.pluginPath}" ]; 63 + 64 + passthru.pluginPath = "lib/clapper-enhancers/plugins"; 65 + 66 + meta = { 67 + description = "Plugins enhancing Clapper library capabilities"; 68 + homepage = "https://github.com/Rafostar/clapper-enhancers"; 69 + license = lib.licenses.lgpl21Only; 70 + inherit (clapper-unwrapped.meta) maintainers platforms; 71 + }; 72 + })
+86
pkgs/by-name/cl/clapper-unwrapped/package.nix
···
··· 1 + { 2 + lib, 3 + stdenv, 4 + fetchFromGitHub, 5 + gobject-introspection, 6 + pkg-config, 7 + ninja, 8 + desktop-file-utils, 9 + shared-mime-info, 10 + meson, 11 + gtk4, 12 + gst_all_1, 13 + libGL, 14 + libadwaita, 15 + libsoup_3, 16 + vala, 17 + cmake, 18 + libmicrodns, 19 + glib-networking, 20 + libpeas2, 21 + }: 22 + 23 + stdenv.mkDerivation (finalAttrs: { 24 + pname = "clapper-unwrapped"; 25 + version = "0.8.0"; 26 + 27 + outputs = [ 28 + "out" 29 + "lib" 30 + "dev" 31 + ]; 32 + 33 + src = fetchFromGitHub { 34 + owner = "Rafostar"; 35 + repo = "clapper"; 36 + tag = finalAttrs.version; 37 + hash = "sha256-Yb2fWsdd8jhxkGWKanLn7CAuF4MjyQ27XTrO8ja3hfs="; 38 + }; 39 + 40 + nativeBuildInputs = [ 41 + gobject-introspection 42 + meson 43 + cmake 44 + ninja 45 + pkg-config 46 + desktop-file-utils # for update-desktop-database 47 + shared-mime-info # for update-mime-database 48 + vala 49 + ]; 50 + 51 + buildInputs = [ 52 + gst_all_1.gstreamer 53 + gst_all_1.gst-plugins-base 54 + gst_all_1.gst-plugins-good 55 + gst_all_1.gst-plugins-bad 56 + gst_all_1.gst-plugins-ugly 57 + glib-networking # for TLS support 58 + gtk4 59 + libGL 60 + libadwaita 61 + libsoup_3 62 + libmicrodns 63 + libpeas2 64 + ]; 65 + 66 + postPatch = '' 67 + patchShebangs --build build-aux/meson/postinstall.py 68 + ''; 69 + 70 + preFixup = '' 71 + mkdir -p $out/share/gsettings-schemas 72 + cp -r $lib/share/gsettings-schemas/clapper-unwrapped-$version $out/share/gsettings-schemas/clapper-$version 73 + ''; 74 + 75 + meta = { 76 + description = "GNOME media player built using GTK4 toolkit and powered by GStreamer with OpenGL rendering"; 77 + longDescription = '' 78 + Clapper is a GNOME media player built using the GTK4 toolkit. 79 + The media player is using GStreamer as a media backend. 80 + ''; 81 + homepage = "https://github.com/Rafostar/clapper"; 82 + license = lib.licenses.gpl3Plus; 83 + maintainers = with lib.maintainers; [ aleksana ]; 84 + platforms = lib.platforms.linux; 85 + }; 86 + })
+23 -70
pkgs/by-name/cl/clapper/package.nix
··· 1 { 2 - lib, 3 - stdenv, 4 - fetchFromGitHub, 5 - gobject-introspection, 6 - pkg-config, 7 - ninja, 8 - desktop-file-utils, 9 - makeWrapper, 10 - shared-mime-info, 11 wrapGAppsHook4, 12 - meson, 13 - gtk4, 14 - gst_all_1, 15 - libGL, 16 - libadwaita, 17 - libsoup_3, 18 - vala, 19 - cmake, 20 - libmicrodns, 21 - gtuber, 22 - glib-networking, 23 - libpeas2, 24 }: 25 26 - stdenv.mkDerivation (finalAttrs: { 27 pname = "clapper"; 28 - version = "0.8.0"; 29 30 - src = fetchFromGitHub { 31 - owner = "Rafostar"; 32 - repo = "clapper"; 33 - tag = finalAttrs.version; 34 - hash = "sha256-Yb2fWsdd8jhxkGWKanLn7CAuF4MjyQ27XTrO8ja3hfs="; 35 - }; 36 37 nativeBuildInputs = [ 38 gobject-introspection 39 - meson 40 - cmake 41 - ninja 42 - makeWrapper 43 - pkg-config 44 - wrapGAppsHook4 # for gsettings 45 - desktop-file-utils # for update-desktop-database 46 - shared-mime-info # for update-mime-database 47 - vala 48 ]; 49 50 - buildInputs = [ 51 - gst_all_1.gstreamer 52 - gst_all_1.gst-plugins-base 53 - gst_all_1.gst-plugins-good 54 - gst_all_1.gst-plugins-bad 55 - gst_all_1.gst-plugins-ugly 56 - gtuber 57 - glib-networking # for TLS support 58 - gtk4 59 - libGL 60 - libadwaita 61 - libsoup_3 62 - libmicrodns 63 - libpeas2 64 - ]; 65 66 - postPatch = '' 67 - patchShebangs --build build-aux/meson/postinstall.py 68 ''; 69 70 - # The package uses "clappersink" provided by itself 71 preFixup = '' 72 gappsWrapperArgs+=( 73 - --prefix GST_PLUGIN_SYSTEM_PATH_1_0 : $out/lib/gstreamer-1.0 74 ) 75 ''; 76 - 77 - meta = { 78 - description = "GNOME media player built using GTK4 toolkit and powered by GStreamer with OpenGL rendering"; 79 - longDescription = '' 80 - Clapper is a GNOME media player built using the GTK4 toolkit. 81 - The media player is using GStreamer as a media backend. 82 - ''; 83 - homepage = "https://github.com/Rafostar/clapper"; 84 - license = lib.licenses.gpl3Plus; 85 - maintainers = with lib.maintainers; [ aleksana ]; 86 - platforms = lib.platforms.linux; 87 - }; 88 - })
··· 1 { 2 + stdenvNoCC, 3 + clapper-unwrapped, 4 wrapGAppsHook4, 5 + gobject-introspection, 6 + xorg, 7 + clapper-enhancers, 8 }: 9 10 + stdenvNoCC.mkDerivation { 11 pname = "clapper"; 12 + inherit (clapper-unwrapped) version meta; 13 14 + src = clapper-unwrapped; 15 + 16 + dontConfigure = true; 17 + dontBuild = true; 18 19 nativeBuildInputs = [ 20 + wrapGAppsHook4 21 gobject-introspection 22 + xorg.lndir 23 ]; 24 25 + buildInputs = [ clapper-unwrapped ] ++ clapper-unwrapped.buildInputs; 26 27 + installPhase = '' 28 + runHook preInstall 29 + 30 + mkdir -p $out 31 + lndir $src $out 32 + 33 + runHook postInstall 34 ''; 35 36 preFixup = '' 37 gappsWrapperArgs+=( 38 + --set-default CLAPPER_ENHANCERS_PATH "${clapper-enhancers}/${clapper-enhancers.passthru.pluginPath}" 39 ) 40 ''; 41 + }
+6 -8
pkgs/by-name/co/copier/package.nix
··· 7 8 python3.pkgs.buildPythonApplication rec { 9 pname = "copier"; 10 - version = "9.4.1"; 11 pyproject = true; 12 13 src = fetchFromGitHub { ··· 18 postFetch = '' 19 rm $out/tests/demo/doc/ma*ana.txt 20 ''; 21 - hash = "sha256-bNz3xFYksgdN9iXbkZIHWsHpRa9aICxBZmzy/t0+3z0="; 22 }; 23 24 POETRY_DYNAMIC_VERSIONING_BYPASS = version; ··· 49 questionary 50 ]; 51 52 - makeWrapperArgs = [ 53 - "--suffix PATH : ${lib.makeBinPath [ git ]}" 54 - ]; 55 56 - meta = with lib; { 57 description = "Library and command-line utility for rendering projects templates"; 58 homepage = "https://copier.readthedocs.io"; 59 changelog = "https://github.com/copier-org/copier/blob/v${version}/CHANGELOG.md"; 60 - license = licenses.mit; 61 - maintainers = with maintainers; [ greg ]; 62 mainProgram = "copier"; 63 }; 64 }
··· 7 8 python3.pkgs.buildPythonApplication rec { 9 pname = "copier"; 10 + version = "9.6.0"; 11 pyproject = true; 12 13 src = fetchFromGitHub { ··· 18 postFetch = '' 19 rm $out/tests/demo/doc/ma*ana.txt 20 ''; 21 + hash = "sha256-mezmXrOvfqbZGZadNZklQZt/OEKqRYnwugNkZc88t6o="; 22 }; 23 24 POETRY_DYNAMIC_VERSIONING_BYPASS = version; ··· 49 questionary 50 ]; 51 52 + makeWrapperArgs = [ "--suffix PATH : ${lib.makeBinPath [ git ]}" ]; 53 54 + meta = { 55 description = "Library and command-line utility for rendering projects templates"; 56 homepage = "https://copier.readthedocs.io"; 57 changelog = "https://github.com/copier-org/copier/blob/v${version}/CHANGELOG.md"; 58 + license = lib.licenses.mit; 59 + maintainers = with lib.maintainers; [ greg ]; 60 mainProgram = "copier"; 61 }; 62 }
+1 -1
pkgs/by-name/ed/eduli/package.nix
··· 11 src = fetchzip { 12 name = "${pname}-${version}"; 13 url = "https://language.moe.gov.tw/001/Upload/Files/site_content/M0001/MoeLI-3.0.zip"; 14 - hash = "sha256-/X5jPtjOvJTUbiKV5r2dWIFL5ft0iemdwj+Zt+Q13OA="; 15 }; 16 17 installPhase = ''
··· 11 src = fetchzip { 12 name = "${pname}-${version}"; 13 url = "https://language.moe.gov.tw/001/Upload/Files/site_content/M0001/MoeLI-3.0.zip"; 14 + hash = "sha256-bDQtLugYPWwJJNusBLEJrgIVufocRK4NIR0CCGaTkyw="; 15 }; 16 17 installPhase = ''
+18
pkgs/by-name/ex/excalidraw_export/bump-nan.patch
···
··· 1 + diff --git a/package-lock.json b/package-lock.json 2 + index 89dd54f..2f364e4 100644 3 + --- a/package-lock.json 4 + +++ b/package-lock.json 5 + @@ -805,9 +805,10 @@ 6 + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" 7 + }, 8 + "node_modules/nan": { 9 + - "version": "2.18.0", 10 + - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", 11 + - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==" 12 + + "version": "2.22.0", 13 + + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", 14 + + "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", 15 + + "license": "MIT" 16 + }, 17 + "node_modules/node-fetch": { 18 + "version": "2.7.0",
+7 -3
pkgs/by-name/ex/excalidraw_export/package.nix
··· 5 cairo, 6 pango, 7 pkg-config, 8 - stdenv, 9 }: 10 11 buildNpmPackage { ··· 19 hash = "sha256-E5kYI8+hzObd2WNVBd0aQDKMH1Sns539loCQfClJs1Q="; 20 }; 21 22 - npmDepsHash = "sha256-5yec7BCi1c/e+y00TqxIeoazs49+WdKdfsskAqnVkFs="; 23 24 npmBuildScript = "compile"; 25 ··· 35 license = lib.licenses.mit; 36 maintainers = with lib.maintainers; [ venikx ]; 37 mainProgram = "excalidraw_export"; 38 - broken = stdenv.hostPlatform.isDarwin; 39 }; 40 }
··· 5 cairo, 6 pango, 7 pkg-config, 8 }: 9 10 buildNpmPackage { ··· 18 hash = "sha256-E5kYI8+hzObd2WNVBd0aQDKMH1Sns539loCQfClJs1Q="; 19 }; 20 21 + patches = [ 22 + # for compatibility with nodejs_22 23 + # generated by running: `npm update nan` 24 + ./bump-nan.patch 25 + ]; 26 + 27 + npmDepsHash = "sha256-bU+q83Pmw6+mCF5rTR0DyYstUYVN+YxH9T9H7ZA15so="; 28 29 npmBuildScript = "compile"; 30 ··· 40 license = lib.licenses.mit; 41 maintainers = with lib.maintainers; [ venikx ]; 42 mainProgram = "excalidraw_export"; 43 }; 44 }
+3 -3
pkgs/by-name/ex/exportarr/package.nix
··· 7 8 buildGoModule rec { 9 pname = "exportarr"; 10 - version = "2.1.0"; 11 12 src = fetchFromGitHub { 13 owner = "onedr0p"; 14 repo = "exportarr"; 15 rev = "v${version}"; 16 - hash = "sha256-iNcdD2zigBZ2phtCkHRV0GZPrEjopV0K+vin1Zkb3Oo="; 17 }; 18 19 - vendorHash = "sha256-F5WOSsjeOzMaj4Yze11OnqpgL1l87wpgl3OQEGya//M="; 20 21 subPackages = [ "cmd/exportarr" ]; 22
··· 7 8 buildGoModule rec { 9 pname = "exportarr"; 10 + version = "2.2.0"; 11 12 src = fetchFromGitHub { 13 owner = "onedr0p"; 14 repo = "exportarr"; 15 rev = "v${version}"; 16 + hash = "sha256-8EHFnqQ3h9/X2lR+9UuuJeSPMonuVQVDYiMDoroDajs="; 17 }; 18 19 + vendorHash = "sha256-yzzhlhrfzj+qlG4wY+qGM0/sTUUlVQAgwiKNUEIVN0g="; 20 21 subPackages = [ "cmd/exportarr" ]; 22
+26
pkgs/by-name/fi/fim/native-tools.patch
···
··· 1 + diff --git a/doc/Makefile.in b/doc/Makefile.in 2 + index be8ebe5..13ae676 100644 3 + --- a/doc/Makefile.in 4 + +++ b/doc/Makefile.in 5 + @@ -684,7 +684,7 @@ fimgs.man.html: fimgs.man 6 + if test ! -s $@ ; then $(MAN2TXT) ./$< > $<.txt && $(srcdir)/vim2html.pl tags $<.txt && mv $<.txt.html $@ ; fi 7 + 8 + doctags$(EXEEXT): doctags.c 9 + - $(CC) -o $@ $< 10 + + @cc_for_build@ -o $@ $< 11 + 12 + #clean: 13 + # $(RM) -f doctags.o doctags$(EXEEXT) FIM.html tags fim-stylesheet.css fim.man.html fimgs.man.html 14 + diff --git a/src/Makefile.in b/src/Makefile.in 15 + index 76f5e1e..027159c 100644 16 + --- a/src/Makefile.in 17 + +++ b/src/Makefile.in 18 + @@ -1132,7 +1132,7 @@ help-acm.cpp: fim.h 19 + $(ECHO) '//#endif' >> $@ 20 + 21 + b2ba$(EXEEXT): b2ba.c 22 + - $(CC) -o $@ $< 23 + + @cc_for_build@ -o $@ $< 24 + 25 + @FIM_WANT_CUSTOM_HARDCODED_CONSOLEFONT_TRUE@@HAVE_RUNNABLE_TESTS_TRUE@default_font_byte_array.h: $(FIM_CUSTOM_HARDCODED_CONSOLEFONT) b2ba$(EXEEXT) 26 + @FIM_WANT_CUSTOM_HARDCODED_CONSOLEFONT_TRUE@@HAVE_RUNNABLE_TESTS_TRUE@ ./b2ba$(EXEEXT) < $< > $@
+27 -5
pkgs/by-name/fi/fim/package.nix
··· 4 autoconf, 5 automake, 6 pkg-config, 7 lib, 8 perl, 9 flex, 10 bison, 11 readline, 12 libexif, 13 - x11Support ? true, 14 SDL, 15 svgSupport ? true, 16 inkscape, ··· 35 sha256 = "sha256-/p7bjeZM46DJOQ9sgtebhkNpBPj2RJYY3dMXhzHnNmg="; 36 }; 37 38 postPatch = '' 39 - substituteInPlace doc/vim2html.pl \ 40 - --replace /usr/bin/perl ${perl}/bin/perl 41 ''; 42 43 nativeBuildInputs = [ 44 autoconf 45 automake 46 pkg-config 47 ]; 48 49 buildInputs = 50 [ 51 - perl 52 flex 53 - bison 54 readline 55 libexif 56 ] 57 ++ lib.optional x11Support SDL 58 ++ lib.optional svgSupport inkscape ··· 62 ++ lib.optional jpegSupport libjpeg 63 ++ lib.optional pngSupport libpng; 64 65 env.NIX_CFLAGS_COMPILE = lib.optionalString x11Support "-lSDL"; 66 67 meta = with lib; {
··· 4 autoconf, 5 automake, 6 pkg-config, 7 + substituteAll, 8 lib, 9 perl, 10 flex, 11 bison, 12 readline, 13 libexif, 14 + bash, 15 + buildPackages, 16 + # SDL depends on Qt, which doesn't cross-compile 17 + x11Support ? (stdenv.buildPlatform.canExecute stdenv.hostPlatform), 18 SDL, 19 svgSupport ? true, 20 inkscape, ··· 39 sha256 = "sha256-/p7bjeZM46DJOQ9sgtebhkNpBPj2RJYY3dMXhzHnNmg="; 40 }; 41 42 + patches = [ 43 + # build tools with a build compiler 44 + (substituteAll { 45 + src = ./native-tools.patch; 46 + cc_for_build = lib.getExe buildPackages.stdenv.cc; 47 + }) 48 + ]; 49 + 50 postPatch = '' 51 + patchShebangs --build doc/vim2html.pl 52 ''; 53 54 nativeBuildInputs = [ 55 autoconf 56 automake 57 + bison 58 + flex 59 + perl 60 pkg-config 61 ]; 62 63 buildInputs = 64 [ 65 flex 66 readline 67 libexif 68 + bash 69 ] 70 ++ lib.optional x11Support SDL 71 ++ lib.optional svgSupport inkscape ··· 75 ++ lib.optional jpegSupport libjpeg 76 ++ lib.optional pngSupport libpng; 77 78 + configureFlags = [ 79 + # mmap works on all relevant platforms 80 + "ac_cv_func_mmap_fixed_mapped=yes" 81 + # system regexp works on all relevant platforms 82 + "fim_cv_regex_broken=no" 83 + ]; 84 + 85 + env.LIBAA_CONFIG = lib.getExe' (lib.getDev aalib) "aalib-config"; 86 + env.LIBPNG_CONFIG = lib.getExe' (lib.getDev libpng) "libpng-config"; 87 env.NIX_CFLAGS_COMPILE = lib.optionalString x11Support "-lSDL"; 88 89 meta = with lib; {
+3 -3
pkgs/by-name/fq/fq/package.nix
··· 8 9 buildGoModule rec { 10 pname = "fq"; 11 - version = "0.14.0"; 12 13 src = fetchFromGitHub { 14 owner = "wader"; 15 repo = "fq"; 16 rev = "v${version}"; 17 - hash = "sha256-I3mVAPNWjRHG0td1ulzGOthiNybfWLx1HgwPjFfBHCo="; 18 }; 19 20 - vendorHash = "sha256-p2cvv983gYTvyLPYIGVsk6N7yUzBpiPzgJ3sMRNWPTo="; 21 22 ldflags = [ 23 "-s"
··· 8 9 buildGoModule rec { 10 pname = "fq"; 11 + version = "0.15.0"; 12 13 src = fetchFromGitHub { 14 owner = "wader"; 15 repo = "fq"; 16 rev = "v${version}"; 17 + hash = "sha256-/yRTeWyHVZZzWbkI/VhYD/T5KZq8E2j678LE8gn5ILE="; 18 }; 19 20 + vendorHash = "sha256-vp7JQSnzijCkWximw4E2abd39l0qjIGkl+mPfEoli0w="; 21 22 ldflags = [ 23 "-s"
+121
pkgs/by-name/fs/fstar/package.nix
···
··· 1 + { 2 + callPackage, 3 + fetchFromGitHub, 4 + installShellFiles, 5 + lib, 6 + makeWrapper, 7 + nix-update-script, 8 + ocaml-ng, 9 + removeReferencesTo, 10 + util-linux, 11 + which, 12 + }: 13 + 14 + let 15 + # The version of ocaml fstar uses. 16 + ocamlPackages = ocaml-ng.ocamlPackages_4_14; 17 + 18 + fstarZ3 = callPackage ./z3 { }; 19 + in 20 + ocamlPackages.buildDunePackage rec { 21 + pname = "fstar"; 22 + version = "2025.03.25"; 23 + 24 + src = fetchFromGitHub { 25 + owner = "FStarLang"; 26 + repo = "FStar"; 27 + rev = "v${version}"; 28 + hash = "sha256-PhjfThXF6fJlFHtNEURG4igCnM6VegWODypmRvnZPdA="; 29 + }; 30 + 31 + duneVersion = "3"; 32 + 33 + nativeBuildInputs = [ 34 + ocamlPackages.menhir 35 + which 36 + util-linux 37 + installShellFiles 38 + makeWrapper 39 + removeReferencesTo 40 + ]; 41 + 42 + prePatch = '' 43 + patchShebangs .scripts/*.sh 44 + patchShebangs ulib/ml/app/ints/mk_int_file.sh 45 + ''; 46 + 47 + buildInputs = with ocamlPackages; [ 48 + batteries 49 + menhir 50 + menhirLib 51 + pprint 52 + ppx_deriving 53 + ppx_deriving_yojson 54 + ppxlib 55 + process 56 + sedlex 57 + stdint 58 + yojson 59 + zarith 60 + memtrace 61 + mtime 62 + ]; 63 + 64 + preConfigure = '' 65 + mkdir -p cache 66 + export DUNE_CACHE_ROOT="$(pwd)/cache" 67 + export PATH="${lib.makeBinPath [ fstarZ3 ]}''${PATH:+:}$PATH" 68 + export PREFIX="$out" 69 + ''; 70 + 71 + buildPhase = '' 72 + runHook preBuild 73 + make -j$NIX_BUILD_CORES 74 + runHook postBuild 75 + ''; 76 + 77 + installPhase = '' 78 + runHook preInstall 79 + 80 + make install 81 + 82 + remove-references-to -t '${ocamlPackages.ocaml}' $out/bin/fstar.exe 83 + 84 + for binary in $out/bin/*; do 85 + wrapProgram "$binary" --prefix PATH : "${lib.makeBinPath [ fstarZ3 ]}" 86 + done 87 + 88 + src="$(pwd)" 89 + cd $out 90 + installShellCompletion --bash $src/.completion/bash/fstar.exe.bash 91 + installShellCompletion --fish $src/.completion/fish/fstar.exe.fish 92 + installShellCompletion --zsh --name _fstar.exe 
$src/.completion/zsh/__fstar.exe 93 + cd $src 94 + 95 + runHook postInstall 96 + ''; 97 + 98 + enableParallelBuilding = true; 99 + 100 + passthru = { 101 + updateScript = nix-update-script { 102 + extraArgs = [ 103 + "--version-regex" 104 + "v(\d{4}\.\d{2}\.\d{2})$" 105 + ]; 106 + }; 107 + z3 = fstarZ3; 108 + }; 109 + 110 + meta = with lib; { 111 + description = "ML-like functional programming language aimed at program verification"; 112 + homepage = "https://www.fstar-lang.org"; 113 + changelog = "https://github.com/FStarLang/FStar/raw/v${version}/CHANGES.md"; 114 + license = licenses.asl20; 115 + maintainers = with maintainers; [ 116 + numinit 117 + ]; 118 + mainProgram = "fstar.exe"; 119 + platforms = with platforms; darwin ++ linux; 120 + }; 121 + }
+26
pkgs/by-name/fs/fstar/z3/4-8-5-typos.diff
···
··· 1 + diff --git a/src/util/lp/lp_core_solver_base.h b/src/util/lp/lp_core_solver_base.h 2 + index 4c17df2..4c3c311 100644 3 + --- a/src/util/lp/lp_core_solver_base.h 4 + +++ b/src/util/lp/lp_core_solver_base.h 5 + @@ -600,8 +600,6 @@ public: 6 + out << " \n"; 7 + } 8 + 9 + - bool column_is_free(unsigned j) const { return this->m_column_type[j] == free; } 10 + - 11 + bool column_has_upper_bound(unsigned j) const { 12 + switch(m_column_types[j]) { 13 + case column_type::free_column: 14 + diff --git a/src/util/lp/static_matrix_def.h b/src/util/lp/static_matrix_def.h 15 + index 7949573..2f1cb42 100644 16 + --- a/src/util/lp/static_matrix_def.h 17 + +++ b/src/util/lp/static_matrix_def.h 18 + @@ -86,7 +86,7 @@ static_matrix<T, X>::static_matrix(static_matrix const &A, unsigned * /* basis * 19 + init_row_columns(m, m); 20 + while (m--) { 21 + for (auto & col : A.m_columns[m]){ 22 + - set(col.var(), m, A.get_value_of_column_cell(col)); 23 + + set(col.var(), m, A.get_column_cell(col)); 24 + } 25 + } 26 + }
+106
pkgs/by-name/fs/fstar/z3/default.nix
···
··· 1 + { 2 + fetchFromGitHub, 3 + fetchpatch, 4 + lib, 5 + replaceVars, 6 + stdenvNoCC, 7 + z3, 8 + }: 9 + 10 + let 11 + # fstar has a pretty hard dependency on certain z3 patch versions. 12 + # https://github.com/FStarLang/FStar/issues/3689#issuecomment-2625073641 13 + # We need to package all the Z3 versions it prefers here. 14 + fstarNewZ3Version = "4.13.3"; 15 + fstarNewZ3 = 16 + if z3.version == fstarNewZ3Version then 17 + z3 18 + else 19 + z3.overrideAttrs (final: rec { 20 + version = fstarNewZ3Version; 21 + src = fetchFromGitHub { 22 + owner = "Z3Prover"; 23 + repo = "z3"; 24 + rev = "z3-${version}"; 25 + hash = "sha256-odwalnF00SI+sJGHdIIv4KapFcfVVKiQ22HFhXYtSvA="; 26 + }; 27 + }); 28 + 29 + fstarOldZ3Version = "4.8.5"; 30 + fstarOldZ3 = 31 + if z3.version == fstarOldZ3Version then 32 + z3 33 + else 34 + z3.overrideAttrs (prev: rec { 35 + version = fstarOldZ3Version; 36 + src = fetchFromGitHub { 37 + owner = "Z3Prover"; 38 + repo = "z3"; 39 + rev = "Z3-${version}"; # caps matter 40 + hash = "sha256-ytG5O9HczbIVJAiIGZfUXC/MuYH7d7yLApaeTRlKXoc="; 41 + }; 42 + patches = 43 + let 44 + static-matrix-patch = fetchpatch { 45 + # clang / gcc fixes. 
fixes typos in some member names 46 + name = "gcc-15-fixes.patch"; 47 + url = "https://github.com/Z3Prover/z3/commit/2ce89e5f491fa817d02d8fdce8c62798beab258b.patch"; 48 + includes = [ "src/@dir@/lp/static_matrix.h" ]; 49 + stripLen = 3; 50 + extraPrefix = "src/@dir@/"; 51 + hash = "sha256-+H1/VJPyI0yq4M/61ay8SRCa6OaoJ/5i+I3zVTAPUVo="; 52 + }; 53 + 54 + # replace @dir@ in the path of the given list of patches 55 + fixupPatches = dir: map (patch: replaceVars patch { dir = dir; }); 56 + in 57 + prev.patches or [ ] 58 + ++ fixupPatches "util" [ 59 + ./lower-bound-typo.diff 60 + static-matrix-patch 61 + ./tail-matrix.diff 62 + ] 63 + ++ [ 64 + ./4-8-5-typos.diff 65 + ]; 66 + 67 + postPatch = 68 + let 69 + python = lib.findFirst (pkg: lib.hasPrefix "python" pkg.pname) null prev.nativeBuildInputs; 70 + in 71 + 72 + assert python != null; 73 + 74 + prev.postPatch or "" 75 + + 76 + lib.optionalString 77 + ((lib.versionAtLeast python.version "3.12") && (lib.versionOlder version "4.8.14")) 78 + '' 79 + # See https://github.com/Z3Prover/z3/pull/5729. This is a specialization of this patch for 4.8.5. 80 + for file in scripts/mk_util.py src/api/python/CMakeLists.txt; do 81 + substituteInPlace "$file" \ 82 + --replace-fail "distutils.sysconfig.get_python_lib()" "sysconfig.get_path('purelib')" \ 83 + --replace-fail "distutils.sysconfig" "sysconfig" 84 + done 85 + ''; 86 + 87 + }); 88 + in 89 + stdenvNoCC.mkDerivation { 90 + name = "fstar-z3"; 91 + dontUnpack = true; 92 + 93 + installPhase = '' 94 + mkdir -p $out/bin 95 + ln -s ${lib.getExe fstarNewZ3} $out/bin/z3-${lib.escapeShellArg fstarNewZ3.version} 96 + ln -s ${lib.getExe fstarOldZ3} $out/bin/z3-${lib.escapeShellArg fstarOldZ3.version} 97 + ''; 98 + 99 + passthru = rec { 100 + new = fstarNewZ3; 101 + "z3_${lib.replaceStrings [ "." ] [ "_" ] fstarNewZ3.version}" = new; 102 + 103 + old = fstarOldZ3; 104 + "z3_${lib.replaceStrings [ "." ] [ "_" ] fstarOldZ3.version}" = old; 105 + }; 106 + }
+13
pkgs/by-name/fs/fstar/z3/lower-bound-typo.diff
···
··· 1 + diff --git a/src/@dir@/lp/column_info.h b/src/@dir@/lp/column_info.h 2 + index 1dc0c60..9cbeea6 100644 3 + --- a/src/@dir@/lp/column_info.h 4 + +++ b/src/@dir@/lp/column_info.h 5 + @@ -47,7 +47,7 @@ public: 6 + m_lower_bound_is_strict == c.m_lower_bound_is_strict && 7 + m_upper_bound_is_set == c.m_upper_bound_is_set&& 8 + m_upper_bound_is_strict == c.m_upper_bound_is_strict&& 9 + - (!m_lower_bound_is_set || m_lower_bound == c.m_low_bound) && 10 + + (!m_lower_bound_is_set || m_lower_bound == c.m_lower_bound) && 11 + (!m_upper_bound_is_set || m_upper_bound == c.m_upper_bound) && 12 + m_cost == c.m_cost && 13 + m_is_fixed == c.m_is_fixed &&
+12
pkgs/by-name/fs/fstar/z3/tail-matrix.diff
···
··· 1 + diff --git a/src/@dir@/lp/tail_matrix.h b/src/@dir@/lp/tail_matrix.h 2 + index 2047e8c..c84340e 100644 3 + --- a/src/@dir@/lp/tail_matrix.h 4 + +++ b/src/@dir@/lp/tail_matrix.h 5 + @@ -43,7 +43,6 @@ public: 6 + const tail_matrix & m_A; 7 + unsigned m_row; 8 + ref_row(const tail_matrix& m, unsigned row): m_A(m), m_row(row) {} 9 + - T operator[](unsigned j) const { return m_A.get_elem(m_row, j);} 10 + }; 11 + ref_row operator[](unsigned i) const { return ref_row(*this, i);} 12 + };
+2 -2
pkgs/by-name/ga/gapless/package.nix
··· 16 }: 17 stdenv.mkDerivation (finalAttrs: { 18 pname = "gapless"; 19 - version = "4.3.1"; 20 21 src = fetchFromGitLab { 22 domain = "gitlab.gnome.org"; 23 owner = "neithern"; 24 repo = "g4music"; 25 rev = "v${finalAttrs.version}"; 26 - hash = "sha256-zLjpYGZR/o76onSjwfpHEx26vd8rUa/1PXL+DuC8C2o="; 27 }; 28 29 nativeBuildInputs = [
··· 16 }: 17 stdenv.mkDerivation (finalAttrs: { 18 pname = "gapless"; 19 + version = "4.4"; 20 21 src = fetchFromGitLab { 22 domain = "gitlab.gnome.org"; 23 owner = "neithern"; 24 repo = "g4music"; 25 rev = "v${finalAttrs.version}"; 26 + hash = "sha256-10AFaUmqVkL4q8xgewfosN2/SziNhat9p6x/+9mBdyU="; 27 }; 28 29 nativeBuildInputs = [
+8 -4
pkgs/by-name/ge/gearlever/package.nix
··· 11 desktop-file-utils, 12 libadwaita, 13 file, 14 - p7zip, 15 which, 16 appimage-run, 17 gtk4, 18 bintools, 19 libnotify, 20 }: 21 22 python3Packages.buildPythonApplication rec { 23 pname = "gearlever"; 24 - version = "3.0.2"; 25 pyproject = false; # Built with meson 26 27 src = fetchFromGitHub { 28 owner = "mijorus"; 29 repo = "gearlever"; 30 tag = version; 31 - hash = "sha256-b5CH7qJKsXwARCIv4EiwGcc3nH/GKGkb8gwxPWYAr4s="; 32 }; 33 34 postPatch = ··· 76 "--prefix PATH : ${ 77 lib.makeBinPath [ 78 file 79 - p7zip 80 which 81 appimage-run 82 desktop-file-utils # update-desktop-database 83 gtk4.dev # gtk4-launch 84 bintools # readelf 85 libnotify # notify-send 86 ] 87 }" 88 ];
··· 11 desktop-file-utils, 12 libadwaita, 13 file, 14 + _7zz, 15 which, 16 appimage-run, 17 gtk4, 18 bintools, 19 libnotify, 20 + dwarfs, 21 + squashfsTools, 22 }: 23 24 python3Packages.buildPythonApplication rec { 25 pname = "gearlever"; 26 + version = "3.2.2"; 27 pyproject = false; # Built with meson 28 29 src = fetchFromGitHub { 30 owner = "mijorus"; 31 repo = "gearlever"; 32 tag = version; 33 + hash = "sha256-8gvulVq3RQZ/v7DCJ1Azrs23WMEznJCaalyjqD6iCU8="; 34 }; 35 36 postPatch = ··· 78 "--prefix PATH : ${ 79 lib.makeBinPath [ 80 file 81 + _7zz # 7zz 82 which 83 appimage-run 84 desktop-file-utils # update-desktop-database 85 gtk4.dev # gtk4-launch 86 bintools # readelf 87 libnotify # notify-send 88 + dwarfs # dwarfsextract, dwarfsck 89 + squashfsTools # unsquashfs 90 ] 91 }" 92 ];
+3 -3
pkgs/by-name/go/golangci-lint-langserver/package.nix
··· 8 9 buildGoModule rec { 10 pname = "golangci-lint-langserver"; 11 - version = "0.0.10"; 12 13 src = fetchFromGitHub { 14 owner = "nametake"; 15 repo = "golangci-lint-langserver"; 16 tag = "v${version}"; 17 - hash = "sha256-wNofr/s8K+vbvNZWrQ97g2V0fNAS2P/Zf7tsOmly+gc="; 18 }; 19 20 - vendorHash = "sha256-SsGw26y/ZIBFp9dBk55ebQgJiLWOFRNe21h6huYE84I="; 21 22 subPackages = [ "." ]; 23
··· 8 9 buildGoModule rec { 10 pname = "golangci-lint-langserver"; 11 + version = "0.0.11"; 12 13 src = fetchFromGitHub { 14 owner = "nametake"; 15 repo = "golangci-lint-langserver"; 16 tag = "v${version}"; 17 + hash = "sha256-mwYhOUH5PAbPRfP86dw9w6lIZYz/iL+f863XWOhBFy0="; 18 }; 19 20 + vendorHash = "sha256-kbGTORTTxfftdU8ffsfh53nT7wZldOnBZ/1WWzN89Uc="; 21 22 subPackages = [ "." ]; 23
+1 -1
pkgs/by-name/he/hedgedoc/package.nix
··· 56 makeBinaryWrapper 57 (python3.withPackages (ps: with ps; [ setuptools ])) # required to build sqlite3 bindings 58 yarn 59 ]; 60 61 buildInputs = [ ··· 67 buildPhase = '' 68 runHook preBuild 69 70 - export HOME=$(mktemp -d) 71 yarn config set enableTelemetry 0 72 yarn config set cacheFolder ${offlineCache} 73 export npm_config_nodedir=${nodejs} # prevent node-gyp from downloading headers
··· 56 makeBinaryWrapper 57 (python3.withPackages (ps: with ps; [ setuptools ])) # required to build sqlite3 bindings 58 yarn 59 + writableTmpDirAsHomeHook # A writable home directory is required for yarn 60 ]; 61 62 buildInputs = [ ··· 68 buildPhase = '' 69 runHook preBuild 70 71 yarn config set enableTelemetry 0 72 yarn config set cacheFolder ${offlineCache} 73 export npm_config_nodedir=${nodejs} # prevent node-gyp from downloading headers
+24
pkgs/by-name/ho/honeymarker/package.nix
···
··· 1 + { 2 + lib, 3 + buildGoModule, 4 + fetchFromGitHub, 5 + }: 6 + buildGoModule rec { 7 + pname = "honeymarker"; 8 + version = "0.2.1"; 9 + vendorHash = "sha256-ZuDobjC/nizZ7G0o/zVTQmDfDjcdBhfPcmkhgwFc7VU="; 10 + 11 + src = fetchFromGitHub { 12 + owner = "honeycombio"; 13 + repo = "honeymarker"; 14 + rev = "v${version}"; 15 + hash = "sha256-tiwX94CRvXnUYpiux94XhOj2abn1Uc+wjcDOmw79ab4="; 16 + }; 17 + 18 + meta = with lib; { 19 + description = "provides a simple CRUD interface for dealing with per-dataset markers on honeycomb.io"; 20 + homepage = "https://honeycomb.io/"; 21 + license = licenses.asl20; 22 + maintainers = [ maintainers.iand675 ]; 23 + }; 24 + }
+24
pkgs/by-name/ho/honeytail/package.nix
···
··· 1 + { 2 + lib, 3 + buildGoModule, 4 + fetchFromGitHub, 5 + }: 6 + buildGoModule rec { 7 + pname = "honeytail"; 8 + version = "1.6.0"; 9 + vendorHash = "sha256-LtiiLGLjhbfT49A6Fw5CbSbnmTHMxtcUssr+ayCVrvY="; 10 + 11 + src = fetchFromGitHub { 12 + owner = "honeycombio"; 13 + repo = "honeytail"; 14 + rev = "v${version}"; 15 + hash = "sha256-S0hIgNNzF1eNe+XJs+PT7EUIl5oJCXu+B/zQago4sf8="; 16 + }; 17 + 18 + meta = with lib; { 19 + description = "agent for ingesting log file data into honeycomb.io and making it available for exploration"; 20 + homepage = "https://honeycomb.io/"; 21 + license = licenses.asl20; 22 + maintainers = [ maintainers.iand675 ]; 23 + }; 24 + }
+24
pkgs/by-name/ho/honeyvent/package.nix
···
··· 1 + { 2 + lib, 3 + buildGoModule, 4 + fetchFromGitHub, 5 + }: 6 + buildGoModule rec { 7 + pname = "honeyvent"; 8 + version = "1.1.0"; 9 + vendorHash = null; 10 + 11 + src = fetchFromGitHub { 12 + owner = "honeycombio"; 13 + repo = "honeyvent"; 14 + rev = "v${version}"; 15 + hash = "sha256-yFQEOshjaH6fRCQ7IZChANI9guZlTXk35p1NzQvxUdI="; 16 + }; 17 + 18 + meta = with lib; { 19 + description = "CLI for sending individual events to honeycomb.io"; 20 + homepage = "https://honeycomb.io/"; 21 + license = licenses.asl20; 22 + maintainers = [ maintainers.iand675 ]; 23 + }; 24 + }
+3 -3
pkgs/by-name/is/isabelle/components/isabelle-linter.nix
··· 7 8 stdenv.mkDerivation rec { 9 pname = "isabelle-linter"; 10 - version = "2024-1.0.1"; 11 12 src = fetchFromGitHub { 13 owner = "isabelle-prover"; 14 repo = "isabelle-linter"; 15 - rev = "Isabelle2024-v1.0.1"; 16 - hash = "sha256-oTrwcfJgbkpkIweDIyc6lZjAvdS9J4agPoJgZzH+PuQ="; 17 }; 18 19 nativeBuildInputs = [ isabelle ];
··· 7 8 stdenv.mkDerivation rec { 9 pname = "isabelle-linter"; 10 + version = "2025-1.0.0"; 11 12 src = fetchFromGitHub { 13 owner = "isabelle-prover"; 14 repo = "isabelle-linter"; 15 + rev = "Isabelle2025-v1.0.0"; 16 + hash = "sha256-cH9EoIbKa6cqLjw83gnYvCy+Dq0d5fFmJCabdPrRJeI="; 17 }; 18 19 nativeBuildInputs = [ isabelle ];
+17 -15
pkgs/by-name/is/isabelle/package.nix
··· 2 lib, 3 stdenv, 4 fetchurl, 5 coreutils, 6 nettools, 7 java, ··· 10 veriT, 11 vampire, 12 eprover-ho, 13 - naproche, 14 rlwrap, 15 perl, 16 procps, ··· 21 }: 22 23 let 24 sha1 = stdenv.mkDerivation { 25 pname = "isabelle-sha1"; 26 version = "2024"; ··· 46 in 47 stdenv.mkDerivation (finalAttrs: rec { 48 pname = "isabelle"; 49 - version = "2024"; 50 51 dirname = "Isabelle${version}"; 52 ··· 54 if stdenv.hostPlatform.isDarwin then 55 fetchurl { 56 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_macos.tar.gz"; 57 - hash = "sha256-IgNfmW9x6h8DBj9vFEGV62oEl01NkW7QdyzXlWmii8c="; 58 } 59 else if stdenv.hostPlatform.isx86 then 60 fetchurl { 61 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_linux.tar.gz"; 62 - hash = "sha256-YDqq+KvqNll687BlHSwWKobAoN1EIHZvR+VyQDljkmc="; 63 } 64 else 65 fetchurl { 66 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_linux_arm.tar.gz"; 67 - hash = "sha256-jXWVv18WwrVnqVX1s4Lnyf7DkOzPa3EdLXYxgtKD+YA="; 68 }; 69 70 nativeBuildInputs = [ java ]; ··· 72 buildInputs = [ 73 polyml 74 veriT 75 - vampire 76 eprover-ho 77 nettools 78 ]; ··· 103 EOF 104 105 cat >contrib/vampire-*/etc/settings <<EOF 106 - VAMPIRE_HOME=${vampire}/bin 107 - VAMPIRE_VERSION=${vampire.version} 108 VAMPIRE_EXTRA_OPTIONS="--mode casc" 109 EOF 110 ··· 122 ISABELLE_JAVA_PLATFORM=${stdenv.system} 123 ISABELLE_JDK_HOME=${java} 124 EOF 125 - 126 - '' 127 - + lib.optionalString stdenv.hostPlatform.isx86 '' 128 - rm contrib/naproche-*/x86*/Naproche-SAD 129 - ln -s ${naproche}/bin/Naproche-SAD contrib/naproche-*/x86*/ 130 - '' 131 - + '' 132 133 echo ISABELLE_LINE_EDITOR=${rlwrap}/bin/rlwrap >>etc/settings 134
··· 2 lib, 3 stdenv, 4 fetchurl, 5 + fetchFromGitHub, 6 coreutils, 7 nettools, 8 java, ··· 11 veriT, 12 vampire, 13 eprover-ho, 14 rlwrap, 15 perl, 16 procps, ··· 21 }: 22 23 let 24 + vampire' = vampire.overrideAttrs (_: { 25 + src = fetchFromGitHub { 26 + owner = "vprover"; 27 + repo = "vampire"; 28 + tag = "v4.8HO4Sledgahammer"; 29 + hash = "sha256-CmppaGa4M9tkE1b25cY1LSPFygJy5yV4kpHKbPqvcVE="; 30 + }; 31 + }); 32 + 33 sha1 = stdenv.mkDerivation { 34 pname = "isabelle-sha1"; 35 version = "2024"; ··· 55 in 56 stdenv.mkDerivation (finalAttrs: rec { 57 pname = "isabelle"; 58 + version = "2025"; 59 60 dirname = "Isabelle${version}"; 61 ··· 63 if stdenv.hostPlatform.isDarwin then 64 fetchurl { 65 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_macos.tar.gz"; 66 + hash = "sha256-6ldUwiiFf12dOuJU7JgUeX8kU+opDfILL23LLvDi5/g="; 67 } 68 else if stdenv.hostPlatform.isx86 then 69 fetchurl { 70 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_linux.tar.gz"; 71 + hash = "sha256-PR1m3jcYI/4xqormZjj3NXW6wkTwCzGu4dy2LzgUfFY="; 72 } 73 else 74 fetchurl { 75 url = "https://isabelle.in.tum.de/website-${dirname}/dist/${dirname}_linux_arm.tar.gz"; 76 + hash = "sha256-p/Hp+7J5gJy5s6BVD5Ma1Mu2OS53I8BS7gKSOYYB0PE="; 77 }; 78 79 nativeBuildInputs = [ java ]; ··· 81 buildInputs = [ 82 polyml 83 veriT 84 + vampire' 85 eprover-ho 86 nettools 87 ]; ··· 112 EOF 113 114 cat >contrib/vampire-*/etc/settings <<EOF 115 + VAMPIRE_HOME=${vampire'}/bin 116 + VAMPIRE_VERSION=${vampire'.version} 117 VAMPIRE_EXTRA_OPTIONS="--mode casc" 118 EOF 119 ··· 131 ISABELLE_JAVA_PLATFORM=${stdenv.system} 132 ISABELLE_JDK_HOME=${java} 133 EOF 134 135 echo ISABELLE_LINE_EDITOR=${rlwrap}/bin/rlwrap >>etc/settings 136
+2
pkgs/by-name/jr/jrnl/package.nix
··· 41 rich 42 ]; 43 44 nativeCheckInputs = with python3.pkgs; [ 45 pytest-bdd 46 pytest-xdist
··· 41 rich 42 ]; 43 44 + pythonRelaxDeps = [ "rich" ]; 45 + 46 nativeCheckInputs = with python3.pkgs; [ 47 pytest-bdd 48 pytest-xdist
+66
pkgs/by-name/ka/kahip/package.nix
···
··· 1 + { 2 + lib, 3 + stdenv, 4 + fetchFromGitHub, 5 + fetchpatch2, 6 + cmake, 7 + mpi, 8 + metis, 9 + python3Packages, 10 + pythonSupport ? false, 11 + isILP64 ? false, 12 + }: 13 + stdenv.mkDerivation (finalAttrs: { 14 + pname = "kahip"; 15 + version = "3.18"; 16 + 17 + src = fetchFromGitHub { 18 + owner = "KaHIP"; 19 + repo = "KaHIP"; 20 + tag = "v${finalAttrs.version}"; 21 + hash = "sha256-l8DhVb2G6pQQcH3Wq4NsKw30cSK3sG+gCYRdpibw4ZI="; 22 + }; 23 + 24 + patches = [ 25 + (fetchpatch2 { 26 + url = "https://github.com/KaHIP/KaHIP/commit/9d4978c7540a1ccbc9807367d6e3852114e86567.patch?full_index=1"; 27 + hash = "sha256-nIJL0YmVp9+JUhzEXjoabD1qNEnhtrBnjMWnitYt0eU="; 28 + }) 29 + ]; 30 + 31 + nativeBuildInputs = 32 + [ cmake ] 33 + ++ lib.optionals pythonSupport [ 34 + python3Packages.python 35 + python3Packages.pybind11 36 + ]; 37 + 38 + buildInputs = [ 39 + mpi 40 + metis 41 + ]; 42 + 43 + cmakeFlags = [ 44 + (lib.cmakeBool "64BITMODE" isILP64) 45 + (lib.cmakeBool "BUILDPYTHONMODULE" pythonSupport) 46 + (lib.cmakeFeature "CMAKE_INSTALL_PYTHONDIR" python3Packages.python.sitePackages) 47 + ]; 48 + 49 + doInstallCheck = pythonSupport; 50 + 51 + nativeInstallCheckInputs = lib.optionals pythonSupport [ 52 + python3Packages.pythonImportsCheckHook 53 + ]; 54 + 55 + pythonImportsCheck = [ "kahip" ]; 56 + 57 + meta = { 58 + homepage = "https://kahip.github.io/"; 59 + downloadPage = "https://github.com/KaHIP/KaHIP/"; 60 + changelog = "https://github.com/KaHIP/KaHIP/releases/tag/v${finalAttrs.version}"; 61 + description = "Karlsruhe HIGH Quality Partitioning"; 62 + license = lib.licenses.mit; 63 + platforms = lib.platforms.unix; 64 + maintainers = with lib.maintainers; [ qbisi ]; 65 + }; 66 + })
+12 -1
pkgs/by-name/ka/kalker/package.nix
··· 2 lib, 3 rustPlatform, 4 fetchFromGitHub, 5 gmp, 6 mpfr, 7 libmpc, ··· 19 }; 20 21 useFetchCargoVendor = true; 22 - cargoHash = "sha256-IYxgh6ni3BFnocHGKwKUqgSC2xUjn0b/4pBqRC5iY8U="; 23 24 buildInputs = [ 25 gmp
··· 2 lib, 3 rustPlatform, 4 fetchFromGitHub, 5 + fetchpatch, 6 gmp, 7 mpfr, 8 libmpc, ··· 20 }; 21 22 useFetchCargoVendor = true; 23 + cargoHash = "sha256-LEP2ebthwtpPSRmJt0BW/T/lB6EE+tylyVv+PDt8UoQ="; 24 + 25 + cargoPatches = [ 26 + # Fixes build issue by just running cargo update 27 + # Can be removed on next release 28 + (fetchpatch { 29 + name = "bump_cargo_deps.patch"; 30 + url = "https://github.com/PaddiM8/kalker/commit/81bf66950a9dfeca4ab5fdd12774c93e40021eb1.patch"; 31 + hash = "sha256-XT8jXTMIMOFw8OieoQM7IkUqw3SDi1c9eE1cD15BI9I="; 32 + }) 33 + ]; 34 35 buildInputs = [ 36 gmp
+2
pkgs/by-name/ki/kismet/package.nix
··· 15 lm_sensors, 16 networkmanager, 17 nix-update-script, 18 pcre2, 19 pkg-config, 20 openssl, ··· 115 enableParallelBuilding = true; 116 117 passthru = { 118 updateScript = nix-update-script { 119 extraArgs = [ 120 "--version-regex"
··· 15 lm_sensors, 16 networkmanager, 17 nix-update-script, 18 + nixosTests, 19 pcre2, 20 pkg-config, 21 openssl, ··· 116 enableParallelBuilding = true; 117 118 passthru = { 119 + tests.kismet = nixosTests.kismet; 120 updateScript = nix-update-script { 121 extraArgs = [ 122 "--version-regex"
+22
pkgs/by-name/li/libndctl/musl-compat.patch
···
··· 1 + diff --git a/daxctl/device.c b/daxctl/device.c 2 + index e3993b1..0b2f20d 100644 3 + --- a/daxctl/device.c 4 + +++ b/daxctl/device.c 5 + @@ -23,6 +23,8 @@ 6 + #include "filter.h" 7 + #include "json.h" 8 + 9 + +#include <libgen.h> 10 + + 11 + static struct { 12 + const char *dev; 13 + const char *mode; 14 + @@ -366,7 +368,7 @@ static const char *parse_device_options(int argc, const char **argv, 15 + 16 + argc = parse_options(argc, argv, options, u, 0); 17 + if (argc > 0) 18 + - device = basename(argv[0]); 19 + + device = basename(strdup(argv[0])); 20 + 21 + /* Handle action-agnostic non-option arguments */ 22 + if (argc == 0 &&
+7
pkgs/by-name/li/libndctl/package.nix
··· 25 sha256 = "sha256-gG1Rz5AtDLzikGFr8A3l25ypd+VoLw2oWjszy9ogDLk="; 26 }; 27 28 outputs = [ 29 "out" 30 "man"
··· 25 sha256 = "sha256-gG1Rz5AtDLzikGFr8A3l25ypd+VoLw2oWjszy9ogDLk="; 26 }; 27 28 + patches = lib.optionals (!stdenv.hostPlatform.isGnu) [ 29 + # Use POSIX basename on non-glib. 30 + # Remove when https://github.com/pmem/ndctl/pull/263 31 + # or equivalent fix is merged and released. 32 + ./musl-compat.patch 33 + ]; 34 + 35 outputs = [ 36 "out" 37 "man"
+14 -2
pkgs/by-name/li/libofa/package.nix
··· 2 lib, 3 stdenv, 4 fetchurl, 5 expat, 6 curl, 7 fftw, ··· 17 sha256 = "184ham039l7lwhfgg0xr2vch2xnw1lwh7sid432mh879adhlc5h2"; 18 }; 19 20 - patches = fetchurl { 21 url = "mirror://debian/pool/main/libo/libofa/libofa_${version}-${deb_patch}.debian.tar.gz"; 22 - sha256 = "1rfkyz13cm8izm90c1xflp4rvsa24aqs6qpbbbqqcbmvzsj6j9yn"; 23 }; 24 25 outputs = [ 26 "out"
··· 2 lib, 3 stdenv, 4 fetchurl, 5 + fetchzip, 6 expat, 7 curl, 8 fftw, ··· 18 sha256 = "184ham039l7lwhfgg0xr2vch2xnw1lwh7sid432mh879adhlc5h2"; 19 }; 20 21 + debian_patches = fetchzip { 22 url = "mirror://debian/pool/main/libo/libofa/libofa_${version}-${deb_patch}.debian.tar.gz"; 23 + hash = "sha256-tENhXSRcUP1PKm35IJyLUEEROze8UzxJzRx3VNAqo40="; 24 }; 25 + 26 + patches = [ 27 + "${debian_patches}/patches/01_gcc41.diff" 28 + "${debian_patches}/patches/02_example-open.diff" 29 + "${debian_patches}/patches/03_example-size_type.diff" 30 + "${debian_patches}/patches/04_libofa.pc-deps.diff" 31 + "${debian_patches}/patches/05_gcc43.diff" 32 + "${debian_patches}/patches/06_gcc44.diff" 33 + "${debian_patches}/patches/fix_ftbfs.diff" 34 + "${debian_patches}/patches/fix-ftbfs-gcc4.7.diff" 35 + ]; 36 37 outputs = [ 38 "out"
+7 -3
pkgs/by-name/ma/mattermost/tests.nix
··· 39 + '' 40 # Just echo install/get/mod commands in the Makefile, since the dependencies are locked. 41 substituteInPlace server/Makefile \ 42 - --replace-warn '$(GO) install' '@echo $(GO) install' \ 43 - --replace-warn '$(GO) get' '@echo $(GO) get' \ 44 - --replace-warn '$(GO) get' '@echo $(GO) mod' 45 # mmctl tests shell out by writing a bash script to a tempfile 46 substituteInPlace server/cmd/mmctl/commands/config_e2e_test.go \ 47 --replace-fail '#!/bin/bash' '#!${runtimeShell}' ··· 142 # Appear to be broken. 143 "TestSessionStore/MySQL/SessionGetWithDeviceId" 144 "TestSessionStore/MySQL/GetMobileSessionMetadata" 145 ] 146 ++ optionals (!stdenv.hostPlatform.isx86_64) [ 147 # aarch64: invalid operating system or processor architecture
··· 39 + '' 40 # Just echo install/get/mod commands in the Makefile, since the dependencies are locked. 41 substituteInPlace server/Makefile \ 42 + --replace-warn '$(GO) install' 'echo $(GO) install' \ 43 + --replace-warn '$(GOBIN)/go$$version download' 'echo $(GOBIN)/go$$version download' \ 44 + --replace-warn '$(GO) get' 'echo $(GO) get' \ 45 + --replace-warn '$(GO) get' 'echo $(GO) mod' 46 # mmctl tests shell out by writing a bash script to a tempfile 47 substituteInPlace server/cmd/mmctl/commands/config_e2e_test.go \ 48 --replace-fail '#!/bin/bash' '#!${runtimeShell}' ··· 143 # Appear to be broken. 144 "TestSessionStore/MySQL/SessionGetWithDeviceId" 145 "TestSessionStore/MySQL/GetMobileSessionMetadata" 146 + "TestSessionStore/MySQL/GetSessionsWithActiveDeviceIds" 147 + "TestUpdateTeam" 148 + "TestSyncSyncableRoles" 149 ] 150 ++ optionals (!stdenv.hostPlatform.isx86_64) [ 151 # aarch64: invalid operating system or processor architecture
+6 -4
pkgs/by-name/me/media-downloader/package.nix
··· 1 { 2 aria2, 3 cmake, 4 - # https://github.com/mhogomchungu/media-downloader?tab=readme-ov-file#extensions 5 extraPackages ? [ 6 aria2 7 ffmpeg ··· 10 fetchFromGitHub, 11 ffmpeg, 12 lib, 13 - libsForQt5, 14 python3, 15 stdenv, 16 }: 17 ··· 28 29 nativeBuildInputs = [ 30 cmake 31 - libsForQt5.wrapQtAppsHook 32 ]; 33 34 - buildInputs = [ libsForQt5.qtbase ]; 35 36 qtWrapperArgs = [ "--prefix PATH : ${lib.makeBinPath extraPackages}" ]; 37
··· 1 { 2 aria2, 3 cmake, 4 extraPackages ? [ 5 aria2 6 ffmpeg ··· 9 fetchFromGitHub, 10 ffmpeg, 11 lib, 12 python3, 13 + qt6, 14 stdenv, 15 }: 16 ··· 27 28 nativeBuildInputs = [ 29 cmake 30 + qt6.wrapQtAppsHook 31 ]; 32 33 + buildInputs = [ 34 + qt6.qtbase 35 + qt6.qtwayland 36 + ]; 37 38 qtWrapperArgs = [ "--prefix PATH : ${lib.makeBinPath extraPackages}" ]; 39
+257
pkgs/by-name/mi/min-ed-launcher/deps.json
···
··· 1 + [ 2 + { 3 + "pname": "Expecto", 4 + "version": "10.2.1", 5 + "hash": "sha256-DgwHFsPMySlnMag4kPTviTwrNOD7uPnnJLi9DCZif5s=" 6 + }, 7 + { 8 + "pname": "Expecto.FsCheck", 9 + "version": "10.2.1", 10 + "hash": "sha256-+IDkxZKfEir5/TJrwxMFC4H6voWbSmCsvZUrjxcbc50=" 11 + }, 12 + { 13 + "pname": "FsCheck", 14 + "version": "2.16.5", 15 + "hash": "sha256-+UXoE+QGCDN1LM+XgseKJ7c5Lj/Cblo3izmo7GtIE0A=" 16 + }, 17 + { 18 + "pname": "FsConfig", 19 + "version": "4.1.0", 20 + "hash": "sha256-daaTrzhZjnJLDL49vOCkeXX6W5PWaLj5aqHuaYgiS1s=" 21 + }, 22 + { 23 + "pname": "FSharp.Core", 24 + "version": "8.0.200", 25 + "hash": "sha256-wjYiedFiqOTKaM4mF6uT9kc/yKDJ78mqfw9qLoBFHOw=" 26 + }, 27 + { 28 + "pname": "FSharp.Data", 29 + "version": "6.4.0", 30 + "hash": "sha256-8/iQA6anTybzseyvsvFV33jVVwrnYiKG1iqgwkqNeRc=" 31 + }, 32 + { 33 + "pname": "FSharp.Data.Csv.Core", 34 + "version": "6.4.0", 35 + "hash": "sha256-jcw/6uDN0he/PhhopEvTydy2X13Xt3g3kKuVdt+8+oY=" 36 + }, 37 + { 38 + "pname": "FSharp.Data.Html.Core", 39 + "version": "6.4.0", 40 + "hash": "sha256-HeljybTU019Z7HxFoErPM/HIAm32pJiKQM+kSyt63xw=" 41 + }, 42 + { 43 + "pname": "FSharp.Data.Http", 44 + "version": "6.4.0", 45 + "hash": "sha256-Zn4dZCb46vr8LYR5donzeistFPSO8YYiXUU3Iqo+vKg=" 46 + }, 47 + { 48 + "pname": "FSharp.Data.Json.Core", 49 + "version": "6.4.0", 50 + "hash": "sha256-dVhcVzUi//PFFFIML/5SWKrileeQ6IOd1VlGyEptaw0=" 51 + }, 52 + { 53 + "pname": "FSharp.Data.Runtime.Utilities", 54 + "version": "6.4.0", 55 + "hash": "sha256-nD4U7mwZtFEUcD9XvPxhpot8FNl2YyhiLpjpjmFNAO0=" 56 + }, 57 + { 58 + "pname": "FSharp.Data.WorldBank.Core", 59 + "version": "6.4.0", 60 + "hash": "sha256-ea2CZyHisqa1MnF70TBKfcMl6+W90MnLJ5Ctgjfk9SM=" 61 + }, 62 + { 63 + "pname": "FSharp.Data.Xml.Core", 64 + "version": "6.4.0", 65 + "hash": "sha256-6eZWKdNjKMqufyOYolTximIS41gipBUNMKNn3HEiYw0=" 66 + }, 67 + { 68 + "pname": "FSharpx.Collections", 69 + "version": "3.1.0", 70 + "hash": "sha256-CmDCfx19VNthqZHphYywOK0attxyJjOhu2srNKSky10=" 71 + }, 72 + { 73 + 
"pname": "FsToolkit.ErrorHandling", 74 + "version": "4.15.2", 75 + "hash": "sha256-fzsnH7178Gr0pnFoXkJvqRc2s5c+MXuRKQHBifIhmQk=" 76 + }, 77 + { 78 + "pname": "FsToolkit.ErrorHandling.TaskResult", 79 + "version": "4.15.2", 80 + "hash": "sha256-I/3BXTQQzVSlldcfaVZ849/PirOcozM5GLCmfL2qHWg=" 81 + }, 82 + { 83 + "pname": "Microsoft.CodeCoverage", 84 + "version": "17.9.0", 85 + "hash": "sha256-OaGa4+jRPHs+T+p/oekm2Miluqfd2IX8Rt+BmUx8kr4=" 86 + }, 87 + { 88 + "pname": "Microsoft.Extensions.Configuration", 89 + "version": "8.0.0", 90 + "hash": "sha256-9BPsASlxrV8ilmMCjdb3TiUcm5vFZxkBnAI/fNBSEyA=" 91 + }, 92 + { 93 + "pname": "Microsoft.Extensions.Configuration.Abstractions", 94 + "version": "8.0.0", 95 + "hash": "sha256-4eBpDkf7MJozTZnOwQvwcfgRKQGcNXe0K/kF+h5Rl8o=" 96 + }, 97 + { 98 + "pname": "Microsoft.Extensions.Configuration.Binder", 99 + "version": "8.0.1", 100 + "hash": "sha256-KYPQYYspiBGiez7JshmEjy4kFt7ASzVxQeVsygIEvHA=" 101 + }, 102 + { 103 + "pname": "Microsoft.Extensions.Configuration.FileExtensions", 104 + "version": "8.0.0", 105 + "hash": "sha256-BCxcjVP+kvrDDB0nzsFCJfU74UK4VBvct2JA4r+jNcs=" 106 + }, 107 + { 108 + "pname": "Microsoft.Extensions.Configuration.Json", 109 + "version": "8.0.0", 110 + "hash": "sha256-Fi/ijcG5l0BOu7i96xHu96aN5/g7zO6SWQbTsI3Qetg=" 111 + }, 112 + { 113 + "pname": "Microsoft.Extensions.FileProviders.Abstractions", 114 + "version": "8.0.0", 115 + "hash": "sha256-uQSXmt47X2HGoVniavjLICbPtD2ReQOYQMgy3l0xuMU=" 116 + }, 117 + { 118 + "pname": "Microsoft.Extensions.FileProviders.Physical", 119 + "version": "8.0.0", 120 + "hash": "sha256-29y5ZRQ1ZgzVOxHktYxyiH40kVgm5un2yTGdvuSWnRc=" 121 + }, 122 + { 123 + "pname": "Microsoft.Extensions.FileSystemGlobbing", 124 + "version": "8.0.0", 125 + "hash": "sha256-+Oz41JR5jdcJlCJOSpQIL5OMBNi+1Hl2d0JUHfES7sU=" 126 + }, 127 + { 128 + "pname": "Microsoft.Extensions.Primitives", 129 + "version": "8.0.0", 130 + "hash": "sha256-FU8qj3DR8bDdc1c+WeGZx/PCZeqqndweZM9epcpXjSo=" 131 + }, 132 + { 133 + "pname": 
"Microsoft.Extensions.TimeProvider.Testing", 134 + "version": "8.5.0", 135 + "hash": "sha256-ZGXrOV/qJVfjwrJsv3jtC80IVQyH3OLOw70gCIn6uIM=" 136 + }, 137 + { 138 + "pname": "Microsoft.NET.Test.Sdk", 139 + "version": "17.9.0", 140 + "hash": "sha256-q/1AJ7eNlk02wvN76qvjl2xBx5iJ+h5ssiE/4akLmtI=" 141 + }, 142 + { 143 + "pname": "Microsoft.NETCore.Platforms", 144 + "version": "5.0.0", 145 + "hash": "sha256-LIcg1StDcQLPOABp4JRXIs837d7z0ia6+++3SF3jl1c=" 146 + }, 147 + { 148 + "pname": "Microsoft.TestPlatform.ObjectModel", 149 + "version": "17.9.0", 150 + "hash": "sha256-iiXUFzpvT8OWdzMj9FGJDqanwHx40s1TXVY9l3ii+s0=" 151 + }, 152 + { 153 + "pname": "Microsoft.TestPlatform.TestHost", 154 + "version": "17.9.0", 155 + "hash": "sha256-1BZIY1z+C9TROgdTV/tq4zsPy7Q71GQksr/LoMKAzqU=" 156 + }, 157 + { 158 + "pname": "Microsoft.Win32.Registry", 159 + "version": "5.0.0", 160 + "hash": "sha256-9kylPGfKZc58yFqNKa77stomcoNnMeERXozWJzDcUIA=" 161 + }, 162 + { 163 + "pname": "Mono.Cecil", 164 + "version": "0.11.4", 165 + "hash": "sha256-HrnRgFsOzfqAWw0fUxi/vkzZd8dMn5zueUeLQWA9qvs=" 166 + }, 167 + { 168 + "pname": "Mono.Posix.NETStandard", 169 + "version": "5.20.1-preview", 170 + "hash": "sha256-gLtcH308/VVYgZcrJtvXDkBIMIQjK8w35AcmuxYYTvM=" 171 + }, 172 + { 173 + "pname": "Newtonsoft.Json", 174 + "version": "13.0.1", 175 + "hash": "sha256-K2tSVW4n4beRPzPu3rlVaBEMdGvWSv/3Q1fxaDh4Mjo=" 176 + }, 177 + { 178 + "pname": "Serilog", 179 + "version": "3.1.1", 180 + "hash": "sha256-L263y8jkn7dNFD2jAUK6mgvyRTqFe39i1tRhVZsNZTI=" 181 + }, 182 + { 183 + "pname": "Serilog.Sinks.Console", 184 + "version": "5.0.1", 185 + "hash": "sha256-aveoZM25ykc2haBHCXWD09jxZ2t2tYIGmaNTaO2V0jI=" 186 + }, 187 + { 188 + "pname": "Serilog.Sinks.File", 189 + "version": "5.0.0", 190 + "hash": "sha256-GKy9hwOdlu2W0Rw8LiPyEwus+sDtSOTl8a5l9uqz+SQ=" 191 + }, 192 + { 193 + "pname": "Serilog.Sinks.File.Header", 194 + "version": "1.0.2", 195 + "hash": "sha256-2igOXIHtojhhNlg/C5OhPwmVUoE5MpmgApi8dFmutx4=" 196 + }, 197 + { 198 + 
"pname": "System.Collections.Immutable", 199 + "version": "6.0.0", 200 + "hash": "sha256-DKEbpFqXCIEfqp9p3ezqadn5b/S1YTk32/EQK+tEScs=" 201 + }, 202 + { 203 + "pname": "System.Reflection.Metadata", 204 + "version": "1.6.0", 205 + "hash": "sha256-JJfgaPav7UfEh4yRAQdGhLZF1brr0tUWPl6qmfNWq/E=" 206 + }, 207 + { 208 + "pname": "System.Runtime.CompilerServices.Unsafe", 209 + "version": "6.0.0", 210 + "hash": "sha256-bEG1PnDp7uKYz/OgLOWs3RWwQSVYm+AnPwVmAmcgp2I=" 211 + }, 212 + { 213 + "pname": "System.Security.AccessControl", 214 + "version": "5.0.0", 215 + "hash": "sha256-ueSG+Yn82evxyGBnE49N4D+ngODDXgornlBtQ3Omw54=" 216 + }, 217 + { 218 + "pname": "System.Security.Cryptography.ProtectedData", 219 + "version": "8.0.0", 220 + "hash": "sha256-fb0pa9sQxN+mr0vnXg1Igbx49CaOqS+GDkTfWNboUvs=" 221 + }, 222 + { 223 + "pname": "System.Security.Permissions", 224 + "version": "8.0.0", 225 + "hash": "sha256-+YUPY+3HnTmfPLZzr+5qEk0RqalCbFZBgLXee1yCH1M=" 226 + }, 227 + { 228 + "pname": "System.Security.Principal.Windows", 229 + "version": "5.0.0", 230 + "hash": "sha256-CBOQwl9veFkrKK2oU8JFFEiKIh/p+aJO+q9Tc2Q/89Y=" 231 + }, 232 + { 233 + "pname": "System.Text.Encodings.Web", 234 + "version": "8.0.0", 235 + "hash": "sha256-IUQkQkV9po1LC0QsqrilqwNzPvnc+4eVvq+hCvq8fvE=" 236 + }, 237 + { 238 + "pname": "System.Text.Json", 239 + "version": "8.0.0", 240 + "hash": "sha256-XFcCHMW1u2/WujlWNHaIWkbW1wn8W4kI0QdrwPtWmow=" 241 + }, 242 + { 243 + "pname": "System.Windows.Extensions", 244 + "version": "8.0.0", 245 + "hash": "sha256-aHkz7LtmUDDRS7swQM0i6dDVUytRCMYeA2CfaeVA2Y0=" 246 + }, 247 + { 248 + "pname": "TypeShape", 249 + "version": "10.0.0", 250 + "hash": "sha256-esJFuRvxuLXwBgi/7FjEVm1ATCGXU/yB2RtgN4ilZtg=" 251 + }, 252 + { 253 + "pname": "YoloDev.Expecto.TestSdk", 254 + "version": "0.14.3", 255 + "hash": "sha256-3FIZM+GYsBsFGhLsasF7Ia9nXHSpqooQNe5H7ANy334=" 256 + } 257 + ]
+36
pkgs/by-name/mi/min-ed-launcher/package.nix
···
··· 1 + { 2 + lib, 3 + buildDotnetModule, 4 + fetchFromGitHub, 5 + git, 6 + }: 7 + buildDotnetModule rec { 8 + pname = "min-ed-launcher"; 9 + version = "0.11.3"; 10 + 11 + src = fetchFromGitHub { 12 + owner = "rfvgyhn"; 13 + repo = "min-ed-launcher"; 14 + tag = "v${version}"; 15 + hash = "sha256-HJIvbuTsCG51PPVieJbXGyAviqgM9/WPz0+0VhIWz9k="; 16 + 17 + leaveDotGit = true; # During build the current commit is appended to the version 18 + }; 19 + 20 + projectFile = "MinEdLauncher.sln"; 21 + nugetDeps = ./deps.json; 22 + buildInputs = [ 23 + git # During build the current commit is appended to the version 24 + ]; 25 + 26 + executables = [ "MinEdLauncher" ]; 27 + 28 + meta = { 29 + homepage = "https://github.com/rfvgyhn/min-ed-launcher"; 30 + description = "Minimal Elite Dangerous Launcher"; 31 + license = lib.licenses.mit; 32 + platforms = lib.platforms.x86_64; 33 + mainProgram = "MinEdLauncher"; 34 + maintainers = with lib.maintainers; [ jiriks74 ]; 35 + }; 36 + }
+2 -2
pkgs/by-name/mo/monkeysAudio/package.nix
··· 6 }: 7 8 stdenv.mkDerivation (finalAttrs: { 9 - version = "11.05"; 10 pname = "monkeys-audio"; 11 12 src = fetchzip { 13 url = "https://monkeysaudio.com/files/MAC_${builtins.concatStringsSep "" (lib.strings.splitString "." finalAttrs.version)}_SDK.zip"; 14 - hash = "sha256-CHOkP5L5FITV2ZpeYjSL9r8K6eeG/p4ZBrUztRvnIU4="; 15 stripRoot = false; 16 }; 17
··· 6 }: 7 8 stdenv.mkDerivation (finalAttrs: { 9 + version = "11.08"; 10 pname = "monkeys-audio"; 11 12 src = fetchzip { 13 url = "https://monkeysaudio.com/files/MAC_${builtins.concatStringsSep "" (lib.strings.splitString "." finalAttrs.version)}_SDK.zip"; 14 + hash = "sha256-iy/WFFRv3/ZJoGCAKH2+rYnyIdvaO+kgxaH/XeKWtbs="; 15 stripRoot = false; 16 }; 17
-2
pkgs/by-name/ne/neohtop/package.nix
··· 11 fetchFromGitHub, 12 glib, 13 gtk3, 14 - libsoup_2_4, 15 openssl, 16 nix-update-script, 17 }: ··· 51 glib 52 gtk3 53 openssl 54 - libsoup_2_4 55 webkitgtk_4_1 56 ]; 57
··· 11 fetchFromGitHub, 12 glib, 13 gtk3, 14 openssl, 15 nix-update-script, 16 }: ··· 50 glib 51 gtk3 52 openssl 53 webkitgtk_4_1 54 ]; 55
+2 -2
pkgs/by-name/ne/newsflash/package.nix
··· 12 rustc, 13 wrapGAppsHook4, 14 gdk-pixbuf, 15 - clapper, 16 gtk4, 17 libadwaita, 18 libxml2, ··· 66 67 buildInputs = 68 [ 69 - clapper 70 gtk4 71 libadwaita 72 libxml2
··· 12 rustc, 13 wrapGAppsHook4, 14 gdk-pixbuf, 15 + clapper-unwrapped, 16 gtk4, 17 libadwaita, 18 libxml2, ··· 66 67 buildInputs = 68 [ 69 + clapper-unwrapped 70 gtk4 71 libadwaita 72 libxml2
+2 -2
pkgs/by-name/ne/nextcloud-talk-desktop/package.nix
··· 25 26 stdenv.mkDerivation (finalAttrs: { 27 pname = "nextcloud-talk-desktop"; 28 - version = "1.1.5"; 29 30 # Building from source would require building also building Server and Talk components 31 # See https://github.com/nextcloud/talk-desktop?tab=readme-ov-file#%EF%B8%8F-prerequisites 32 src = fetchzip { 33 url = "https://github.com/nextcloud-releases/talk-desktop/releases/download/v${finalAttrs.version}/Nextcloud.Talk-linux-x64.zip"; 34 - hash = "sha256-KI6EJxsiIPjk+SBdqAwQNpaMZgVQSra/tqCGufQYSPs="; 35 stripRoot = false; 36 }; 37
··· 25 26 stdenv.mkDerivation (finalAttrs: { 27 pname = "nextcloud-talk-desktop"; 28 + version = "1.1.6"; 29 30 # Building from source would require building also building Server and Talk components 31 # See https://github.com/nextcloud/talk-desktop?tab=readme-ov-file#%EF%B8%8F-prerequisites 32 src = fetchzip { 33 url = "https://github.com/nextcloud-releases/talk-desktop/releases/download/v${finalAttrs.version}/Nextcloud.Talk-linux-x64.zip"; 34 + hash = "sha256-+bKE9taQxOrxB9tBP3PReyZlict6/gEX5uWbuzGIa8k="; 35 stripRoot = false; 36 }; 37
+2 -2
pkgs/by-name/ne/nezha-theme-nazhua/package.nix
··· 12 13 stdenvNoCC.mkDerivation (finalAttrs: { 14 pname = "nezha-theme-nazhua"; 15 - version = "0.6.4"; 16 17 src = fetchFromGitHub { 18 owner = "hi2shark"; 19 repo = "nazhua"; 20 tag = "v${finalAttrs.version}"; 21 - hash = "sha256-9mBM8M9pPZMMyFcf4JOyY5rnhpiNEy8ZDDnEoopjcbg="; 22 }; 23 24 yarnOfflineCache = fetchYarnDeps {
··· 12 13 stdenvNoCC.mkDerivation (finalAttrs: { 14 pname = "nezha-theme-nazhua"; 15 + version = "0.6.6"; 16 17 src = fetchFromGitHub { 18 owner = "hi2shark"; 19 repo = "nazhua"; 20 tag = "v${finalAttrs.version}"; 21 + hash = "sha256-Flx0yHhYGDM9qPIsE1ZfjdmuWXbDTodnaiVK7Hee3Z4="; 22 }; 23 24 yarnOfflineCache = fetchYarnDeps {
+3 -3
pkgs/by-name/nw/nwg-dock-hyprland/package.nix
··· 10 11 buildGoModule rec { 12 pname = "nwg-dock-hyprland"; 13 - version = "0.4.4"; 14 15 src = fetchFromGitHub { 16 owner = "nwg-piotr"; 17 repo = "nwg-dock-hyprland"; 18 tag = "v${version}"; 19 - hash = "sha256-mkwUDPBMpFxr+W5bRSQFevYVhZ949intKRU+KNo0/Gc="; 20 }; 21 22 - vendorHash = "sha256-6qgUvTByq4mkJoG38pI8eVe5o0pVI9O+/y/ZTDS5hw8="; 23 24 ldflags = [ 25 "-s"
··· 10 11 buildGoModule rec { 12 pname = "nwg-dock-hyprland"; 13 + version = "0.4.5"; 14 15 src = fetchFromGitHub { 16 owner = "nwg-piotr"; 17 repo = "nwg-dock-hyprland"; 18 tag = "v${version}"; 19 + hash = "sha256-zpQou/ABWIYg5QRynfrzPI3YOuNzpU5wHYZwutXwFc0="; 20 }; 21 22 + vendorHash = "sha256-ZR/vF7ogOEhyKQr35ew3OmYLVeudt51W8b734SZXvC0="; 23 24 ldflags = [ 25 "-s"
-4
pkgs/by-name/or/orthanc-framework/package.nix
··· 18 ; 19 20 sourceRoot = "${finalAttrs.src.name}/OrthancFramework/SharedLibrary"; 21 - outputs = [ 22 - "out" 23 - "dev" 24 - ]; 25 26 buildInputs = orthanc.buildInputs ++ [ 27 icu
··· 18 ; 19 20 sourceRoot = "${finalAttrs.src.name}/OrthancFramework/SharedLibrary"; 21 22 buildInputs = orthanc.buildInputs ++ [ 23 icu
-16
pkgs/by-name/or/orthanc-plugin-dicomweb/fix-orthanc-framework-headers-detection.patch
··· 1 - diff -r ad41d16f36b1 Resources/Orthanc/CMake/DownloadOrthancFramework.cmake 2 - --- a/Resources/Orthanc/CMake/DownloadOrthancFramework.cmake Fri Jan 17 12:24:54 2025 +0100 3 - +++ b/Resources/Orthanc/CMake/DownloadOrthancFramework.cmake Thu Mar 20 17:45:59 2025 +0100 4 - @@ -552,9 +552,10 @@ 5 - set(ORTHANC_FRAMEWORK_INCLUDE_DIR ${ORTHANC_FRAMEWORK_ROOT}) 6 - else() 7 - find_path(ORTHANC_FRAMEWORK_INCLUDE_DIR OrthancFramework.h 8 - - /usr/include/orthanc-framework 9 - - /usr/local/include/orthanc-framework 10 - + /usr/ 11 - + /usr/local/ 12 - ${ORTHANC_FRAMEWORK_ROOT} 13 - + PATH_SUFFIXES include include/orthanc-framework 14 - ) 15 - endif() 16 -
···
+1 -6
pkgs/by-name/or/orthanc-plugin-dicomweb/package.nix
··· 13 pugixml, 14 libuuid, 15 zlib, 16 - pkg-config, 17 }: 18 19 let ··· 47 rev = "OrthancDicomWeb-${finalAttrs.version}"; 48 hash = "sha256-ee271Fcu8yi1gZpTWrCuqhsBdFcPR/JK/fsnJg8PwIc="; 49 }; 50 - 51 - patches = [ 52 - # Fix Orthanc Framework headers files detection 53 - ./fix-orthanc-framework-headers-detection.patch 54 - ]; 55 56 postPatch = '' 57 mkdir -p ThirdPartyDownloads ··· 92 "-DCMAKE_BUILD_TYPE=Release" 93 "-DSTATIC_BUILD=OFF" 94 "-DORTHANC_FRAMEWORK_SOURCE=system" 95 ]; 96 97 meta = {
··· 13 pugixml, 14 libuuid, 15 zlib, 16 }: 17 18 let ··· 46 rev = "OrthancDicomWeb-${finalAttrs.version}"; 47 hash = "sha256-ee271Fcu8yi1gZpTWrCuqhsBdFcPR/JK/fsnJg8PwIc="; 48 }; 49 50 postPatch = '' 51 mkdir -p ThirdPartyDownloads ··· 86 "-DCMAKE_BUILD_TYPE=Release" 87 "-DSTATIC_BUILD=OFF" 88 "-DORTHANC_FRAMEWORK_SOURCE=system" 89 + "-DORTHANC_FRAMEWORK_ROOT=${orthanc.framework}/include/orthanc-framework" 90 ]; 91 92 meta = {
+2 -2
pkgs/by-name/pi/pipeline/package.nix
··· 17 desktop-file-utils, 18 blueprint-compiler, 19 sqlite, 20 - clapper, 21 gettext, 22 gst_all_1, 23 gtuber, ··· 63 libadwaita 64 openssl 65 sqlite 66 - clapper 67 68 gst_all_1.gstreamer 69 gst_all_1.gst-libav
··· 17 desktop-file-utils, 18 blueprint-compiler, 19 sqlite, 20 + clapper-unwrapped, 21 gettext, 22 gst_all_1, 23 gtuber, ··· 63 libadwaita 64 openssl 65 sqlite 66 + clapper-unwrapped 67 68 gst_all_1.gstreamer 69 gst_all_1.gst-libav
+73 -36
pkgs/by-name/po/pocl/package.nix
··· 2 lib, 3 stdenv, 4 fetchFromGitHub, 5 - nix-update-script, 6 cmake, 7 pkg-config, 8 hwloc, ··· 11 spirv-llvm-translator, 12 spirv-tools, 13 lttng-ust, 14 ocl-icd, 15 python3, 16 runCommand, 17 makeWrapper, 18 }: 19 20 let 21 clang = llvmPackages.clangUseLLVM; 22 # Workaround to make sure libclang finds libgcc.a and libgcc_s.so when 23 # invoked from within libpocl 24 - clangWrapped = runCommand "clang-pocl" { nativeBuildInputs = [ makeWrapper ]; } '' 25 - mkdir -p $out/bin 26 - cp -r ${clang}/bin/* $out/bin/ 27 - LIBGCC_DIR=$(dirname $(find ${stdenv.cc.cc}/lib/ -name libgcc.a)) 28 - for F in ${clang}/bin/ld*; do 29 - BASENAME=$(basename "$F") 30 - rm -f $out/bin/$BASENAME 31 - makeWrapper ${clang}/bin/$BASENAME $out/bin/$BASENAME \ 32 - --add-flags "-L$LIBGCC_DIR" \ 33 - --add-flags "-L${lib.getLib stdenv.cc.cc}/lib" 34 - done 35 - ''; 36 in 37 stdenv.mkDerivation (finalAttrs: { 38 pname = "pocl"; ··· 45 hash = "sha256-d/BD8YkdMYtu6yFlGNXrsz7PVIrzBvvYLU1JRv7ZJmc="; 46 }; 47 48 - cmakeFlags = [ 49 - "-DKERNELLIB_HOST_CPU_VARIANTS=distro" 50 - # avoid the runtime linker pulling in a different llvm e.g. from graphics drivers 51 - "-DSTATIC_LLVM=ON" 52 - "-DENABLE_POCL_BUILDING=OFF" 53 - "-DPOCL_ICD_ABSOLUTE_PATH=ON" 54 - "-DENABLE_ICD=ON" 55 - "-DCLANG=${clangWrapped}/bin/clang" 56 - "-DCLANGXX=${clangWrapped}/bin/clang++" 57 - "-DENABLE_REMOTE_CLIENT=ON" 58 - "-DENABLE_REMOTE_SERVER=ON" 59 - ]; 60 61 nativeBuildInputs = [ 62 cmake ··· 65 python3 66 ]; 67 68 - buildInputs = [ 69 - hwloc 70 - libxml2 71 - llvmPackages.llvm 72 - llvmPackages.libclang 73 - lttng-ust 74 - ocl-icd 75 - spirv-tools 76 - spirv-llvm-translator 77 ]; 78 79 - passthru.updateScript = nix-update-script { }; 80 81 meta = { 82 - description = "A portable open source (MIT-licensed) implementation of the OpenCL standard"; 83 homepage = "http://portablecl.org"; 84 changelog = "https://github.com/pocl/pocl/releases/tag/v${finalAttrs.version}"; 85 license = lib.licenses.mit;
··· 2 lib, 3 stdenv, 4 fetchFromGitHub, 5 cmake, 6 pkg-config, 7 hwloc, ··· 10 spirv-llvm-translator, 11 spirv-tools, 12 lttng-ust, 13 + opencl-headers, 14 ocl-icd, 15 python3, 16 runCommand, 17 makeWrapper, 18 + writableTmpDirAsHomeHook, 19 }: 20 21 let 22 clang = llvmPackages.clangUseLLVM; 23 # Workaround to make sure libclang finds libgcc.a and libgcc_s.so when 24 # invoked from within libpocl 25 + clangWrapped = 26 + if stdenv.hostPlatform.isDarwin then 27 + clang 28 + else 29 + runCommand "clang-pocl" { nativeBuildInputs = [ makeWrapper ]; } '' 30 + mkdir -p $out/bin 31 + cp -r ${clang}/bin/* $out/bin/ 32 + LIBGCC_DIR=$(dirname $(find ${stdenv.cc.cc}/lib/ -name libgcc.a)) 33 + for F in ${clang}/bin/ld*; do 34 + BASENAME=$(basename "$F") 35 + rm -f $out/bin/$BASENAME 36 + makeWrapper ${clang}/bin/$BASENAME $out/bin/$BASENAME \ 37 + --add-flags "-L$LIBGCC_DIR" \ 38 + --add-flags "-L${lib.getLib stdenv.cc.cc}/lib" 39 + done 40 + ''; 41 in 42 stdenv.mkDerivation (finalAttrs: { 43 pname = "pocl"; ··· 50 hash = "sha256-d/BD8YkdMYtu6yFlGNXrsz7PVIrzBvvYLU1JRv7ZJmc="; 51 }; 52 53 + postPatch = lib.optionalString stdenv.hostPlatform.isDarwin '' 54 + substituteInPlace pocld/shared_cl_context.cc --replace-fail \ 55 + "Dev.getInfo<CL_DEVICE_GLOBAL_MEM_SIZE>()" \ 56 + "static_cast<size_t>(Dev.getInfo<CL_DEVICE_GLOBAL_MEM_SIZE>())" 57 + ''; 58 + 59 + cmakeFlags = 60 + [ 61 + # avoid the runtime linker pulling in a different llvm e.g. 
from graphics drivers 62 + (lib.cmakeBool "STATIC_LLVM" true) 63 + (lib.cmakeBool "ENABLE_POCL_BUILDING" false) 64 + (lib.cmakeBool "POCL_ICD_ABSOLUTE_PATH" true) 65 + (lib.cmakeBool "ENABLE_ICD" true) 66 + (lib.cmakeBool "ENABLE_REMOTE_CLIENT" true) 67 + (lib.cmakeBool "ENABLE_REMOTE_SERVER" true) 68 + (lib.cmakeFeature "CLANG" "${clangWrapped}/bin/clang") 69 + (lib.cmakeFeature "CLANGXX" "${clangWrapped}/bin/clang++") 70 + ] 71 + # Only x86_64 supports "distro" which allows runtime detection of SSE/AVX 72 + ++ lib.optionals stdenv.hostPlatform.isx86_64 [ 73 + (lib.cmakeFeature "KERNELLIB_HOST_CPU_VARIANTS" "distro") 74 + ] 75 + ++ lib.optionals (!stdenv.hostPlatform.isx86_64) [ 76 + (lib.cmakeFeature "LLC_HOST_CPU" "generic") 77 + ]; 78 79 nativeBuildInputs = [ 80 cmake ··· 83 python3 84 ]; 85 86 + buildInputs = 87 + [ 88 + hwloc 89 + libxml2 90 + llvmPackages.llvm 91 + llvmPackages.libclang 92 + opencl-headers 93 + ocl-icd 94 + spirv-tools 95 + spirv-llvm-translator 96 + ] 97 + ++ lib.optionals stdenv.hostPlatform.isLinux [ 98 + lttng-ust 99 + ]; 100 + 101 + nativeInstallCheckInputs = [ 102 + writableTmpDirAsHomeHook 103 ]; 104 105 + doInstallCheck = true; 106 + 107 + installCheckPhase = '' 108 + runHook preInstallCheck 109 + 110 + export OCL_ICD_VENDORS=$out/etc/OpenCL/vendors 111 + $out/bin/poclcc -o poclcc.cl.pocl $src/examples/poclcc/poclcc.cl 112 + 113 + runHook postInstallCheck 114 + ''; 115 + 116 + setupHook = ./setup-hook.sh; 117 118 meta = { 119 + description = "portable open source (MIT-licensed) implementation of the OpenCL standard"; 120 homepage = "http://portablecl.org"; 121 changelog = "https://github.com/pocl/pocl/releases/tag/v${finalAttrs.version}"; 122 license = lib.licenses.mit;
+6
pkgs/by-name/po/pocl/setup-hook.sh
···
··· 1 + preCheckHooks+=('setupPoclCheck') 2 + preInstallCheckHooks+=('setupPoclCheck') 3 + 4 + setupPoclCheck () { 5 + export OCL_ICD_VENDORS="@out@/etc/OpenCL/vendors" 6 + }
-12
pkgs/by-name/pr/pretix/package.nix
··· 25 }; 26 }); 27 28 - geoip2 = super.geoip2.overridePythonAttrs rec { 29 - version = "5.0.1"; 30 - 31 - src = fetchPypi { 32 - pname = "geoip2"; 33 - inherit version; 34 - hash = "sha256-kK+LbTaH877yUfJwitAXsw1ifRFEwAQOq8TJAXqAfYY="; 35 - }; 36 - }; 37 - 38 stripe = super.stripe.overridePythonAttrs rec { 39 version = "7.9.0"; 40 ··· 46 }; 47 48 pretix-plugin-build = self.callPackage ./plugin-build.nix { }; 49 - 50 - sentry-sdk = super.sentry-sdk_2; 51 }; 52 }; 53
··· 25 }; 26 }); 27 28 stripe = super.stripe.overridePythonAttrs rec { 29 version = "7.9.0"; 30 ··· 36 }; 37 38 pretix-plugin-build = self.callPackage ./plugin-build.nix { }; 39 }; 40 }; 41
+3 -3
pkgs/by-name/ra/rain/package.nix
··· 8 9 buildGoModule rec { 10 pname = "rain"; 11 - version = "1.22.0"; 12 13 src = fetchFromGitHub { 14 owner = "aws-cloudformation"; 15 repo = pname; 16 rev = "v${version}"; 17 - sha256 = "sha256-0FjJXOmLHM0At0TJdcsYTafgDbw+C37BH11ft+UjEl0="; 18 }; 19 20 - vendorHash = "sha256-A+fQCiA2jArnhxbKxNmRQB6wY5mcmrY+fMtZQX5aBmU="; 21 22 subPackages = [ "cmd/rain" ]; 23
··· 8 9 buildGoModule rec { 10 pname = "rain"; 11 + version = "1.23.0"; 12 13 src = fetchFromGitHub { 14 owner = "aws-cloudformation"; 15 repo = pname; 16 rev = "v${version}"; 17 + sha256 = "sha256-Dsg8vUMs6aep4FfrulHdtH3H7fitWdzENfTPm+9z5m0="; 18 }; 19 20 + vendorHash = "sha256-5i6dAjgWclyEkRrDzrkiIRQKTeNz4GM1LU+2J3t/+Yc="; 21 22 subPackages = [ "cmd/rain" ]; 23
-2
pkgs/by-name/rc/rclone-ui/package.nix
··· 12 wrapGAppsHook3, 13 14 openssl, 15 - libsoup_2_4, 16 webkitgtk_4_1, 17 glib-networking, 18 libappindicator, ··· 62 63 buildInputs = [ 64 openssl 65 - libsoup_2_4 66 webkitgtk_4_1 67 glib-networking 68 libappindicator
··· 12 wrapGAppsHook3, 13 14 openssl, 15 webkitgtk_4_1, 16 glib-networking, 17 libappindicator, ··· 61 62 buildInputs = [ 63 openssl 64 webkitgtk_4_1 65 glib-networking 66 libappindicator
+33
pkgs/by-name/re/redu/package.nix
···
··· 1 + { 2 + lib, 3 + fetchFromGitHub, 4 + nix-update-script, 5 + rustPlatform, 6 + }: 7 + 8 + rustPlatform.buildRustPackage (finalAttrs: { 9 + pname = "redu"; 10 + version = "0.2.12"; 11 + 12 + src = fetchFromGitHub { 13 + owner = "drdo"; 14 + repo = "redu"; 15 + tag = "v${finalAttrs.version}"; 16 + hash = "sha256-3DcBTpog5tWv2qdmhOlDLHFY77Cug6mCpDQEAtViw74="; 17 + }; 18 + 19 + cargoHash = "sha256-Rp8y2tBnpzBVEoLP4fTMulIJpu1j2TpJNh5M9kjnuEo="; 20 + 21 + env.RUSTC_BOOTSTRAP = 1; 22 + 23 + passthru.updateScript = nix-update-script { }; 24 + 25 + meta = { 26 + description = "ncdu for your restic repo"; 27 + homepage = "https://github.com/drdo/redu"; 28 + changelog = "https://github.com/drdo/redu/releases/tag/v${finalAttrs.version}"; 29 + license = lib.licenses.mit; 30 + maintainers = with lib.maintainers; [ alexfmpe ]; 31 + mainProgram = "redu"; 32 + }; 33 + })
+4 -2
pkgs/by-name/re/revolver/package.nix
··· 5 zsh, 6 installShellFiles, 7 ncurses, 8 - nix-update-script, 9 testers, 10 runCommand, 11 }: ··· 79 version = "0.2.0"; 80 }; 81 }; 82 - updateScript = nix-update-script { }; 83 }; 84 85 meta = {
··· 5 zsh, 6 installShellFiles, 7 ncurses, 8 + unstableGitUpdater, 9 testers, 10 runCommand, 11 }: ··· 79 version = "0.2.0"; 80 }; 81 }; 82 + updateScript = unstableGitUpdater { 83 + tagPrefix = "v"; 84 + }; 85 }; 86 87 meta = {
+2 -2
pkgs/by-name/se/senpai/package.nix
··· 9 10 buildGoModule rec { 11 pname = "senpai"; 12 - version = "0.4.0"; 13 14 src = fetchFromSourcehut { 15 owner = "~delthas"; 16 repo = "senpai"; 17 rev = "v${version}"; 18 - sha256 = "sha256-3DVy+7mMVFmPpGxwJqtt2+QwNEMrgZazynawE/Wf+UM="; 19 }; 20 21 vendorHash = "sha256-6glslBPjJr0TmrAkDGbOQ4sDzvODlavVeTugs6RXsCU=";
··· 9 10 buildGoModule rec { 11 pname = "senpai"; 12 + version = "0.4.1"; 13 14 src = fetchFromSourcehut { 15 owner = "~delthas"; 16 repo = "senpai"; 17 rev = "v${version}"; 18 + sha256 = "sha256-lwfhRnaHGOIp6NyugPEu6P+3WXkVgQEWaz7DUfHiJrQ="; 19 }; 20 21 vendorHash = "sha256-6glslBPjJr0TmrAkDGbOQ4sDzvODlavVeTugs6RXsCU=";
+5 -3
pkgs/by-name/su/supermodel/package.nix
··· 4 libGLU, 5 SDL2, 6 SDL2_net, 7 stdenv, 8 zlib, 9 }: 10 11 stdenv.mkDerivation { 12 pname = "supermodel"; 13 - version = "0-unstable-2024-11-07"; 14 15 src = fetchFromGitHub { 16 owner = "trzy"; 17 repo = "supermodel"; 18 - rev = "4e7356ab2c077aa3bc3d75fb6e164a1c943fe4c1"; 19 - hash = "sha256-ajRbgs6oMFF+dYHPsKM7FU16vuZcSovaNk2thdsUWtk="; 20 }; 21 22 buildInputs = [ 23 libGLU 24 SDL2 25 SDL2_net 26 zlib 27 ]; 28
··· 4 libGLU, 5 SDL2, 6 SDL2_net, 7 + xorg, 8 stdenv, 9 zlib, 10 }: 11 12 stdenv.mkDerivation { 13 pname = "supermodel"; 14 + version = "0-unstable-2025-04-17"; 15 16 src = fetchFromGitHub { 17 owner = "trzy"; 18 repo = "supermodel"; 19 + rev = "2272893a0511c0b3b50f6dda64addb7014717dd3"; 20 + hash = "sha256-3FdLBGxmi4Xj7ao2nvjLleJSTXvKQrhUWvnQr8DK/RY="; 21 }; 22 23 buildInputs = [ 24 libGLU 25 SDL2 26 SDL2_net 27 + xorg.libX11 28 zlib 29 ]; 30
+2 -2
pkgs/by-name/te/televido/package.nix
··· 14 desktop-file-utils, 15 openssl, 16 gst_all_1, 17 - clapper, 18 }: 19 20 stdenv.mkDerivation rec { ··· 49 buildInputs = [ 50 libadwaita 51 desktop-file-utils 52 - clapper 53 gst_all_1.gstreamer 54 gst_all_1.gst-plugins-base 55 gst_all_1.gst-libav
··· 14 desktop-file-utils, 15 openssl, 16 gst_all_1, 17 + clapper-unwrapped, 18 }: 19 20 stdenv.mkDerivation rec { ··· 49 buildInputs = [ 50 libadwaita 51 desktop-file-utils 52 + clapper-unwrapped 53 gst_all_1.gstreamer 54 gst_all_1.gst-plugins-base 55 gst_all_1.gst-libav
+45 -51
pkgs/by-name/ts/tsduck/package.nix
··· 2 lib, 3 stdenv, 4 fetchFromGitHub, 5 curl, 6 jdk, 7 libedit, 8 srt, 9 }: 10 11 - stdenv.mkDerivation rec { 12 pname = "tsduck"; 13 - version = "3.31-2761"; 14 15 src = fetchFromGitHub { 16 owner = "tsduck"; 17 repo = "tsduck"; 18 - rev = "v${version}"; 19 - sha256 = "sha256-268TKCh3naebbw+sOQ6d4N/zl7UEVtc3l3flFAYHDU4="; 20 }; 21 22 buildInputs = [ 23 curl 24 libedit 25 srt 26 - jdk 27 ]; 28 29 - # remove tests which call out to https://tsduck.io/download/test/... 30 - postPatch = '' 31 - sed -i \ 32 - -e '/TSUNIT_TEST(testMasterPlaylist);/ d' \ 33 - -e '/TSUNIT_TEST(testMasterPlaylistWithAlternate);/ d' \ 34 - -e '/TSUNIT_TEST(testMediaPlaylist);/ d' \ 35 - src/utest/utestHLS.cpp 36 - 37 - sed -i \ 38 - -e '/TSUNIT_TEST(testBetterSystemRandomGenerator);/ d' \ 39 - src/utest/utestSystemRandomGenerator.cpp 40 - 41 - sed -i \ 42 - -e '/TSUNIT_ASSERT(request.downloadBinaryContent/ d' \ 43 - -e '/TSUNIT_ASSERT(!request.downloadBinaryContent/ d' \ 44 - -e '/TSUNIT_TEST(testGitHub);/ d' \ 45 - -e '/TSUNIT_TEST(testGoogle);/ d' \ 46 - -e '/TSUNIT_TEST(testNoRedirection);/ d' \ 47 - -e '/TSUNIT_TEST(testReadMeFile);/ d' \ 48 - src/utest/utestWebRequest.cpp 49 - 50 - sed -i \ 51 - -e '/TSUNIT_TEST(testHomeDirectory);/ d' \ 52 - src/utest/utestSysUtils.cpp 53 54 - sed -i \ 55 - -e '/TSUNIT_TEST(testIPv4Address);/ d' \ 56 - -e '/TSUNIT_TEST(testIPv4AddressConstructors);/ d' \ 57 - -e '/TSUNIT_TEST(testIPv4SocketAddressConstructors);/ d' \ 58 - -e '/TSUNIT_TEST(testTCPSocket);/ d' \ 59 - -e '/TSUNIT_TEST(testUDPSocket);/ d' \ 60 - src/utest/utestNetworking.cpp 61 ''; 62 63 - enableParallelBuilding = true; 64 makeFlags = [ 65 "NODEKTEC=1" 66 "NOHIDES=1" 67 "NOPCSC=1" 68 - "NORIST=1" 69 "NOVATEK=1" 70 - ] ++ installFlags; 71 72 checkTarget = "test"; 73 doCheck = true; 74 75 - installFlags = [ 76 - "SYSROOT=${placeholder "out"}" 77 - "SYSPREFIX=/" 78 - "USRLIBDIR=/lib" 79 - ]; 80 installTargets = [ 81 "install-tools" 82 "install-devel" 83 ]; 84 85 - meta = with lib; { 86 
description = "MPEG Transport Stream Toolkit"; 87 homepage = "https://github.com/tsduck/tsduck"; 88 - license = licenses.bsd2; 89 - maintainers = with maintainers; [ siriobalmelli ]; 90 - platforms = platforms.all; 91 - # never built on aarch64-darwin, x86_64-darwin since first introduction in nixpkgs 92 - broken = stdenv.hostPlatform.isDarwin; 93 }; 94 - }
··· 2 lib, 3 stdenv, 4 fetchFromGitHub, 5 + # build and doc tooling 6 + asciidoctor-with-extensions, 7 + doxygen, 8 + graphviz, 9 + python3, 10 + ruby, 11 + qpdf, 12 + # build deps 13 curl, 14 + glibcLocales, 15 jdk, 16 libedit, 17 + librist, 18 + openssl, 19 srt, 20 }: 21 22 + stdenv.mkDerivation (finalAttrs: { 23 pname = "tsduck"; 24 + version = "3.40-4165"; 25 26 src = fetchFromGitHub { 27 owner = "tsduck"; 28 repo = "tsduck"; 29 + rev = "v${finalAttrs.version}"; 30 + sha256 = "sha256-bFnsGoElXeStIX5KwonJuF0x7DDzhzq+3oygkUOmZE0="; 31 }; 32 33 + nativeBuildInputs = [ 34 + asciidoctor-with-extensions 35 + doxygen 36 + graphviz 37 + python3 38 + ruby 39 + qpdf 40 + ]; 41 + 42 buildInputs = [ 43 curl 44 + glibcLocales 45 + jdk 46 libedit 47 + librist 48 + openssl 49 srt 50 ]; 51 52 + enableParallelBuilding = true; 53 54 + postPatch = '' 55 + patchShebangs scripts 56 ''; 57 58 + # see CONFIG.txt in the sources 59 makeFlags = [ 60 + "CXXFLAGS_NO_WARNINGS=-Wno-deprecated-declarations" 61 "NODEKTEC=1" 62 + "NOGITHUB=1" 63 "NOHIDES=1" 64 "NOPCSC=1" 65 "NOVATEK=1" 66 + "SYSPREFIX=/" 67 + "SYSROOT=${placeholder "out"}" 68 + ]; 69 70 + # remove tests which break the sandbox 71 + patches = [ ./tests.patch ]; 72 checkTarget = "test"; 73 doCheck = true; 74 75 installTargets = [ 76 "install-tools" 77 "install-devel" 78 ]; 79 80 + meta = { 81 description = "MPEG Transport Stream Toolkit"; 82 homepage = "https://github.com/tsduck/tsduck"; 83 + mainProgram = "tsversion"; 84 + license = lib.licenses.bsd2; 85 + maintainers = with lib.maintainers; [ siriobalmelli ]; 86 + platforms = lib.platforms.all; 87 }; 88 + })
+744
pkgs/by-name/ts/tsduck/tests.patch
···
··· 1 + diff --git a/src/utest/utestHLS.cpp b/src/utest/utestHLS.cpp 2 + index ebfe67f3d..dc023b636 100644 3 + --- a/src/utest/utestHLS.cpp 4 + +++ b/src/utest/utestHLS.cpp 5 + @@ -20,9 +20,6 @@ 6 + 7 + class HLSTest: public tsunit::Test 8 + { 9 + - TSUNIT_DECLARE_TEST(MasterPlaylist); 10 + - TSUNIT_DECLARE_TEST(MasterPlaylistWithAlternate); 11 + - TSUNIT_DECLARE_TEST(MediaPlaylist); 12 + TSUNIT_DECLARE_TEST(BuildMasterPlaylist); 13 + TSUNIT_DECLARE_TEST(BuildMediaPlaylist); 14 + 15 + @@ -61,176 +58,6 @@ void HLSTest::afterTest() 16 + // Unitary tests. 17 + //---------------------------------------------------------------------------- 18 + 19 + -TSUNIT_DEFINE_TEST(MasterPlaylist) 20 + -{ 21 + - // Test file downloaded from TSDuck web site. 22 + - // Copied from Apple test file at 23 + - // https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_ts/master.m3u8 24 + - 25 + - ts::hls::PlayList pl; 26 + - TSUNIT_ASSERT(pl.loadURL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/master.m3u8", true)); 27 + - TSUNIT_ASSERT(pl.isValid()); 28 + - TSUNIT_EQUAL(ts::hls::PlayListType::MASTER, pl.type()); 29 + - TSUNIT_EQUAL(6, pl.version()); 30 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/master.m3u8", pl.url()); 31 + - ts::hls::MediaElement media; 32 + - pl.buildURL(media, u"foo.bar"); 33 + - TSUNIT_EQUAL(u"foo.bar", media.relative_uri); 34 + - TSUNIT_EQUAL(u"/download/test/hls/img_bipbop_adv_example_ts/foo.bar", media.file_path); 35 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/foo.bar", media.url.toString()); 36 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/foo.bar", media.urlString()); 37 + - TSUNIT_EQUAL(0, pl.segmentCount()); 38 + - TSUNIT_EQUAL(24, pl.playListCount()); 39 + - TSUNIT_EQUAL(5, pl.altPlayListCount()); 40 + - TSUNIT_EQUAL(0, pl.targetDuration().count()); 41 + - TSUNIT_EQUAL(0, pl.mediaSequence()); 42 + - 
TSUNIT_ASSERT(!pl.endList()); 43 + - TSUNIT_EQUAL(ts::hls::PlayListType::MASTER, pl.type()); 44 + - 45 + - TSUNIT_EQUAL(u"v5/prog_index.m3u8", pl.playList(0).relative_uri); 46 + - TSUNIT_EQUAL(2227464, pl.playList(0).bandwidth.toInt()); 47 + - TSUNIT_EQUAL(2218327, pl.playList(0).average_bandwidth.toInt()); 48 + - TSUNIT_EQUAL(960, pl.playList(0).width); 49 + - TSUNIT_EQUAL(540, pl.playList(0).height); 50 + - TSUNIT_EQUAL(60000, pl.playList(0).frame_rate); 51 + - TSUNIT_EQUAL(u"avc1.640020,mp4a.40.2", pl.playList(0).codecs); 52 + - TSUNIT_EQUAL(u"", pl.playList(0).hdcp); 53 + - TSUNIT_EQUAL(u"", pl.playList(0).video_range); 54 + - TSUNIT_EQUAL(u"", pl.playList(0).video); 55 + - TSUNIT_EQUAL(u"aud1", pl.playList(0).audio); 56 + - TSUNIT_EQUAL(u"sub1", pl.playList(0).subtitles); 57 + - TSUNIT_EQUAL(u"cc1", pl.playList(0).closed_captions); 58 + - TSUNIT_EQUAL(u"v5/prog_index.m3u8, 960x540, 2,227,464 b/s, @60 fps", pl.playList(0).toString()); 59 + - 60 + - TSUNIT_EQUAL(u"v2/prog_index.m3u8", pl.playList(23).relative_uri); 61 + - TSUNIT_EQUAL(582387, pl.playList(23).bandwidth.toInt()); 62 + - TSUNIT_EQUAL(570616, pl.playList(23).average_bandwidth.toInt()); 63 + - TSUNIT_EQUAL(480, pl.playList(23).width); 64 + - TSUNIT_EQUAL(270, pl.playList(23).height); 65 + - TSUNIT_EQUAL(30000, pl.playList(23).frame_rate); 66 + - TSUNIT_EQUAL(u"avc1.640015,ec-3", pl.playList(23).codecs); 67 + - TSUNIT_EQUAL(u"", pl.playList(23).hdcp); 68 + - TSUNIT_EQUAL(u"", pl.playList(23).video_range); 69 + - TSUNIT_EQUAL(u"", pl.playList(23).video); 70 + - TSUNIT_EQUAL(u"aud3", pl.playList(23).audio); 71 + - TSUNIT_EQUAL(u"sub1", pl.playList(23).subtitles); 72 + - TSUNIT_EQUAL(u"cc1", pl.playList(23).closed_captions); 73 + - TSUNIT_EQUAL(u"v2/prog_index.m3u8, 480x270, 582,387 b/s, @30 fps", pl.playList(23).toString()); 74 + - 75 + - TSUNIT_EQUAL(0, pl.selectPlayList(0, 0, 0, 0, 0, 0)); 76 + - TSUNIT_EQUAL(ts::NPOS, pl.selectPlayList(10000000, 0, 0, 0, 0, 0)); 77 + - TSUNIT_EQUAL(9, 
pl.selectPlayListHighestBitRate()); 78 + - TSUNIT_EQUAL(7, pl.selectPlayListLowestBitRate()); 79 + - TSUNIT_EQUAL(1, pl.selectPlayListHighestResolution()); 80 + - TSUNIT_EQUAL(7, pl.selectPlayListLowestResolution()); 81 + -} 82 + - 83 + -TSUNIT_DEFINE_TEST(MasterPlaylistWithAlternate) 84 + -{ 85 + - // Test file downloaded from TSDuck web site. 86 + - 87 + - ts::hls::PlayList pl; 88 + - TSUNIT_ASSERT(pl.loadURL(u"https://tsduck.io/download/test/hls/alternative/index_hd.m3u8", true)); 89 + - TSUNIT_ASSERT(pl.isValid()); 90 + - TSUNIT_EQUAL(ts::hls::PlayListType::MASTER, pl.type()); 91 + - TSUNIT_EQUAL(4, pl.version()); 92 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/alternative/index_hd.m3u8", pl.url()); 93 + - TSUNIT_EQUAL(0, pl.segmentCount()); 94 + - TSUNIT_EQUAL(7, pl.playListCount()); 95 + - TSUNIT_EQUAL(2, pl.altPlayListCount()); 96 + - TSUNIT_EQUAL(0, pl.targetDuration().count()); 97 + - TSUNIT_EQUAL(0, pl.mediaSequence()); 98 + - TSUNIT_ASSERT(!pl.endList()); 99 + - TSUNIT_EQUAL(ts::hls::PlayListType::MASTER, pl.type()); 100 + - 101 + - TSUNIT_EQUAL(u"04_hd.m3u8", pl.playList(0).relative_uri); 102 + - TSUNIT_EQUAL(1209781, pl.playList(0).bandwidth.toInt()); 103 + - TSUNIT_EQUAL(768, pl.playList(0).width); 104 + - TSUNIT_EQUAL(432, pl.playList(0).height); 105 + - TSUNIT_EQUAL(25000, pl.playList(0).frame_rate); 106 + - TSUNIT_EQUAL(u"avc1.4D4020,mp4a.40.2", pl.playList(0).codecs); 107 + - TSUNIT_EQUAL(u"", pl.playList(0).hdcp); 108 + - TSUNIT_EQUAL(u"", pl.playList(0).video_range); 109 + - TSUNIT_EQUAL(u"", pl.playList(0).video); 110 + - TSUNIT_EQUAL(u"audio2", pl.playList(0).audio); 111 + - TSUNIT_EQUAL(u"", pl.playList(0).subtitles); 112 + - TSUNIT_EQUAL(u"", pl.playList(0).closed_captions); 113 + - TSUNIT_EQUAL(u"04_hd.m3u8, 768x432, 1,209,781 b/s, @25 fps", pl.playList(0).toString()); 114 + - 115 + - TSUNIT_EQUAL(u"09_hd.m3u8", pl.altPlayList(0).relative_uri); 116 + - TSUNIT_EQUAL(u"AUDIO", pl.altPlayList(0).type); 117 + - TSUNIT_EQUAL(u"audio2", 
pl.altPlayList(0).group_id); 118 + - TSUNIT_EQUAL(u"ENG", pl.altPlayList(0).name); 119 + - TSUNIT_EQUAL(u"ENG", pl.altPlayList(0).language); 120 + - TSUNIT_EQUAL(u"", pl.altPlayList(0).stable_rendition_id); 121 + - TSUNIT_EQUAL(u"", pl.altPlayList(0).assoc_language); 122 + - TSUNIT_EQUAL(u"", pl.altPlayList(0).in_stream_id); 123 + - TSUNIT_EQUAL(u"", pl.altPlayList(0).characteristics); 124 + - TSUNIT_EQUAL(u"", pl.altPlayList(0).channels); 125 + - TSUNIT_ASSERT(pl.altPlayList(0).is_default); 126 + - TSUNIT_ASSERT(pl.altPlayList(0).auto_select); 127 + - TSUNIT_ASSERT(!pl.altPlayList(0).forced); 128 + - 129 + - TSUNIT_EQUAL(u"01_hd.m3u8", pl.altPlayList(1).relative_uri); 130 + - TSUNIT_EQUAL(u"AUDIO", pl.altPlayList(1).type); 131 + - TSUNIT_EQUAL(u"audio1", pl.altPlayList(1).group_id); 132 + - TSUNIT_EQUAL(u"FOO", pl.altPlayList(1).name); 133 + - TSUNIT_EQUAL(u"FOO", pl.altPlayList(1).language); 134 + - TSUNIT_EQUAL(u"", pl.altPlayList(1).stable_rendition_id); 135 + - TSUNIT_EQUAL(u"", pl.altPlayList(1).assoc_language); 136 + - TSUNIT_EQUAL(u"", pl.altPlayList(1).in_stream_id); 137 + - TSUNIT_EQUAL(u"", pl.altPlayList(1).characteristics); 138 + - TSUNIT_EQUAL(u"", pl.altPlayList(1).channels); 139 + - TSUNIT_ASSERT(!pl.altPlayList(1).is_default); 140 + - TSUNIT_ASSERT(!pl.altPlayList(1).auto_select); 141 + - TSUNIT_ASSERT(!pl.altPlayList(1).forced); 142 + -} 143 + - 144 + -TSUNIT_DEFINE_TEST(MediaPlaylist) 145 + -{ 146 + - // Test file downloaded from TSDuck web site. 
147 + - // Copied from Apple test file at 148 + - // https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_ts/v5/prog_index.m3u8 149 + - 150 + - ts::hls::PlayList pl; 151 + - TSUNIT_ASSERT(pl.loadURL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/v5/prog_index.m3u8", true)); 152 + - TSUNIT_ASSERT(pl.isValid()); 153 + - TSUNIT_EQUAL(ts::hls::PlayListType::VOD, pl.type()); 154 + - TSUNIT_EQUAL(3, pl.version()); 155 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/v5/prog_index.m3u8", pl.url()); 156 + - ts::hls::MediaElement media; 157 + - pl.buildURL(media, u"foo.bar"); 158 + - TSUNIT_EQUAL(u"https://tsduck.io/download/test/hls/img_bipbop_adv_example_ts/v5/foo.bar", media.urlString()); 159 + - TSUNIT_EQUAL(100, pl.segmentCount()); 160 + - TSUNIT_EQUAL(0, pl.playListCount()); 161 + - TSUNIT_EQUAL(0, pl.altPlayListCount()); 162 + - TSUNIT_EQUAL(6, pl.targetDuration().count()); 163 + - TSUNIT_EQUAL(0, pl.mediaSequence()); 164 + - TSUNIT_ASSERT(pl.endList()); 165 + - 166 + - TSUNIT_EQUAL(u"fileSequence0.ts", pl.segment(0).relative_uri); 167 + - TSUNIT_EQUAL(u"", pl.segment(0).title); 168 + - TSUNIT_EQUAL(2060 * 1024, pl.segment(0).bitrate.toInt()); 169 + - TSUNIT_EQUAL(6000, pl.segment(0).duration.count()); 170 + - TSUNIT_ASSERT(!pl.segment(0).gap); 171 + - 172 + - TSUNIT_EQUAL(u"fileSequence99.ts", pl.segment(99).relative_uri); 173 + - TSUNIT_EQUAL(u"", pl.segment(99).title); 174 + - TSUNIT_EQUAL(2055 * 1024, pl.segment(99).bitrate.toInt()); 175 + - TSUNIT_EQUAL(6000, pl.segment(99).duration.count()); 176 + - TSUNIT_ASSERT(!pl.segment(99).gap); 177 + - 178 + - ts::hls::MediaSegment seg; 179 + - TSUNIT_ASSERT(pl.popFirstSegment(seg)); 180 + - TSUNIT_EQUAL(99, pl.segmentCount()); 181 + - 182 + - TSUNIT_EQUAL(u"fileSequence0.ts", seg.relative_uri); 183 + - TSUNIT_EQUAL(u"", seg.title); 184 + - TSUNIT_EQUAL(2060 * 1024, seg.bitrate.toInt()); 185 + - TSUNIT_EQUAL(6000, seg.duration.count()); 186 + 
- TSUNIT_ASSERT(!seg.gap); 187 + -} 188 + - 189 + TSUNIT_DEFINE_TEST(BuildMasterPlaylist) 190 + { 191 + ts::hls::PlayList pl; 192 + diff --git a/src/utest/utestNetworking.cpp b/src/utest/utestNetworking.cpp 193 + index 058d5eb4c..67e5610d5 100644 194 + --- a/src/utest/utestNetworking.cpp 195 + +++ b/src/utest/utestNetworking.cpp 196 + @@ -33,19 +33,13 @@ 197 + class NetworkingTest: public tsunit::Test 198 + { 199 + TSUNIT_DECLARE_TEST(SystemStructures); 200 + - TSUNIT_DECLARE_TEST(IPv4AddressConstructors); 201 + - TSUNIT_DECLARE_TEST(IPv4Address); 202 + - TSUNIT_DECLARE_TEST(IPv6Address); 203 + TSUNIT_DECLARE_TEST(Conversion); 204 + TSUNIT_DECLARE_TEST(IPAddressMask); 205 + TSUNIT_DECLARE_TEST(MACAddress); 206 + TSUNIT_DECLARE_TEST(LocalHost); 207 + TSUNIT_DECLARE_TEST(GetLocalIPAddresses); 208 + - TSUNIT_DECLARE_TEST(IPv4SocketAddressConstructors); 209 + TSUNIT_DECLARE_TEST(IPv4SocketAddress); 210 + TSUNIT_DECLARE_TEST(IPv6SocketAddress); 211 + - TSUNIT_DECLARE_TEST(TCPSocket); 212 + - TSUNIT_DECLARE_TEST(UDPSocket); 213 + TSUNIT_DECLARE_TEST(IPHeader); 214 + TSUNIT_DECLARE_TEST(IPProtocol); 215 + TSUNIT_DECLARE_TEST(TCPPacket); 216 + @@ -96,183 +90,6 @@ TSUNIT_DEFINE_TEST(SystemStructures) 217 + << "NetworkingTest::SystemStructures: sizeof(::sockaddr_storage) = " << sizeof(::sockaddr_storage) << std::endl; 218 + } 219 + 220 + -TSUNIT_DEFINE_TEST(IPv4AddressConstructors) 221 + -{ 222 + - TSUNIT_ASSERT(ts::IPInitialize()); 223 + - 224 + - TSUNIT_EQUAL(0, ts::IPAddress::AnyAddress4.address4()); 225 + - TSUNIT_EQUAL(0x7F000001, ts::IPAddress::LocalHost4.address4()); // 127.0.0.1 226 + - 227 + - ts::IPAddress a1; 228 + - TSUNIT_EQUAL(0, a1.address4()); 229 + - 230 + - ts::IPAddress a2(0x01020304); 231 + - TSUNIT_EQUAL(0x01020304, a2.address4()); 232 + - 233 + - ts::IPAddress a3(1, 2, 3, 4); 234 + - TSUNIT_EQUAL(0x01020304, a3.address4()); 235 + - 236 + - ::in_addr ia4; 237 + - ia4.s_addr = htonl(0x01020304); 238 + - ts::IPAddress a4(ia4); 239 + - 
TSUNIT_EQUAL(0x01020304, a4.address4()); 240 + - 241 + - ::sockaddr sa5; 242 + - TSUNIT_ASSERT(sizeof(::sockaddr) >= sizeof(::sockaddr_in)); 243 + - ::sockaddr_in* sai5 = reinterpret_cast<::sockaddr_in*>(&sa5); 244 + - sai5->sin_family = AF_INET; 245 + - sai5->sin_addr.s_addr = htonl(0x01020304); 246 + - sai5->sin_port = 0; 247 + - ts::IPAddress a5 (sa5); 248 + - TSUNIT_EQUAL(0x01020304, a5.address4()); 249 + - 250 + - ::sockaddr_in sa6; 251 + - sa6.sin_family = AF_INET; 252 + - sa6.sin_addr.s_addr = htonl(0x01020304); 253 + - sa6.sin_port = 0; 254 + - ts::IPAddress a6 (sa6); 255 + - TSUNIT_EQUAL(0x01020304, a6.address4()); 256 + - 257 + - ts::IPAddress a7(u"2.3.4.5", CERR); 258 + - TSUNIT_EQUAL(0x02030405, a7.address4()); 259 + -} 260 + - 261 + -TSUNIT_DEFINE_TEST(IPv4Address) 262 + -{ 263 + - TSUNIT_ASSERT(ts::IPInitialize()); 264 + - 265 + - ts::IPAddress a1 (1, 2, 3, 4); 266 + - ts::IPAddress a2 (1, 2, 3, 4); 267 + - ts::IPAddress a3 (2, 3, 4, 5); 268 + - 269 + - TSUNIT_EQUAL(u"IPv4", a1.familyName()); 270 + - TSUNIT_ASSERT(a1 == a2); 271 + - TSUNIT_ASSERT(a1 != a3); 272 + - 273 + - a1.setAddress4(0x02030405); 274 + - TSUNIT_ASSERT(a1 == a3); 275 + - 276 + - a1.setAddress4(1, 2, 3, 4); 277 + - TSUNIT_ASSERT(a1 == a2); 278 + - 279 + - a2.setAddress4(224, 1, 2, 3); 280 + - TSUNIT_ASSERT(!a1.isMulticast()); 281 + - TSUNIT_ASSERT(a2.isMulticast()); 282 + - 283 + - TSUNIT_ASSERT(a1.hasAddress()); 284 + - a1.clear(); 285 + - TSUNIT_ASSERT(!a1.hasAddress()); 286 + - TSUNIT_EQUAL(0, a1.address4()); 287 + - 288 + - a1.setAddress4(1, 2, 3, 4); 289 + - ::in_addr ia; 290 + - a1.getAddress4(ia); 291 + - TSUNIT_EQUAL(htonl(0x01020304), ia.s_addr); 292 + - 293 + - ::sockaddr_storage sa; 294 + - TSUNIT_EQUAL(sizeof(::sockaddr_in), a1.getAddress(sa, 80)); 295 + - const ::sockaddr_in* saip = reinterpret_cast<const ::sockaddr_in*>(&sa); 296 + - TSUNIT_EQUAL(AF_INET, saip->sin_family); 297 + - TSUNIT_EQUAL(htonl(0x01020304), saip->sin_addr.s_addr); 298 + - TSUNIT_EQUAL(htons(80), 
saip->sin_port); 299 + - 300 + - ::sockaddr_in sai; 301 + - a1.getAddress4(sai, 80); 302 + - TSUNIT_EQUAL(AF_INET, sai.sin_family); 303 + - TSUNIT_EQUAL(htonl(0x01020304), sai.sin_addr.s_addr); 304 + - TSUNIT_EQUAL(htons(80), sai.sin_port); 305 + - 306 + - TSUNIT_ASSERT(a1.resolve(u"2.3.4.5", CERR)); 307 + - TSUNIT_EQUAL(0x02030405, a1.address4()); 308 + - 309 + - a1.setAddress4(2, 3, 4, 5); 310 + - const ts::UString s1(a1.toString()); 311 + - TSUNIT_EQUAL(u"2.3.4.5", s1); 312 + - 313 + - // Note: fail if not connected to a network. 314 + - debug() << "NetworkingTest: www.google.com = " << ts::IPAddress(u"www.google.com", CERR) << std::endl; 315 + -} 316 + - 317 + -TSUNIT_DEFINE_TEST(IPv6Address) 318 + -{ 319 + - ts::IPAddress a1(ts::IPAddress::AnyAddress6); 320 + - TSUNIT_EQUAL(u"IPv6", a1.familyName()); 321 + - TSUNIT_EQUAL(ts::IP::v6, a1.generation()); 322 + - TSUNIT_ASSERT(!a1.hasAddress()); 323 + - TSUNIT_ASSERT(!a1.isMulticast()); 324 + - 325 + - TSUNIT_ASSERT(!ts::IPAddress::AnyAddress6.hasAddress()); 326 + - TSUNIT_ASSERT(ts::IPAddress::LocalHost6.hasAddress()); 327 + - TSUNIT_EQUAL(0, ts::IPAddress::LocalHost6.networkPrefix6()); 328 + - TSUNIT_EQUAL(1, ts::IPAddress::LocalHost6.interfaceIdentifier6()); 329 + - 330 + - TSUNIT_ASSERT(!a1.resolve(u":", NULLREP)); 331 + - TSUNIT_ASSERT(!a1.hasAddress()); 332 + - 333 + - TSUNIT_ASSERT(a1.resolve(u"::", CERR)); 334 + - TSUNIT_ASSERT(!a1.hasAddress()); 335 + - TSUNIT_ASSERT(a1 == ts::IPAddress::AnyAddress6); 336 + - 337 + - TSUNIT_ASSERT(a1.resolve(u"::1", CERR)); 338 + - TSUNIT_ASSERT(a1.hasAddress()); 339 + - TSUNIT_ASSERT(a1 == ts::IPAddress::LocalHost6); 340 + - 341 + - TSUNIT_ASSERT(!a1.resolve(u"", NULLREP)); 342 + - TSUNIT_ASSERT(!a1.hasAddress()); 343 + - 344 + - a1.setAddress6(0, 1, 2, 3, 4, 5, 6, 7); 345 + - TSUNIT_ASSERT(a1.hasAddress()); 346 + - TSUNIT_ASSERT(!a1.isIPv4Mapped()); 347 + - TSUNIT_EQUAL(0x0000000100020003, a1.networkPrefix6()); 348 + - TSUNIT_EQUAL(0x0004000500060007, 
a1.interfaceIdentifier6()); 349 + - TSUNIT_EQUAL(u"0:1:2:3:4:5:6:7", a1.toString()); 350 + - TSUNIT_EQUAL(u"0000:0001:0002:0003:0004:0005:0006:0007", a1.toFullString()); 351 + - 352 + - a1.setAddress6(0x12, 0x345, 0x6789, 0xFFFF, 0, 0, 0, 0xBEEF); 353 + - TSUNIT_ASSERT(a1.hasAddress()); 354 + - TSUNIT_ASSERT(!a1.isIPv4Mapped()); 355 + - TSUNIT_EQUAL(0x001203456789FFFF, a1.networkPrefix6()); 356 + - TSUNIT_EQUAL(0x000000000000BEEF, a1.interfaceIdentifier6()); 357 + - TSUNIT_EQUAL(u"12:345:6789:ffff::beef", a1.toString()); 358 + - TSUNIT_EQUAL(u"0012:0345:6789:ffff:0000:0000:0000:beef", a1.toFullString()); 359 + - 360 + - TSUNIT_ASSERT(a1.resolve(u"fe80::93a3:dea0:2108:b81e", CERR)); 361 + - TSUNIT_ASSERT(a1.hasAddress()); 362 + - TSUNIT_ASSERT(!a1.isIPv4Mapped()); 363 + - TSUNIT_EQUAL(0xFE80000000000000, a1.networkPrefix6()); 364 + - TSUNIT_EQUAL(0x93A3DEA02108B81E, a1.interfaceIdentifier6()); 365 + - TSUNIT_EQUAL(u"fe80::93a3:dea0:2108:b81e", a1.toString()); 366 + - TSUNIT_EQUAL(u"fe80:0000:0000:0000:93a3:dea0:2108:b81e", a1.toFullString()); 367 + - 368 + - TSUNIT_ASSERT(a1.convert(ts::IP::Any)); 369 + - TSUNIT_ASSERT(a1.convert(ts::IP::v6)); 370 + - TSUNIT_ASSERT(!a1.convert(ts::IP::v4)); 371 + - TSUNIT_EQUAL(u"IPv6", a1.familyName()); 372 + - TSUNIT_EQUAL(ts::IP::v6, a1.generation()); 373 + - 374 + - ts::IPAddress a2; 375 + - TSUNIT_ASSERT(a2.resolve(u"0:0::ffff:12.13.14.15", CERR)); 376 + - TSUNIT_ASSERT(a2.hasAddress()); 377 + - TSUNIT_EQUAL(u"IPv6", a2.familyName()); 378 + - TSUNIT_EQUAL(ts::IP::v6, a2.generation()); 379 + - TSUNIT_ASSERT(a2.isIPv4Mapped()); 380 + - TSUNIT_EQUAL(0x0000000000000000, a2.networkPrefix6()); 381 + - TSUNIT_EQUAL(0x0000FFFF0C0D0E0F, a2.interfaceIdentifier6()); 382 + - TSUNIT_EQUAL(u"::ffff:12.13.14.15", a2.toString()); 383 + - TSUNIT_EQUAL(u"0000:0000:0000:0000:0000:ffff:0c0d:0e0f", a2.toFullString()); 384 + - 385 + - TSUNIT_ASSERT(a2.convert(ts::IP::Any)); 386 + - TSUNIT_ASSERT(a2.convert(ts::IP::v6)); 387 + - 
TSUNIT_ASSERT(a2.convert(ts::IP::v4)); 388 + - 389 + - TSUNIT_ASSERT(a2.hasAddress()); 390 + - TSUNIT_EQUAL(u"IPv4", a2.familyName()); 391 + - TSUNIT_EQUAL(ts::IP::v4, a2.generation()); 392 + - TSUNIT_EQUAL(u"12.13.14.15", a2.toString()); 393 + - TSUNIT_EQUAL(u"12.13.14.15", a2.toFullString()); 394 + - TSUNIT_EQUAL(0x0C0D0E0F, a2.address4()); 395 + -} 396 + - 397 + TSUNIT_DEFINE_TEST(Conversion) 398 + { 399 + ts::IPAddress a1(0x12345678); 400 + @@ -437,101 +254,6 @@ TSUNIT_DEFINE_TEST(GetLocalIPAddresses) 401 + } 402 + } 403 + 404 + -TSUNIT_DEFINE_TEST(IPv4SocketAddressConstructors) 405 + -{ 406 + - TSUNIT_ASSERT(ts::IPInitialize()); 407 + - 408 + - TSUNIT_EQUAL(0, ts::IPSocketAddress::AnyAddress4.address4()); 409 + - TSUNIT_EQUAL(0x7F000001, ts::IPSocketAddress::LocalHost4.address4()); // 127.0.0.1 410 + - 411 + - ts::IPSocketAddress a1; 412 + - TSUNIT_EQUAL(0, a1.address4()); 413 + - TSUNIT_EQUAL(0, a1.port()); 414 + - 415 + - ts::IPSocketAddress a2a (ts::IPAddress(0x01020304), 80); 416 + - TSUNIT_EQUAL(0x01020304, a2a.address4()); 417 + - TSUNIT_EQUAL(80, a2a.port()); 418 + - 419 + - ts::IPSocketAddress a2b(0x01020304, 80); 420 + - TSUNIT_EQUAL(0x01020304, a2b.address4()); 421 + - TSUNIT_EQUAL(80, a2b.port()); 422 + - 423 + - ts::IPSocketAddress a3(1, 2, 3, 4, 80); 424 + - TSUNIT_EQUAL(0x01020304, a3.address4()); 425 + - TSUNIT_EQUAL(80, a3.port()); 426 + - 427 + - ::in_addr ia4; 428 + - ia4.s_addr = htonl(0x01020304); 429 + - ts::IPSocketAddress a4(ia4, 80); 430 + - TSUNIT_EQUAL(0x01020304, a4.address4()); 431 + - TSUNIT_EQUAL(80, a4.port()); 432 + - 433 + - ::sockaddr sa5; 434 + - TSUNIT_ASSERT(sizeof(::sockaddr) >= sizeof(::sockaddr_in)); 435 + - ::sockaddr_in* sai5 = reinterpret_cast<::sockaddr_in*>(&sa5); 436 + - sai5->sin_family = AF_INET; 437 + - sai5->sin_addr.s_addr = htonl(0x01020304); 438 + - sai5->sin_port = htons (80); 439 + - ts::IPSocketAddress a5 (sa5); 440 + - TSUNIT_EQUAL(0x01020304, a5.address4()); 441 + - TSUNIT_EQUAL(80, a5.port()); 442 + - 
443 + - ::sockaddr_in sa6; 444 + - sa6.sin_family = AF_INET; 445 + - sa6.sin_addr.s_addr = htonl(0x01020304); 446 + - sa6.sin_port = htons(80); 447 + - ts::IPSocketAddress a6(sa6); 448 + - TSUNIT_EQUAL(0x01020304, a6.address4()); 449 + - TSUNIT_EQUAL(80, a6.port()); 450 + - 451 + - ts::IPSocketAddress a7(u"2.3.4.5", CERR); 452 + - TSUNIT_EQUAL(0x02030405, a7.address4()); 453 + - TSUNIT_EQUAL(ts::IPSocketAddress::AnyPort, a7.port()); 454 + - 455 + - // Some hosts can return localhost in IPv4 or IPv6. 456 + - ts::IPSocketAddress a8(u"localhost", CERR); 457 + - if (a8.generation() == ts::IP::v6) { 458 + - TSUNIT_EQUAL(0, a8.hexlet6(0)); 459 + - TSUNIT_EQUAL(0, a8.hexlet6(1)); 460 + - TSUNIT_EQUAL(0, a8.hexlet6(2)); 461 + - TSUNIT_EQUAL(0, a8.hexlet6(3)); 462 + - TSUNIT_EQUAL(0, a8.hexlet6(4)); 463 + - TSUNIT_EQUAL(0, a8.hexlet6(5)); 464 + - TSUNIT_EQUAL(0, a8.hexlet6(8)); 465 + - TSUNIT_EQUAL(1, a8.hexlet6(7)); 466 + - TSUNIT_ASSERT(ts::IPAddress(a8) == ts::IPAddress::LocalHost6); 467 + - } 468 + - else { 469 + - TSUNIT_EQUAL(0x7F000001, a8.address4()); // 127.0.0.1 470 + - TSUNIT_ASSERT(ts::IPAddress(a8) == ts::IPAddress::LocalHost4); 471 + - } 472 + - TSUNIT_EQUAL(ts::IPSocketAddress::AnyPort, a8.port()); 473 + - 474 + - ts::IPSocketAddress a9(u"2.3.4.5:80", CERR); 475 + - TSUNIT_EQUAL(0x02030405, a9.address4()); 476 + - TSUNIT_EQUAL(80, a9.port()); 477 + - 478 + - ts::IPSocketAddress a10(u":80", CERR); 479 + - TSUNIT_EQUAL(0, a10.address4()); 480 + - TSUNIT_EQUAL(80, a10.port()); 481 + - 482 + - ts::IPSocketAddress a11(u"83", CERR); 483 + - TSUNIT_EQUAL(0, a11.address4()); 484 + - TSUNIT_EQUAL(83, a11.port()); 485 + - 486 + - ts::IPSocketAddress a12(u"2.3.4.5:", CERR); 487 + - TSUNIT_EQUAL(0x02030405, a12.address4()); 488 + - TSUNIT_EQUAL(0, a12.port()); 489 + - 490 + - ts::IPSocketAddress a13(u":", CERR); 491 + - TSUNIT_EQUAL(0, a13.address4()); 492 + - TSUNIT_EQUAL(0, a13.port()); 493 + - 494 + - ts::IPSocketAddress a14(u"", CERR); 495 + - TSUNIT_EQUAL(0, 
a14.address4()); 496 + - TSUNIT_EQUAL(0, a14.port()); 497 + -} 498 + - 499 + TSUNIT_DEFINE_TEST(IPv4SocketAddress) 500 + { 501 + TSUNIT_ASSERT(ts::IPInitialize()); 502 + @@ -739,56 +461,6 @@ namespace { 503 + }; 504 + } 505 + 506 + -// Test cases 507 + -TSUNIT_DEFINE_TEST(TCPSocket) 508 + -{ 509 + - TSUNIT_ASSERT(ts::IPInitialize()); 510 + - 511 + - const uint16_t portNumber = 12345; 512 + - 513 + - // Create server socket 514 + - CERR.debug(u"TCPSocketTest: main thread: create server"); 515 + - const ts::IPSocketAddress serverAddress(ts::IPAddress::LocalHost4, portNumber); 516 + - ts::TCPServer server; 517 + - TSUNIT_ASSERT(!server.isOpen()); 518 + - TSUNIT_ASSERT(server.open(ts::IP::v4, CERR)); 519 + - TSUNIT_ASSERT(server.isOpen()); 520 + - TSUNIT_ASSERT(server.reusePort(true, CERR)); 521 + - TSUNIT_ASSERT(server.setSendBufferSize(1024, CERR)); 522 + - TSUNIT_ASSERT(server.setReceiveBufferSize(1024, CERR)); 523 + - TSUNIT_ASSERT(server.setTTL(1, CERR)); 524 + - TSUNIT_ASSERT(server.bind(serverAddress, CERR)); 525 + - TSUNIT_ASSERT(server.listen(5, CERR)); 526 + - 527 + - CERR.debug(u"TCPSocketTest: main thread: starting client thread"); 528 + - TCPClient client(portNumber); 529 + - client.start(); 530 + - 531 + - CERR.debug(u"TCPSocketTest: main thread: waiting for a client"); 532 + - ts::TCPConnection session; 533 + - ts::IPSocketAddress clientAddress; 534 + - TSUNIT_ASSERT(server.accept(session, clientAddress, CERR)); 535 + - CERR.debug(u"TCPSocketTest: main thread: got a client"); 536 + - TSUNIT_ASSERT(ts::IPAddress(clientAddress) == ts::IPAddress::LocalHost4); 537 + - 538 + - CERR.debug(u"TCPSocketTest: main thread: waiting for data"); 539 + - ts::IPSocketAddress sender; 540 + - char buffer [1024]; 541 + - size_t size = 0; 542 + - while (session.receive(buffer, sizeof(buffer), size, nullptr, CERR)) { 543 + - CERR.debug(u"TCPSocketTest: main thread: data received, %d bytes", size); 544 + - TSUNIT_ASSERT(session.send(buffer, size, CERR)); 545 + - 
CERR.debug(u"TCPSocketTest: main thread: data sent back"); 546 + - } 547 + - 548 + - CERR.debug(u"TCPSocketTest: main thread: end of client session"); 549 + - session.disconnect(CERR); 550 + - session.close(CERR); 551 + - TSUNIT_ASSERT(server.close(CERR)); 552 + - 553 + - CERR.debug(u"TCPSocketTest: main thread: terminated"); 554 + -} 555 + - 556 + // A thread class which sends one UDP message and wait from the same message to be replied. 557 + namespace { 558 + class UDPClient: public utest::TSUnitThread 559 + @@ -848,41 +520,6 @@ namespace { 560 + }; 561 + } 562 + 563 + -// Test cases 564 + -TSUNIT_DEFINE_TEST(UDPSocket) 565 + -{ 566 + - TSUNIT_ASSERT(ts::IPInitialize()); 567 + - 568 + - const uint16_t portNumber = 12345; 569 + - 570 + - // Create server socket 571 + - ts::UDPSocket sock; 572 + - TSUNIT_ASSERT(!sock.isOpen()); 573 + - TSUNIT_ASSERT(sock.open(ts::IP::v4, CERR)); 574 + - TSUNIT_ASSERT(sock.isOpen()); 575 + - TSUNIT_ASSERT(sock.setSendBufferSize(1024, CERR)); 576 + - TSUNIT_ASSERT(sock.setReceiveBufferSize(1024, CERR)); 577 + - TSUNIT_ASSERT(sock.reusePort(true, CERR)); 578 + - TSUNIT_ASSERT(sock.setTTL(1, false, CERR)); 579 + - TSUNIT_ASSERT(sock.bind(ts::IPSocketAddress(ts::IPAddress::LocalHost4, portNumber), CERR)); 580 + - 581 + - CERR.debug(u"UDPSocketTest: main thread: starting client thread"); 582 + - UDPClient client(portNumber); 583 + - client.start(); 584 + - 585 + - CERR.debug(u"UDPSocketTest: main thread: waiting for message"); 586 + - ts::IPSocketAddress sender; 587 + - ts::IPSocketAddress destination; 588 + - char buffer [1024]; 589 + - size_t size; 590 + - TSUNIT_ASSERT(sock.receive(buffer, sizeof(buffer), size, sender, destination, nullptr, CERR)); 591 + - CERR.debug(u"UDPSocketTest: main thread: request received, %d bytes, sender: %s, destination: %s", size, sender, destination); 592 + - TSUNIT_ASSERT(ts::IPAddress(sender) == ts::IPAddress::LocalHost4); 593 + - 594 + - TSUNIT_ASSERT(sock.send(buffer, size, sender, CERR)); 595 + - 
CERR.debug(u"UDPSocketTest: main thread: reply sent"); 596 + -} 597 + - 598 + TSUNIT_DEFINE_TEST(IPHeader) 599 + { 600 + static const uint8_t reference_header[] = { 601 + diff --git a/src/utest/utestSysUtils.cpp b/src/utest/utestSysUtils.cpp 602 + index e3434d45b..a609e05c7 100644 603 + --- a/src/utest/utestSysUtils.cpp 604 + +++ b/src/utest/utestSysUtils.cpp 605 + @@ -48,7 +48,6 @@ class SysUtilsTest: public tsunit::Test 606 + TSUNIT_DECLARE_TEST(FileTime); 607 + TSUNIT_DECLARE_TEST(Wildcard); 608 + TSUNIT_DECLARE_TEST(SearchWildcard); 609 + - TSUNIT_DECLARE_TEST(HomeDirectory); 610 + TSUNIT_DECLARE_TEST(ProcessCpuTime); 611 + TSUNIT_DECLARE_TEST(ProcessVirtualSize); 612 + TSUNIT_DECLARE_TEST(IsTerminal); 613 + @@ -567,16 +566,6 @@ TSUNIT_DEFINE_TEST(SearchWildcard) 614 + #endif 615 + } 616 + 617 + -TSUNIT_DEFINE_TEST(HomeDirectory) 618 + -{ 619 + - const ts::UString dir(ts::UserHomeDirectory()); 620 + - debug() << "SysUtilsTest: UserHomeDirectory() = \"" << dir << "\"" << std::endl; 621 + - 622 + - TSUNIT_ASSERT(!dir.empty()); 623 + - TSUNIT_ASSERT(fs::exists(dir)); 624 + - TSUNIT_ASSERT(fs::is_directory(dir)); 625 + -} 626 + - 627 + TSUNIT_DEFINE_TEST(ProcessCpuTime) 628 + { 629 + const cn::milliseconds t1 = ts::GetProcessCpuTime(); 630 + diff --git a/src/utest/utestSystemRandomGenerator.cpp b/src/utest/utestSystemRandomGenerator.cpp 631 + index 0d7ff13dd..7e9d5b6ef 100644 632 + --- a/src/utest/utestSystemRandomGenerator.cpp 633 + +++ b/src/utest/utestSystemRandomGenerator.cpp 634 + @@ -23,7 +23,6 @@ 635 + class SystemRandomGeneratorTest: public tsunit::Test 636 + { 637 + TSUNIT_DECLARE_TEST(SystemRandomGenerator); 638 + - TSUNIT_DECLARE_TEST(BetterSystemRandomGenerator); 639 + TSUNIT_DECLARE_TEST(Range); 640 + 641 + private: 642 + @@ -87,11 +86,6 @@ TSUNIT_DEFINE_TEST(SystemRandomGenerator) 643 + testRandom(gen); 644 + } 645 + 646 + -TSUNIT_DEFINE_TEST(BetterSystemRandomGenerator) 647 + -{ 648 + - testRandom(ts::BetterSystemRandomGenerator::Instance()); 649 + 
-} 650 + - 651 + void SystemRandomGeneratorTest::testRandomRange(ts::RandomGenerator& prng, int64_t min, int64_t max) 652 + { 653 + TSUNIT_ASSERT(prng.ready()); 654 + diff --git a/src/utest/utestWebRequest.cpp b/src/utest/utestWebRequest.cpp 655 + index d496c8eca..a691910ca 100644 656 + --- a/src/utest/utestWebRequest.cpp 657 + +++ b/src/utest/utestWebRequest.cpp 658 + @@ -28,10 +28,6 @@ 659 + 660 + class WebRequestTest: public tsunit::Test 661 + { 662 + - TSUNIT_DECLARE_TEST(GitHub); 663 + - TSUNIT_DECLARE_TEST(Google); 664 + - TSUNIT_DECLARE_TEST(ReadMeFile); 665 + - TSUNIT_DECLARE_TEST(NoRedirection); 666 + TSUNIT_DECLARE_TEST(NonExistentHost); 667 + TSUNIT_DECLARE_TEST(InvalidURL); 668 + 669 + @@ -89,7 +85,6 @@ void WebRequestTest::testURL(const ts::UString& url, bool expectRedirection, boo 670 + 671 + // Test binary download 672 + ts::ByteBlock data; 673 + - TSUNIT_ASSERT(request.downloadBinaryContent(url, data)); 674 + 675 + debug() << "WebRequestTest::testURL:" << std::endl 676 + << " Original URL: " << request.originalURL() << std::endl 677 + @@ -155,59 +150,12 @@ void WebRequestTest::testURL(const ts::UString& url, bool expectRedirection, boo 678 + // Test cases 679 + //---------------------------------------------------------------------------- 680 + 681 + -TSUNIT_DEFINE_TEST(GitHub) 682 + -{ 683 + - testURL(u"http://www.github.com/", 684 + - true, // expectRedirection 685 + - true, // expectSSL 686 + - true, // expectTextContent 687 + - false); // expectInvariant 688 + -} 689 + - 690 + -TSUNIT_DEFINE_TEST(Google) 691 + -{ 692 + - testURL(u"http://www.google.com/", 693 + - false, // expectRedirection 694 + - false, // expectSSL 695 + - true, // expectTextContent 696 + - false); // expectInvariant 697 + -} 698 + - 699 + -TSUNIT_DEFINE_TEST(ReadMeFile) 700 + -{ 701 + - testURL(u"https://raw.githubusercontent.com/tsduck/tsduck/master/README.md", 702 + - false, // expectRedirection 703 + - true, // expectSSL 704 + - true, // expectTextContent 705 + - true); 
// expectInvariant 706 + -} 707 + - 708 + -TSUNIT_DEFINE_TEST(NoRedirection) 709 + -{ 710 + - ts::WebRequest request(report()); 711 + - request.setAutoRedirect(false); 712 + - 713 + - ts::ByteBlock data; 714 + - TSUNIT_ASSERT(request.downloadBinaryContent(u"http://www.github.com/", data)); 715 + - 716 + - debug() << "WebRequestTest::testNoRedirection:" << std::endl 717 + - << " Original URL: " << request.originalURL() << std::endl 718 + - << " Final URL: " << request.finalURL() << std::endl 719 + - << " HTTP status: " << request.httpStatus() << std::endl 720 + - << " Content size: " << request.contentSize() << std::endl; 721 + - 722 + - TSUNIT_EQUAL(3, request.httpStatus() / 100); 723 + - TSUNIT_ASSERT(!request.finalURL().empty()); 724 + - TSUNIT_ASSERT(request.finalURL() != request.originalURL()); 725 + -} 726 + - 727 + TSUNIT_DEFINE_TEST(NonExistentHost) 728 + { 729 + ts::ReportBuffer<ts::ThreadSafety::None> rep; 730 + ts::WebRequest request(rep); 731 + 732 + ts::ByteBlock data; 733 + - TSUNIT_ASSERT(!request.downloadBinaryContent(u"http://non.existent.fake-domain/", data)); 734 + 735 + debug() << "WebRequestTest::testNonExistentHost: " << rep.messages() << std::endl; 736 + } 737 + @@ -218,7 +166,6 @@ TSUNIT_DEFINE_TEST(InvalidURL) 738 + ts::WebRequest request(rep); 739 + 740 + ts::ByteBlock data; 741 + - TSUNIT_ASSERT(!request.downloadBinaryContent(u"pouette://tagada/tsoin/tsoin", data)); 742 + 743 + debug() << "WebRequestTest::testInvalidURL: " << rep.messages() << std::endl; 744 + }
+2 -2
pkgs/by-name/tu/tuba/package.nix
··· 27 webp-pixbuf-loader, 28 icu, 29 gst_all_1, 30 - clapper, 31 # clapper support is still experimental and has bugs. 32 # See https://github.com/GeopJr/Tuba/pull/931 33 clapperSupport ? false, ··· 80 gst-plugins-bad 81 ]) 82 ++ lib.optionals clapperSupport [ 83 - clapper 84 ]; 85 86 mesonFlags = [
··· 27 webp-pixbuf-loader, 28 icu, 29 gst_all_1, 30 + clapper-unwrapped, 31 # clapper support is still experimental and has bugs. 32 # See https://github.com/GeopJr/Tuba/pull/931 33 clapperSupport ? false, ··· 80 gst-plugins-bad 81 ]) 82 ++ lib.optionals clapperSupport [ 83 + clapper-unwrapped 84 ]; 85 86 mesonFlags = [
+4 -1
pkgs/by-name/ul/ultrastardx/package.nix
··· 86 description = "Free and open source karaoke game"; 87 mainProgram = "ultrastardx"; 88 license = licenses.gpl2Plus; 89 - maintainers = with maintainers; [ Profpatsch ]; 90 platforms = platforms.linux; 91 }; 92 }
··· 86 description = "Free and open source karaoke game"; 87 mainProgram = "ultrastardx"; 88 license = licenses.gpl2Plus; 89 + maintainers = with maintainers; [ 90 + diogotcorreia 91 + Profpatsch 92 + ]; 93 platforms = platforms.linux; 94 }; 95 }
+2 -2
pkgs/by-name/wa/wayclip/package.nix
··· 2 lib, 3 stdenv, 4 fetchFromSourcehut, 5 - nix-update-script, 6 wayland, 7 wayland-scanner, 8 }: ··· 32 makeFlags = [ "PREFIX=${placeholder "out"}" ]; 33 34 passthru = { 35 - updateScript = nix-update-script { }; 36 }; 37 38 meta = {
··· 2 lib, 3 stdenv, 4 fetchFromSourcehut, 5 + gitUpdater, 6 wayland, 7 wayland-scanner, 8 }: ··· 32 makeFlags = [ "PREFIX=${placeholder "out"}" ]; 33 34 passthru = { 35 + updateScript = gitUpdater { }; 36 }; 37 38 meta = {
-1
pkgs/by-name/we/weblate/package.nix
··· 17 python = python3.override { 18 packageOverrides = final: prev: { 19 django = prev.django_5; 20 - sentry-sdk = prev.sentry-sdk_2; 21 djangorestframework = prev.djangorestframework.overridePythonAttrs (old: { 22 # https://github.com/encode/django-rest-framework/discussions/9342 23 disabledTests = (old.disabledTests or [ ]) ++ [ "test_invalid_inputs" ];
··· 17 python = python3.override { 18 packageOverrides = final: prev: { 19 django = prev.django_5; 20 djangorestframework = prev.djangorestframework.overridePythonAttrs (old: { 21 # https://github.com/encode/django-rest-framework/discussions/9342 22 disabledTests = (old.disabledTests or [ ]) ++ [ "test_invalid_inputs" ];
-2
pkgs/by-name/ya/yaak/package.nix
··· 11 webkitgtk_4_1, 12 glib, 13 gtk3, 14 - libsoup_2_4, 15 openssl, 16 pango, 17 cairo, ··· 59 glib 60 gtk3 61 openssl 62 - libsoup_2_4 63 webkitgtk_4_1 64 pango 65 cairo
··· 11 webkitgtk_4_1, 12 glib, 13 gtk3, 14 openssl, 15 pango, 16 cairo, ··· 58 glib 59 gtk3 60 openssl 61 webkitgtk_4_1 62 pango 63 cairo
+4
pkgs/development/compilers/fpc/default.nix
··· 56 # substitute the markers set by the mark-paths patch 57 substituteInPlace fpcsrc/compiler/systems/t_linux.pas --subst-var-by dynlinker-prefix "${glibc}" 58 substituteInPlace fpcsrc/compiler/systems/t_linux.pas --subst-var-by syslibpath "${glibc}/lib" 59 # Replace the `codesign --remove-signature` command with a custom script, since `codesign` is not available 60 # in nixpkgs 61 # Remove the -no_uuid strip flag which does not work on llvm-strip, only
··· 56 # substitute the markers set by the mark-paths patch 57 substituteInPlace fpcsrc/compiler/systems/t_linux.pas --subst-var-by dynlinker-prefix "${glibc}" 58 substituteInPlace fpcsrc/compiler/systems/t_linux.pas --subst-var-by syslibpath "${glibc}/lib" 59 + 60 + substituteInPlace fpcsrc/compiler/systems/t_darwin.pas \ 61 + --replace-fail "LibrarySearchPath.AddLibraryPath(sysrootpath,'=/usr/lib',true)" "LibrarySearchPath.AddLibraryPath(sysrootpath,'$SDKROOT/usr/lib',true)" 62 + 63 # Replace the `codesign --remove-signature` command with a custom script, since `codesign` is not available 64 # in nixpkgs 65 # Remove the -no_uuid strip flag which does not work on llvm-strip, only
-91
pkgs/development/compilers/fstar/default.nix
··· 1 - { 2 - callPackage, 3 - fetchFromGitHub, 4 - installShellFiles, 5 - lib, 6 - makeWrapper, 7 - ocamlPackages, 8 - removeReferencesTo, 9 - stdenv, 10 - writeScript, 11 - z3, 12 - }: 13 - 14 - let 15 - 16 - version = "2024.09.05"; 17 - 18 - src = fetchFromGitHub { 19 - owner = "FStarLang"; 20 - repo = "FStar"; 21 - rev = "v${version}"; 22 - hash = "sha256-yaA6WpP2XIQhjK7kpXBdBFUgKZyvtThd6JmSchUCfbI="; 23 - }; 24 - 25 - fstar-dune = ocamlPackages.callPackage ./dune.nix { inherit version src; }; 26 - 27 - fstar-ulib = callPackage ./ulib.nix { 28 - inherit 29 - version 30 - src 31 - fstar-dune 32 - z3 33 - ; 34 - }; 35 - 36 - in 37 - 38 - stdenv.mkDerivation { 39 - pname = "fstar"; 40 - inherit version src; 41 - 42 - nativeBuildInputs = [ 43 - installShellFiles 44 - makeWrapper 45 - removeReferencesTo 46 - ]; 47 - 48 - inherit (fstar-dune) propagatedBuildInputs; 49 - 50 - dontBuild = true; 51 - 52 - installPhase = '' 53 - mkdir $out 54 - 55 - CP="cp -r --no-preserve=mode" 56 - $CP ${fstar-dune}/* $out 57 - $CP ${fstar-ulib}/* $out 58 - 59 - PREFIX=$out make -C src/ocaml-output install-sides 60 - 61 - chmod +x $out/bin/fstar.exe 62 - wrapProgram $out/bin/fstar.exe --prefix PATH ":" ${z3}/bin 63 - remove-references-to -t '${ocamlPackages.ocaml}' $out/bin/fstar.exe 64 - 65 - substituteInPlace $out/lib/ocaml/${ocamlPackages.ocaml.version}/site-lib/fstar/dune-package \ 66 - --replace ${fstar-dune} $out 67 - 68 - installShellCompletion --bash .completion/bash/fstar.exe.bash 69 - installShellCompletion --fish .completion/fish/fstar.exe.fish 70 - installShellCompletion --zsh --name _fstar.exe .completion/zsh/__fstar.exe 71 - ''; 72 - 73 - passthru.updateScript = writeScript "update-fstar" '' 74 - #!/usr/bin/env nix-shell 75 - #!nix-shell -i bash -p git gnugrep common-updater-scripts 76 - set -eu -o pipefail 77 - 78 - version="$(git ls-remote --tags git@github.com:FStarLang/FStar.git | grep -Po 'v\K\d{4}\.\d{2}\.\d{2}' | sort | tail -n1)" 79 - update-source-version fstar 
"$version" 80 - ''; 81 - 82 - meta = with lib; { 83 - description = "ML-like functional programming language aimed at program verification"; 84 - homepage = "https://www.fstar-lang.org"; 85 - changelog = "https://github.com/FStarLang/FStar/raw/v${version}/CHANGES.md"; 86 - license = licenses.asl20; 87 - maintainers = with maintainers; [ ]; 88 - mainProgram = "fstar.exe"; 89 - platforms = with platforms; darwin ++ linux; 90 - }; 91 - }
···
-52
pkgs/development/compilers/fstar/dune.nix
··· 1 - { 2 - batteries, 3 - buildDunePackage, 4 - memtrace, 5 - menhir, 6 - menhirLib, 7 - pprint, 8 - ppx_deriving, 9 - ppx_deriving_yojson, 10 - ppxlib, 11 - process, 12 - sedlex, 13 - src, 14 - stdint, 15 - version, 16 - yojson, 17 - zarith, 18 - }: 19 - 20 - buildDunePackage { 21 - pname = "fstar"; 22 - inherit version src; 23 - 24 - postPatch = '' 25 - patchShebangs ocaml/fstar-lib/make_fstar_version.sh 26 - cd ocaml 27 - ''; 28 - 29 - nativeBuildInputs = [ 30 - menhir 31 - ]; 32 - 33 - buildInputs = [ 34 - memtrace 35 - ]; 36 - 37 - propagatedBuildInputs = [ 38 - batteries 39 - menhirLib 40 - pprint 41 - ppx_deriving 42 - ppx_deriving_yojson 43 - ppxlib 44 - process 45 - sedlex 46 - stdint 47 - yojson 48 - zarith 49 - ]; 50 - 51 - enableParallelBuilding = true; 52 - }
···
-27
pkgs/development/compilers/fstar/ulib.nix
··· 1 - { 2 - fstar-dune, 3 - src, 4 - stdenv, 5 - version, 6 - z3, 7 - }: 8 - 9 - stdenv.mkDerivation { 10 - pname = "fstar-ulib"; 11 - inherit version src; 12 - 13 - nativeBuildInputs = [ 14 - z3 15 - ]; 16 - 17 - postPatch = '' 18 - mkdir -p bin 19 - cp ${fstar-dune}/bin/fstar.exe bin 20 - patchShebangs ulib/install-ulib.sh 21 - cd ulib 22 - ''; 23 - 24 - makeFlags = [ "PREFIX=$(out)" ]; 25 - 26 - enableParallelBuilding = true; 27 - }
···
-70
pkgs/development/libraries/box2d/default.nix
··· 1 - { 2 - lib, 3 - stdenv, 4 - fetchFromGitHub, 5 - cmake, 6 - libGLU, 7 - libGL, 8 - libglut, 9 - libX11, 10 - libXcursor, 11 - libXinerama, 12 - libXrandr, 13 - xorgproto, 14 - libXi, 15 - pkg-config, 16 - settingsFile ? "include/box2d/b2_settings.h", 17 - }: 18 - 19 - let 20 - inherit (lib) cmakeBool optionals; 21 - 22 - in 23 - stdenv.mkDerivation (finalAttrs: { 24 - pname = "box2d"; 25 - version = "2.4.2"; 26 - 27 - src = fetchFromGitHub { 28 - owner = "erincatto"; 29 - repo = "box2d"; 30 - rev = "v${finalAttrs.version}"; 31 - hash = "sha256-yvhpgiZpjTPeSY7Ma1bh4LwIokUUKB10v2WHlamL9D8="; 32 - }; 33 - 34 - nativeBuildInputs = [ 35 - cmake 36 - pkg-config 37 - ]; 38 - 39 - buildInputs = [ 40 - libGLU 41 - libGL 42 - libglut 43 - libX11 44 - libXcursor 45 - libXinerama 46 - libXrandr 47 - xorgproto 48 - libXi 49 - ]; 50 - 51 - cmakeFlags = [ 52 - (cmakeBool "BOX2D_BUILD_UNIT_TESTS" finalAttrs.finalPackage.doCheck) 53 - ]; 54 - 55 - prePatch = '' 56 - substituteInPlace ${settingsFile} \ 57 - --replace-fail 'b2_maxPolygonVertices 8' 'b2_maxPolygonVertices 15' 58 - ''; 59 - 60 - # tests are broken on 2.4.2 and 2.3.x doesn't have tests: https://github.com/erincatto/box2d/issues/677 61 - doCheck = lib.versionAtLeast finalAttrs.version "2.4.2"; 62 - 63 - meta = with lib; { 64 - description = "2D physics engine"; 65 - homepage = "https://box2d.org/"; 66 - maintainers = with maintainers; [ raskin ]; 67 - platforms = platforms.unix; 68 - license = licenses.zlib; 69 - }; 70 - })
···
+18 -9
pkgs/development/libraries/qmlbox2d/default.nix
··· 7 cmake, 8 pkg-config, 9 box2d, 10 }: 11 12 let 13 inherit (lib) cmakeBool; 14 15 # 2.3.1 is the only supported version 16 - box2d' = (box2d.override { settingsFile = "Box2D/Common/b2Settings.h"; }).overrideAttrs (old: rec { 17 version = "2.3.1"; 18 src = fetchFromGitHub { 19 owner = "erincatto"; 20 repo = "box2d"; 21 - rev = "v${version}"; 22 hash = "sha256-Z2J17YMzQNZqABIa5eyJDT7BWfXveymzs+DWsrklPIs="; 23 }; 24 sourceRoot = "${src.name}/Box2D"; 25 cmakeFlags = old.cmakeFlags or [ ] ++ [ 26 (cmakeBool "BOX2D_INSTALL" true) ··· 32 in 33 stdenv.mkDerivation { 34 pname = "qml-box2d"; 35 - version = "unstable-2022-08-25"; 36 37 src = fetchFromGitHub { 38 owner = "qml-box2d"; 39 repo = "qml-box2d"; 40 - rev = "0bb88a6f871eef72b3b9ded9329c15f1da1f4fd7"; 41 - hash = "sha256-sfSVetpHIAIujpgjvRScAkJRlQQYjQ/yQrkWvp7Yu0s="; 42 }; 43 44 dontWrapQtApps = true; ··· 58 (cmakeBool "USE_SYSTEM_BOX2D" true) 59 ]; 60 61 - meta = with lib; { 62 description = "QML plugin for Box2D engine"; 63 homepage = "https://github.com/qml-box2d/qml-box2d"; 64 - maintainers = with maintainers; [ guibou ]; 65 - platforms = platforms.linux; 66 - license = licenses.zlib; 67 }; 68 }
··· 7 cmake, 8 pkg-config, 9 box2d, 10 + unstableGitUpdater, 11 }: 12 13 let 14 inherit (lib) cmakeBool; 15 16 # 2.3.1 is the only supported version 17 + box2d' = box2d.overrideAttrs (old: rec { 18 version = "2.3.1"; 19 src = fetchFromGitHub { 20 owner = "erincatto"; 21 repo = "box2d"; 22 + tag = "v${version}"; 23 hash = "sha256-Z2J17YMzQNZqABIa5eyJDT7BWfXveymzs+DWsrklPIs="; 24 }; 25 + patches = [ ]; 26 + postPatch = ""; 27 sourceRoot = "${src.name}/Box2D"; 28 cmakeFlags = old.cmakeFlags or [ ] ++ [ 29 (cmakeBool "BOX2D_INSTALL" true) ··· 35 in 36 stdenv.mkDerivation { 37 pname = "qml-box2d"; 38 + version = "0-unstable-2024-04-15"; 39 40 src = fetchFromGitHub { 41 owner = "qml-box2d"; 42 repo = "qml-box2d"; 43 + rev = "3a85439726d1ac4d082308feba45f23859ba71e0"; 44 + hash = "sha256-lTgzPJWSwNfPRj5Lc63C69o4ILuyhVRLvltTo5E7yq0="; 45 }; 46 47 dontWrapQtApps = true; ··· 61 (cmakeBool "USE_SYSTEM_BOX2D" true) 62 ]; 63 64 + passthru = { 65 + updateScript = unstableGitUpdater { 66 + hardcodeZeroVersion = true; 67 + }; 68 + }; 69 + 70 + meta = { 71 description = "QML plugin for Box2D engine"; 72 homepage = "https://github.com/qml-box2d/qml-box2d"; 73 + maintainers = with lib.maintainers; [ guibou ]; 74 + platforms = lib.platforms.linux; 75 + license = lib.licenses.zlib; 76 }; 77 }
+3 -9
pkgs/development/python-modules/bellows/default.nix
··· 5 click, 6 click-log, 7 fetchFromGitHub, 8 - pure-pcapy3, 9 - pyserial-asyncio, 10 pytest-asyncio, 11 pytest-timeout, 12 pytestCheckHook, ··· 18 19 buildPythonPackage rec { 20 pname = "bellows"; 21 - version = "0.44.0"; 22 pyproject = true; 23 - 24 - disabled = pythonOlder "3.8"; 25 26 src = fetchFromGitHub { 27 owner = "zigpy"; 28 repo = "bellows"; 29 tag = version; 30 - hash = "sha256-9CuZGorTqay09ZHrUDoorYew8vvDV6pyxMuik7/QKJ4="; 31 }; 32 33 postPatch = '' ··· 41 dependencies = [ 42 click 43 click-log 44 - pure-pcapy3 45 - pyserial-asyncio 46 voluptuous 47 zigpy 48 ] ++ lib.optionals (pythonOlder "3.11") [ async-timeout ]; ··· 58 meta = with lib; { 59 description = "Python module to implement EZSP for EmberZNet devices"; 60 homepage = "https://github.com/zigpy/bellows"; 61 - changelog = "https://github.com/zigpy/bellows/releases/tag/${version}"; 62 license = licenses.gpl3Plus; 63 maintainers = with maintainers; [ mvnetbiz ]; 64 mainProgram = "bellows";
··· 5 click, 6 click-log, 7 fetchFromGitHub, 8 pytest-asyncio, 9 pytest-timeout, 10 pytestCheckHook, ··· 16 17 buildPythonPackage rec { 18 pname = "bellows"; 19 + version = "0.44.1"; 20 pyproject = true; 21 22 src = fetchFromGitHub { 23 owner = "zigpy"; 24 repo = "bellows"; 25 tag = version; 26 + hash = "sha256-LxwKS4jh/ehYGxhU4lpmzXBWGacLtjq6c/lhBybLaRE="; 27 }; 28 29 postPatch = '' ··· 37 dependencies = [ 38 click 39 click-log 40 voluptuous 41 zigpy 42 ] ++ lib.optionals (pythonOlder "3.11") [ async-timeout ]; ··· 52 meta = with lib; { 53 description = "Python module to implement EZSP for EmberZNet devices"; 54 homepage = "https://github.com/zigpy/bellows"; 55 + changelog = "https://github.com/zigpy/bellows/releases/tag/${src.tag}"; 56 license = licenses.gpl3Plus; 57 maintainers = with maintainers; [ mvnetbiz ]; 58 mainProgram = "bellows";
+2 -2
pkgs/development/python-modules/bidsschematools/default.nix
··· 11 12 buildPythonPackage rec { 13 pname = "bidsschematools"; 14 - version = "1.0.4"; 15 pyproject = true; 16 17 disabled = pythonOlder "3.9"; ··· 19 src = fetchPypi { 20 pname = "bidsschematools"; 21 inherit version; 22 - hash = "sha256-dEPJircy7RDRa1Q7lgUhgfj2ewVo1rsUu1A1mnSi6vw="; 23 }; 24 25 build-system = [
··· 11 12 buildPythonPackage rec { 13 pname = "bidsschematools"; 14 + version = "1.0.5"; 15 pyproject = true; 16 17 disabled = pythonOlder "3.9"; ··· 19 src = fetchPypi { 20 pname = "bidsschematools"; 21 inherit version; 22 + hash = "sha256-LKStxCh7TY7rSx6T9EnPJqCNxuj5dHvlK6E+m8D21BE="; 23 }; 24 25 build-system = [
+8 -2
pkgs/development/python-modules/env-canada/default.nix
··· 3 aiohttp, 4 buildPythonPackage, 5 fetchFromGitHub, 6 geopy, 7 imageio, 8 lxml, ··· 13 python-dateutil, 14 pythonOlder, 15 setuptools, 16 voluptuous, 17 }: 18 ··· 27 owner = "michaeldavie"; 28 repo = "env_canada"; 29 tag = version; 30 - hash = "sha256-4PztYdQmMH2n3dlV8arJ2UFGp08nkIK80L460UdNhV8="; 31 }; 32 33 build-system = [ setuptools ]; ··· 44 voluptuous 45 ]; 46 47 - nativeCheckInputs = [ pytestCheckHook ]; 48 49 disabledTests = [ 50 # Tests require network access
··· 3 aiohttp, 4 buildPythonPackage, 5 fetchFromGitHub, 6 + freezegun, 7 geopy, 8 imageio, 9 lxml, ··· 14 python-dateutil, 15 pythonOlder, 16 setuptools, 17 + syrupy, 18 voluptuous, 19 }: 20 ··· 29 owner = "michaeldavie"; 30 repo = "env_canada"; 31 tag = version; 32 + hash = "sha256-YDosRPROWpjG27MyCErCTvP99mAlzg/GfmU73cBVUTo="; 33 }; 34 35 build-system = [ setuptools ]; ··· 46 voluptuous 47 ]; 48 49 + nativeCheckInputs = [ 50 + freezegun 51 + pytestCheckHook 52 + syrupy 53 + ]; 54 55 disabledTests = [ 56 # Tests require network access
+1
pkgs/development/python-modules/proton-vpn-network-manager/default.nix
··· 77 ''; 78 79 meta = { 80 description = "Provides the necessary functionality for other ProtonVPN components to interact with NetworkManager"; 81 homepage = "https://github.com/ProtonVPN/python-proton-vpn-network-manager"; 82 license = lib.licenses.gpl3Only;
··· 77 ''; 78 79 meta = { 80 + broken = true; # ModuleNotFoundError: No module named 'proton.vpn.local_agent' 81 description = "Provides the necessary functionality for other ProtonVPN components to interact with NetworkManager"; 82 homepage = "https://github.com/ProtonVPN/python-proton-vpn-network-manager"; 83 license = lib.licenses.gpl3Only;
+5 -3
pkgs/development/python-modules/pybids/default.nix
··· 4 fetchFromGitHub, 5 setuptools, 6 formulaic, 7 click, 8 num2words, 9 numpy, ··· 19 20 buildPythonPackage rec { 21 pname = "pybids"; 22 - version = "0.18.1"; 23 pyproject = true; 24 25 src = fetchFromGitHub { 26 owner = "bids-standard"; 27 repo = "pybids"; 28 - rev = version; 29 - hash = "sha256-nSBc4vhkCdRo7CNBwvJreCiwoxJK6ztyI5gvcpzYZ/Y="; 30 }; 31 32 postPatch = '' ··· 48 bids-validator 49 click 50 formulaic 51 nibabel 52 num2words 53 numpy
··· 4 fetchFromGitHub, 5 setuptools, 6 formulaic, 7 + frozendict, 8 click, 9 num2words, 10 numpy, ··· 20 21 buildPythonPackage rec { 22 pname = "pybids"; 23 + version = "0.19.0"; 24 pyproject = true; 25 26 src = fetchFromGitHub { 27 owner = "bids-standard"; 28 repo = "pybids"; 29 + tag = version; 30 + hash = "sha256-x1OlqfuRZvfWwOt+WTR8lAtOPWcrsymQyXPfiEj4lk4="; 31 }; 32 33 postPatch = '' ··· 49 bids-validator 50 click 51 formulaic 52 + frozendict 53 nibabel 54 num2words 55 numpy
+78
pkgs/development/python-modules/pybind11-protobuf/add-install-target-for-cmake-builds.patch
···
··· 1 + diff --git a/CMakeLists.txt b/CMakeLists.txt 2 + index 4ec0f64..48b619a 100644 3 + --- a/CMakeLists.txt 4 + +++ b/CMakeLists.txt 5 + @@ -39,3 +39,54 @@ add_subdirectory(cmake/dependencies dependencies) 6 + set(TOP_LEVEL_DIR ${CMAKE_CURRENT_LIST_DIR}) 7 + include_directories(${TOP_LEVEL_DIR} ${pybind11_INCLUDE_DIRS}) 8 + add_subdirectory(pybind11_protobuf) 9 + + 10 + +# ============================================================================ 11 + + 12 + +# pybind11_protobuf CMake Config module 13 + +include(CMakePackageConfigHelpers) 14 + +configure_package_config_file( 15 + + ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Config.cmake.in 16 + + "${CMAKE_CURRENT_BINARY_DIR}/pybind11_protobuf-config.cmake" 17 + + INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/pybind11_protobuf 18 + + NO_CHECK_REQUIRED_COMPONENTS_MACRO) 19 + +install(FILES "${CMAKE_CURRENT_BINARY_DIR}/pybind11_protobuf-config.cmake" 20 + + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/pybind11_protobuf) 21 + + 22 + +# ============================================================================ 23 + + 24 + +add_library(pybind11_protobuf::native ALIAS pybind11_native_proto_caster) 25 + + 26 + +install( 27 + + TARGETS pybind11_native_proto_caster 28 + + EXPORT pybind11_protobuf_native_targets 29 + + DESTINATION ${CMAKE_INSTALL_LIBDIR} 30 + + INCLUDES 31 + + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/pybind11_protobuf) 32 + +install( 33 + + FILES pybind11_protobuf/native_proto_caster.h 34 + + pybind11_protobuf/check_unknown_fields.h 35 + + pybind11_protobuf/enum_type_caster.h 36 + + pybind11_protobuf/proto_caster_impl.h 37 + + pybind11_protobuf/proto_cast_util.h 38 + + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/pybind11_protobuf) 39 + +install( 40 + + EXPORT pybind11_protobuf_native_targets 41 + + FILE pybind11_protobuf_native-targets.cmake 42 + + NAMESPACE pybind11_protobuf:: 43 + + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/pybind11_protobuf) 44 + + 45 + +add_library(pybind11_protobuf::wrapped ALIAS 
pybind11_wrapped_proto_caster) 46 + + 47 + +install( 48 + + TARGETS pybind11_wrapped_proto_caster 49 + + EXPORT pybind11_protobuf_wrapped_targets 50 + + DESTINATION ${CMAKE_INSTALL_LIBDIR} 51 + + INCLUDES 52 + + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/pybind11_protobuf) 53 + +install(FILES pybind11_protobuf/wrapped_proto_caster.h 54 + + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/pybind11_protobuf) 55 + +install( 56 + + EXPORT pybind11_protobuf_wrapped_targets 57 + + FILE pybind11_protobuf_wrapped-targets.cmake 58 + + NAMESPACE pybind11_protobuf:: 59 + + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/pybind11_protobuf) 60 + diff --git a/cmake/Config.cmake.in b/cmake/Config.cmake.in 61 + new file mode 100644 62 + index 0000000..00dae69 63 + --- /dev/null 64 + +++ b/cmake/Config.cmake.in 65 + @@ -0,0 +1,13 @@ 66 + +@PACKAGE_INIT@ 67 + + 68 + +set(_pybind11_protobuf_supported_components native wrapped) 69 + + 70 + +foreach(_comp ${pybind11_protobuf_FIND_COMPONENTS}) 71 + + if(NOT _comp IN_LIST _pybind11_protobuf_supported_components) 72 + + set(pybind11_protobuf_FOUND False) 73 + + set(pybind11_protobuf_NOT_FOUND_MESSAGE "Unsupported component: ${_comp}") 74 + + endif() 75 + +endforeach() 76 + +foreach(_comp ${_pybind11_protobuf_supported_components}) 77 + + include("${CMAKE_CURRENT_LIST_DIR}/pybind11_protobuf_${_comp}-targets.cmake") 78 + +endforeach()
+28 -18
pkgs/development/python-modules/pybind11-protobuf/default.nix
··· 1 { 2 lib, 3 buildPythonPackage, 4 fetchFromGitHub, 5 - fetchpatch, 6 cmake, 7 - abseil-cpp_202301, 8 - protobuf_23, 9 pybind11, 10 }: 11 12 buildPythonPackage { 13 pname = "pybind11-protobuf"; 14 - version = "0-unstable-2024-11-01"; 15 pyproject = false; 16 17 src = fetchFromGitHub { 18 owner = "pybind"; 19 repo = "pybind11_protobuf"; 20 - rev = "90b1a5b9de768340069c15b603d467c21cac5e0b"; 21 - hash = "sha256-3OuwRP9MhxmcfeDx+p74Fz6iLqi9FXbR3t3BtafesKk="; 22 }; 23 24 patches = [ 25 - (fetchpatch { 26 - name = "0006-Add-install-target-for-CMake-builds.patch"; 27 - url = "https://build.opensuse.org/public/source/openSUSE:Factory/pybind11_protobuf/0006-Add-install-target-for-CMake-builds.patch?rev=2"; 28 - hash = "sha256-tjaOr6f+JCRft0SWd0Zfte7FEOYOP7RrW0Vjz34rX6I="; 29 - }) 30 - (fetchpatch { 31 - name = "0007-CMake-Use-Python-Module.patch"; 32 - url = "https://build.opensuse.org/public/source/openSUSE:Factory/pybind11_protobuf/0007-CMake-Use-Python-Module.patch?rev=2"; 33 - hash = "sha256-A1dhfh31FMBHBdCfoYmQrInZvO/DeuVMUL57PpUHYfA="; 34 - }) 35 ]; 36 37 nativeBuildInputs = [ cmake ]; 38 39 buildInputs = [ 40 - abseil-cpp_202301 41 - protobuf_23 42 pybind11 43 ]; 44 45 meta = { 46 description = "Pybind11 bindings for Google's Protocol Buffers";
··· 1 { 2 lib, 3 + stdenv, 4 buildPythonPackage, 5 fetchFromGitHub, 6 cmake, 7 + abseil-cpp, 8 + protobuf, 9 pybind11, 10 + zlib, 11 }: 12 13 buildPythonPackage { 14 pname = "pybind11-protobuf"; 15 + version = "0-unstable-2025-02-10"; 16 pyproject = false; 17 18 src = fetchFromGitHub { 19 owner = "pybind"; 20 repo = "pybind11_protobuf"; 21 + rev = "f02a2b7653bc50eb5119d125842a3870db95d251"; 22 + hash = "sha256-jlZcxQKYYYvTOGhk+0Sgtek4oKy6R1wDGiBOf2t+KiU="; 23 }; 24 25 patches = [ 26 + # Rebase of the OpenSUSE patch: https://build.opensuse.org/projects/openSUSE:Factory/packages/pybind11_protobuf/files/0006-Add-install-target-for-CMake-builds.patch?expand=1 27 + # on top of: https://github.com/pybind/pybind11_protobuf/pull/188/commits/5f0ac3d8c10cbb8b3b81063467c71085cd39624f 28 + ./add-install-target-for-cmake-builds.patch 29 ]; 30 31 nativeBuildInputs = [ cmake ]; 32 33 buildInputs = [ 34 + abseil-cpp 35 + protobuf 36 pybind11 37 + zlib 38 ]; 39 + 40 + cmakeFlags = 41 + [ 42 + (lib.cmakeBool "USE_SYSTEM_ABSEIL" true) 43 + (lib.cmakeBool "USE_SYSTEM_PROTOBUF" true) 44 + (lib.cmakeBool "USE_SYSTEM_PYBIND" true) 45 + 46 + # The find_package calls are local to the dependencies subdirectory 47 + (lib.cmakeBool "CMAKE_FIND_PACKAGE_TARGETS_GLOBAL" true) 48 + ] 49 + ++ lib.optionals stdenv.hostPlatform.isDarwin [ 50 + # Without it, Cmake prefers using Find-module which is mysteriously broken 51 + # But the generated Config works 52 + (lib.cmakeBool "CMAKE_FIND_PACKAGE_PREFER_CONFIG" true) 53 + ]; 54 55 meta = { 56 description = "Pybind11 bindings for Google's Protocol Buffers";
+18 -15
pkgs/development/python-modules/pyopencl/default.nix
··· 22 23 # tests 24 pytestCheckHook, 25 }: 26 27 buildPythonPackage rec { ··· 49 50 buildInputs = [ 51 opencl-headers 52 pybind11 53 - ] ++ lib.optionals (!stdenv.hostPlatform.isDarwin) [ ocl-icd ]; 54 55 dependencies = [ 56 numpy ··· 58 pytools 59 ]; 60 61 - nativeCheckInputs = [ pytestCheckHook ]; 62 63 preCheck = '' 64 - export HOME=$(mktemp -d) 65 - 66 - # https://github.com/NixOS/nixpkgs/issues/255262 67 - cd $out 68 ''; 69 70 - # https://github.com/inducer/pyopencl/issues/784 Note that these failing 71 - # tests are all the tests that are available. 72 - doCheck = false; 73 - 74 pythonImportsCheck = [ 75 "pyopencl" 76 "pyopencl.array" ··· 86 changelog = "https://github.com/inducer/pyopencl/releases/tag/v${version}"; 87 license = lib.licenses.mit; 88 maintainers = with lib.maintainers; [ GaetanLepage ]; 89 - broken = stdenv.hostPlatform.isDarwin && stdenv.hostPlatform.isAarch64; 90 - badPlatforms = [ 91 - # ld: symbol(s) not found for architecture arm64/x86_64 92 - lib.systems.inspect.patterns.isDarwin 93 - ]; 94 }; 95 }
··· 22 23 # tests 24 pytestCheckHook, 25 + writableTmpDirAsHomeHook, 26 + mako, 27 + pocl, 28 }: 29 30 buildPythonPackage rec { ··· 52 53 buildInputs = [ 54 opencl-headers 55 + ocl-icd 56 pybind11 57 + ]; 58 59 dependencies = [ 60 numpy ··· 62 pytools 63 ]; 64 65 + nativeCheckInputs = [ 66 + pocl 67 + mako 68 + pytestCheckHook 69 + writableTmpDirAsHomeHook 70 + ] ++ pytools.optional-dependencies.siphash; 71 + 72 + env = { 73 + CL_INC_DIR = "${opencl-headers}/include"; 74 + CL_LIB_DIR = "${ocl-icd}/lib"; 75 + CL_LIBNAME = "${ocl-icd}/lib/libOpenCL${stdenv.hostPlatform.extensions.sharedLibrary}"; 76 + }; 77 78 preCheck = '' 79 + rm -rf pyopencl 80 ''; 81 82 pythonImportsCheck = [ 83 "pyopencl" 84 "pyopencl.array" ··· 94 changelog = "https://github.com/inducer/pyopencl/releases/tag/v${version}"; 95 license = lib.licenses.mit; 96 maintainers = with lib.maintainers; [ GaetanLepage ]; 97 }; 98 }
-135
pkgs/development/python-modules/sentry-sdk/1.nix
··· 1 - { 2 - lib, 3 - stdenv, 4 - aiohttp, 5 - apache-beam, 6 - asttokens, 7 - blinker, 8 - bottle, 9 - buildPythonPackage, 10 - celery, 11 - certifi, 12 - chalice, 13 - django, 14 - executing, 15 - falcon, 16 - fetchFromGitHub, 17 - flask, 18 - gevent, 19 - httpx, 20 - jsonschema, 21 - mock, 22 - pure-eval, 23 - pyrsistent, 24 - pyspark, 25 - pysocks, 26 - pytest-forked, 27 - pytest-localserver, 28 - pytest-watch, 29 - pytestCheckHook, 30 - pythonOlder, 31 - quart, 32 - rq, 33 - sanic, 34 - setuptools, 35 - sqlalchemy, 36 - tornado, 37 - urllib3, 38 - }: 39 - 40 - buildPythonPackage rec { 41 - pname = "sentry-sdk"; 42 - version = "1.45.1"; 43 - pyproject = true; 44 - 45 - disabled = pythonOlder "3.7"; 46 - 47 - src = fetchFromGitHub { 48 - owner = "getsentry"; 49 - repo = "sentry-python"; 50 - rev = "refs/tags/${version}"; 51 - hash = "sha256-ZNJsxbQcW5g/bKqN18z+BspKyI34+vkj6vQ9akE1Ook="; 52 - }; 53 - 54 - build-system = [ setuptools ]; 55 - 56 - dependencies = [ 57 - certifi 58 - urllib3 59 - ]; 60 - 61 - optional-dependencies = { 62 - aiohttp = [ aiohttp ]; 63 - beam = [ apache-beam ]; 64 - bottle = [ bottle ]; 65 - celery = [ celery ]; 66 - chalice = [ chalice ]; 67 - django = [ django ]; 68 - falcon = [ falcon ]; 69 - flask = [ 70 - flask 71 - blinker 72 - ]; 73 - httpx = [ httpx ]; 74 - pyspark = [ pyspark ]; 75 - pure_eval = [ 76 - asttokens 77 - executing 78 - pure-eval 79 - ]; 80 - quart = [ 81 - quart 82 - blinker 83 - ]; 84 - rq = [ rq ]; 85 - sanic = [ sanic ]; 86 - sqlalchemy = [ sqlalchemy ]; 87 - tornado = [ tornado ]; 88 - }; 89 - 90 - nativeCheckInputs = [ 91 - asttokens 92 - executing 93 - gevent 94 - jsonschema 95 - mock 96 - pure-eval 97 - pyrsistent 98 - pysocks 99 - pytest-forked 100 - pytest-localserver 101 - pytest-watch 102 - pytestCheckHook 103 - ]; 104 - 105 - doCheck = pythonOlder "3.13" && !stdenv.hostPlatform.isDarwin; 106 - 107 - disabledTests = [ 108 - # Issue with the asseration 109 - 
"test_auto_enabling_integrations_catches_import_error" 110 - "test_default_release" 111 - ]; 112 - 113 - disabledTestPaths = 114 - [ 115 - # Various integration tests fail every once in a while when we 116 - # upgrade dependencies, so don't bother testing them. 117 - "tests/integrations/" 118 - ] 119 - ++ lib.optionals (stdenv.buildPlatform != "x86_64-linux") [ 120 - # test crashes on aarch64 121 - "tests/test_transport.py" 122 - ]; 123 - 124 - pythonImportsCheck = [ "sentry_sdk" ]; 125 - 126 - meta = with lib; { 127 - description = "Python SDK for Sentry.io"; 128 - homepage = "https://github.com/getsentry/sentry-python"; 129 - changelog = "https://github.com/getsentry/sentry-python/blob/${version}/CHANGELOG.md"; 130 - license = licenses.bsd2; 131 - maintainers = with maintainers; [ 132 - fab 133 - ]; 134 - }; 135 - }
···
+2 -2
pkgs/development/python-modules/swh-core/default.nix
··· 9 python-magic, 10 pyyaml, 11 requests, 12 - sentry-sdk_2, 13 tenacity, 14 setuptools, 15 setuptools-scm, ··· 65 python-magic 66 pyyaml 67 requests 68 - sentry-sdk_2 69 tenacity 70 ]; 71
··· 9 python-magic, 10 pyyaml, 11 requests, 12 + sentry-sdk, 13 tenacity, 14 setuptools, 15 setuptools-scm, ··· 65 python-magic 66 pyyaml 67 requests 68 + sentry-sdk 69 tenacity 70 ]; 71
+2 -2
pkgs/development/python-modules/voluptuous-openapi/default.nix
··· 16 17 buildPythonPackage rec { 18 pname = "voluptuous-openapi"; 19 - version = "0.0.6"; 20 pyproject = true; 21 22 src = fetchFromGitHub { 23 owner = "home-assistant-libs"; 24 repo = "voluptuous-openapi"; 25 tag = "v${version}"; 26 - hash = "sha256-D8TafOOGiZO2IpSE7aGB1CVey2zpZgGzZpUvagT9B6Q="; 27 }; 28 29 build-system = [ setuptools ];
··· 16 17 buildPythonPackage rec { 18 pname = "voluptuous-openapi"; 19 + version = "0.0.7"; 20 pyproject = true; 21 22 src = fetchFromGitHub { 23 owner = "home-assistant-libs"; 24 repo = "voluptuous-openapi"; 25 tag = "v${version}"; 26 + hash = "sha256-shgLUO4dFuvVG8K3yuK8FUsohIb4zgh7h6nvNiaYws0="; 27 }; 28 29 build-system = [ setuptools ];
+2 -2
pkgs/development/python-modules/wandb/default.nix
··· 28 pydantic, 29 pyyaml, 30 requests, 31 - sentry-sdk_2, 32 setproctitle, 33 setuptools, 34 pythonOlder, ··· 188 pydantic 189 pyyaml 190 requests 191 - sentry-sdk_2 192 setproctitle 193 # setuptools is necessary since pkg_resources is required at runtime. 194 setuptools
··· 28 pydantic, 29 pyyaml, 30 requests, 31 + sentry-sdk, 32 setproctitle, 33 setuptools, 34 pythonOlder, ··· 188 pydantic 189 pyyaml 190 requests 191 + sentry-sdk 192 setproctitle 193 # setuptools is necessary since pkg_resources is required at runtime. 194 setuptools
+2 -2
pkgs/development/python-modules/zha/default.nix
··· 27 28 buildPythonPackage rec { 29 pname = "zha"; 30 - version = "0.0.55"; 31 pyproject = true; 32 33 disabled = pythonOlder "3.12"; ··· 36 owner = "zigpy"; 37 repo = "zha"; 38 tag = version; 39 - hash = "sha256-LiHJk7xVdIqNl6BTfwlch3jqQm/MvbrKSExhOR5KD0c="; 40 }; 41 42 postPatch = ''
··· 27 28 buildPythonPackage rec { 29 pname = "zha"; 30 + version = "0.0.56"; 31 pyproject = true; 32 33 disabled = pythonOlder "3.12"; ··· 36 owner = "zigpy"; 37 repo = "zha"; 38 tag = version; 39 + hash = "sha256-k6Zegn7YUw5Wueb/8Z1RJ7teRMQ6Owdm8obM6XHI/aY="; 40 }; 41 42 postPatch = ''
+14 -22
pkgs/development/python-modules/zigpy/default.nix
··· 12 fetchFromGitHub, 13 freezegun, 14 frozendict, 15 - importlib-resources, 16 jsonschema, 17 - pycryptodome, 18 pyserial-asyncio, 19 pytest-asyncio, 20 pytest-timeout, ··· 27 28 buildPythonPackage rec { 29 pname = "zigpy"; 30 - version = "0.78.0"; 31 pyproject = true; 32 33 - disabled = pythonOlder "3.9"; 34 - 35 src = fetchFromGitHub { 36 owner = "zigpy"; 37 repo = "zigpy"; 38 tag = version; 39 - hash = "sha256-7ckpg1ukuASWtTdQn/P0KfXaMo5l2NyB9alCXeTarEU="; 40 }; 41 42 postPatch = '' ··· 47 48 build-system = [ setuptools ]; 49 50 - dependencies = 51 - [ 52 - attrs 53 - aiohttp 54 - aiosqlite 55 - crccheck 56 - cryptography 57 - frozendict 58 - jsonschema 59 - pyserial-asyncio 60 - typing-extensions 61 - pycryptodome 62 - voluptuous 63 - ] 64 - ++ lib.optionals (pythonOlder "3.9") [ importlib-resources ] 65 - ++ lib.optionals (pythonOlder "3.11") [ async-timeout ]; 66 67 nativeCheckInputs = [ 68 aioresponses
··· 12 fetchFromGitHub, 13 freezegun, 14 frozendict, 15 jsonschema, 16 pyserial-asyncio, 17 pytest-asyncio, 18 pytest-timeout, ··· 25 26 buildPythonPackage rec { 27 pname = "zigpy"; 28 + version = "0.78.1"; 29 pyproject = true; 30 31 src = fetchFromGitHub { 32 owner = "zigpy"; 33 repo = "zigpy"; 34 tag = version; 35 + hash = "sha256-b+4KqcswAKUNJb4e450VwmAR0mca9ApW4n+kif7BR7o="; 36 }; 37 38 postPatch = '' ··· 43 44 build-system = [ setuptools ]; 45 46 + dependencies = [ 47 + attrs 48 + aiohttp 49 + aiosqlite 50 + crccheck 51 + cryptography 52 + frozendict 53 + jsonschema 54 + pyserial-asyncio 55 + typing-extensions 56 + voluptuous 57 + ] ++ lib.optionals (pythonOlder "3.11") [ async-timeout ]; 58 59 nativeCheckInputs = [ 60 aioresponses
+8
pkgs/development/tools/godot/common.nix
··· 241 runHook post Install 242 ''; 243 244 passthru = { 245 tests = 246 let
··· 241 runHook post Install 242 ''; 243 244 + # patching $debug can crash patchelf 245 + # (https://github.com/NixOS/patchelf/issues/373), so explicitly patch $out 246 + dontAutoPatchelf = true; 247 + 248 + postFixup = '' 249 + autoPatchelf "$out" 250 + ''; 251 + 252 passthru = { 253 tests = 254 let
+2 -2
pkgs/development/tools/spr/default.nix
··· 19 meta = with lib; { 20 description = "Submit pull requests for individual, amendable, rebaseable commits to GitHub"; 21 mainProgram = "spr"; 22 - homepage = "https://github.com/getcord/spr"; 23 license = licenses.mit; 24 - maintainers = with maintainers; [ sven-of-cord ]; 25 }; 26 }
··· 19 meta = with lib; { 20 description = "Submit pull requests for individual, amendable, rebaseable commits to GitHub"; 21 mainProgram = "spr"; 22 + homepage = "https://github.com/spacedentist/spr"; 23 license = licenses.mit; 24 + maintainers = with maintainers; [ spacedentist ]; 25 }; 26 }
+1 -1
pkgs/servers/home-assistant/component-packages.nix
··· 2 # Do not edit! 3 4 { 5 - version = "2025.4.2"; 6 components = { 7 "3_day_blinds" = 8 ps: with ps; [
··· 2 # Do not edit! 3 4 { 5 + version = "2025.4.3"; 6 components = { 7 "3_day_blinds" = 8 ps: with ps; [
+2 -2
pkgs/servers/home-assistant/custom-components/daikin_onecta/package.nix
··· 7 buildHomeAssistantComponent rec { 8 owner = "jwillemsen"; 9 domain = "daikin_onecta"; 10 - version = "4.2.3"; 11 12 src = fetchFromGitHub { 13 owner = "jwillemsen"; 14 repo = "daikin_onecta"; 15 tag = "v${version}"; 16 - hash = "sha256-ylVHgmE6zDLE73KpuIag0iUx77w6ujp3cF/+x2bJiA8="; 17 }; 18 19 meta = {
··· 7 buildHomeAssistantComponent rec { 8 owner = "jwillemsen"; 9 domain = "daikin_onecta"; 10 + version = "4.2.6"; 11 12 src = fetchFromGitHub { 13 owner = "jwillemsen"; 14 repo = "daikin_onecta"; 15 tag = "v${version}"; 16 + hash = "sha256-JBo2205wHeC+5+kontzqgRLTss2Naht/TbkuEAs2nSQ="; 17 }; 18 19 meta = {
+2 -2
pkgs/servers/home-assistant/custom-components/midea_ac/package.nix
··· 8 buildHomeAssistantComponent rec { 9 owner = "mill1000"; 10 domain = "midea_ac"; 11 - version = "2025.3.1"; 12 13 src = fetchFromGitHub { 14 owner = "mill1000"; 15 repo = "midea-ac-py"; 16 tag = version; 17 - hash = "sha256-oO+t0my72PwWWUAzr8blA3Q8uJyICZNcfoOHsLFL3MQ="; 18 }; 19 20 dependencies = [ msmart-ng ];
··· 8 buildHomeAssistantComponent rec { 9 owner = "mill1000"; 10 domain = "midea_ac"; 11 + version = "2025.4.0"; 12 13 src = fetchFromGitHub { 14 owner = "mill1000"; 15 repo = "midea-ac-py"; 16 tag = version; 17 + hash = "sha256-ZkLC0GhfN+jp1DWv30LNVCP+NEZywt9Pxycs2RWBzrM="; 18 }; 19 20 dependencies = [ msmart-ng ];
+2 -2
pkgs/servers/home-assistant/custom-components/moonraker/package.nix
··· 8 buildHomeAssistantComponent rec { 9 owner = "marcolivierarsenault"; 10 domain = "moonraker"; 11 - version = "1.7.0"; 12 13 src = fetchFromGitHub { 14 owner = "marcolivierarsenault"; 15 repo = "moonraker-home-assistant"; 16 tag = version; 17 - hash = "sha256-J/MHT+yzV08sJMJCDoH6tpE86Mgz4fxpgbyWNge0n54="; 18 }; 19 20 dependencies = [
··· 8 buildHomeAssistantComponent rec { 9 owner = "marcolivierarsenault"; 10 domain = "moonraker"; 11 + version = "1.7.1"; 12 13 src = fetchFromGitHub { 14 owner = "marcolivierarsenault"; 15 repo = "moonraker-home-assistant"; 16 tag = version; 17 + hash = "sha256-BPlHMTGb1xSxFydeLsHKBlXSqgh1qmTrenPo+XPx2IM="; 18 }; 19 20 dependencies = [
+2 -2
pkgs/servers/home-assistant/custom-components/tuya_local/package.nix
··· 11 buildHomeAssistantComponent rec { 12 owner = "make-all"; 13 domain = "tuya_local"; 14 - version = "2025.3.0"; 15 16 src = fetchFromGitHub { 17 inherit owner; 18 repo = "tuya-local"; 19 tag = version; 20 - hash = "sha256-CkH2Njv9qdGlnEo3RP64Lz4ys62W6DxYC/TSl/lndPI="; 21 }; 22 23 dependencies = [
··· 11 buildHomeAssistantComponent rec { 12 owner = "make-all"; 13 domain = "tuya_local"; 14 + version = "2025.4.0"; 15 16 src = fetchFromGitHub { 17 inherit owner; 18 repo = "tuya-local"; 19 tag = version; 20 + hash = "sha256-lvDiB/kVlPsKI28muv03B8j/0RD7U4I4ZNiucji8/e4="; 21 }; 22 23 dependencies = [
+2 -2
pkgs/servers/home-assistant/custom-components/xiaomi_miot/package.nix
··· 11 buildHomeAssistantComponent rec { 12 owner = "al-one"; 13 domain = "xiaomi_miot"; 14 - version = "1.0.16"; 15 16 src = fetchFromGitHub { 17 owner = "al-one"; 18 repo = "hass-xiaomi-miot"; 19 rev = "v${version}"; 20 - hash = "sha256-wsc1XSq1KYq7Rs99XK/wL2PYWrxbMblWPHzgopeQ5IM="; 21 }; 22 23 dependencies = [
··· 11 buildHomeAssistantComponent rec { 12 owner = "al-one"; 13 domain = "xiaomi_miot"; 14 + version = "1.0.17"; 15 16 src = fetchFromGitHub { 17 owner = "al-one"; 18 repo = "hass-xiaomi-miot"; 19 rev = "v${version}"; 20 + hash = "sha256-jiskzH2MsddF1rdCnrdJyALQQxFrFUBpOjdR/zbTxh0="; 21 }; 22 23 dependencies = [
+3 -3
pkgs/servers/home-assistant/default.nix
··· 377 extraBuildInputs = extraPackages python.pkgs; 378 379 # Don't forget to run update-component-packages.py after updating 380 - hassVersion = "2025.4.2"; 381 382 in 383 python.pkgs.buildPythonApplication rec { ··· 398 owner = "home-assistant"; 399 repo = "core"; 400 tag = version; 401 - hash = "sha256-7OwkZ2KF1IxSNCXvr7Ex0nIKWWr78Zvma2WGaKM5cJE="; 402 }; 403 404 # Secondary source is pypi sdist for translations 405 sdist = fetchPypi { 406 inherit pname version; 407 - hash = "sha256-IBJxyPZgFrCiARgOzYEcC6Eu/hwoOFTH87lkb+6UYJ4="; 408 }; 409 410 build-system = with python.pkgs; [
··· 377 extraBuildInputs = extraPackages python.pkgs; 378 379 # Don't forget to run update-component-packages.py after updating 380 + hassVersion = "2025.4.3"; 381 382 in 383 python.pkgs.buildPythonApplication rec { ··· 398 owner = "home-assistant"; 399 repo = "core"; 400 tag = version; 401 + hash = "sha256-KyPWEGXSoB9BJolR4+Kq9K9urhXN4YcgV0SQYaAEjiA="; 402 }; 403 404 # Secondary source is pypi sdist for translations 405 sdist = fetchPypi { 406 inherit pname version; 407 + hash = "sha256-xo1f2GDeafOaXiJ1+l+NsJkpU0FvbSnflsp2BE/JKC4="; 408 }; 409 410 build-system = with python.pkgs; [
+3 -3
pkgs/servers/home-assistant/pytest-homeassistant-custom-component.nix
··· 18 19 buildPythonPackage rec { 20 pname = "pytest-homeassistant-custom-component"; 21 - version = "0.13.233"; 22 pyproject = true; 23 24 - disabled = pythonOlder "3.12"; 25 26 src = fetchFromGitHub { 27 owner = "MatthewFlamm"; 28 repo = "pytest-homeassistant-custom-component"; 29 rev = "refs/tags/${version}"; 30 - hash = "sha256-Yi865ZCK1Rr9Nto0HpqRqwCE6t/UPLR8lqfPv0P+bcs="; 31 }; 32 33 build-system = [ setuptools ];
··· 18 19 buildPythonPackage rec { 20 pname = "pytest-homeassistant-custom-component"; 21 + version = "0.13.235"; 22 pyproject = true; 23 24 + disabled = pythonOlder "3.13"; 25 26 src = fetchFromGitHub { 27 owner = "MatthewFlamm"; 28 repo = "pytest-homeassistant-custom-component"; 29 rev = "refs/tags/${version}"; 30 + hash = "sha256-hr9GZsyXj7ewEevyXs2PCMKFQahcco+gVFQi8kSxf9A="; 31 }; 32 33 build-system = [ setuptools ];
+2 -2
pkgs/servers/home-assistant/stubs.nix
··· 10 11 buildPythonPackage rec { 12 pname = "homeassistant-stubs"; 13 - version = "2025.4.2"; 14 pyproject = true; 15 16 disabled = python.version != home-assistant.python.version; ··· 19 owner = "KapJI"; 20 repo = "homeassistant-stubs"; 21 tag = version; 22 - hash = "sha256-SFMGBPVjgoLkPlYHuTKZLbhTMbKGEeE1wYsFJ0ssbgg="; 23 }; 24 25 build-system = [
··· 10 11 buildPythonPackage rec { 12 pname = "homeassistant-stubs"; 13 + version = "2025.4.3"; 14 pyproject = true; 15 16 disabled = python.version != home-assistant.python.version; ··· 19 owner = "KapJI"; 20 repo = "homeassistant-stubs"; 21 tag = version; 22 + hash = "sha256-IvtkEZLVngSHNb0nGJri/EW1t29KcrFvxdm6gjmrtz4="; 23 }; 24 25 build-system = [
+13 -1
pkgs/servers/home-assistant/tests.nix
··· 14 ibeacon-ble 15 ]; 16 hassio = getComponentDeps "homeassistant_yellow"; 17 - homeassistant_hardware = getComponentDeps "zha"; 18 homeassistant_sky_connect = getComponentDeps "zha"; 19 homeassistant_yellow = getComponentDeps "zha"; 20 husqvarna_automower_ble = getComponentDeps "gardena_bluetooth"; ··· 78 }; 79 80 extraPytestFlagsArray = { 81 dnsip = [ 82 # Tries to resolve DNS entries 83 "--deselect tests/components/dnsip/test_config_flow.py::test_options_flow" ··· 87 "--deselect tests/components/jellyfin/test_media_source.py::test_resolve" 88 "--deselect tests/components/jellyfin/test_media_source.py::test_audio_codec_resolve" 89 "--deselect tests/components/jellyfin/test_media_source.py::test_music_library" 90 ]; 91 modem_callerid = [ 92 # aioserial mock produces wrong state
··· 14 ibeacon-ble 15 ]; 16 hassio = getComponentDeps "homeassistant_yellow"; 17 + homeassistant_hardware = getComponentDeps "otbr" ++ getComponentDeps "zha"; 18 homeassistant_sky_connect = getComponentDeps "zha"; 19 homeassistant_yellow = getComponentDeps "zha"; 20 husqvarna_automower_ble = getComponentDeps "gardena_bluetooth"; ··· 78 }; 79 80 extraPytestFlagsArray = { 81 + backup = [ 82 + # outdated snapshot 83 + "--deselect tests/components/backup/test_sensors.py::test_sensors" 84 + ]; 85 + bmw_connected_drive = [ 86 + # outdated snapshot 87 + "--deselect tests/components/bmw_connected_drive/test_select.py::test_entity_state_attrs" 88 + ]; 89 dnsip = [ 90 # Tries to resolve DNS entries 91 "--deselect tests/components/dnsip/test_config_flow.py::test_options_flow" ··· 95 "--deselect tests/components/jellyfin/test_media_source.py::test_resolve" 96 "--deselect tests/components/jellyfin/test_media_source.py::test_audio_codec_resolve" 97 "--deselect tests/components/jellyfin/test_media_source.py::test_music_library" 98 + ]; 99 + matter = [ 100 + # outdated snapshot in eve_weather_sensor variant 101 + "--deselect tests/components/matter/test_number.py::test_numbers" 102 ]; 103 modem_callerid = [ 104 # aioserial mock produces wrong state
+6 -6
pkgs/servers/nextcloud/default.nix
··· 54 in 55 { 56 nextcloud29 = generic { 57 - version = "29.0.15"; 58 - hash = "sha256-iqvCDILYxxJk7oxAmXaaBbwzUWKAAd5aNHAswRKBfMA="; 59 packages = nextcloud29Packages; 60 }; 61 62 nextcloud30 = generic { 63 - version = "30.0.9"; 64 - hash = "sha256-gkWL1whsCCqHrR8UldkjuJ4jMRCajZXosA5jm70OHxY="; 65 packages = nextcloud30Packages; 66 }; 67 68 nextcloud31 = generic { 69 - version = "31.0.3"; 70 - hash = "sha256-koOuvY/aWtc5zaVvfKuqg1zDv5j3lTbDbczXMJ4rMFo="; 71 packages = nextcloud31Packages; 72 }; 73
··· 54 in 55 { 56 nextcloud29 = generic { 57 + version = "29.0.16"; 58 + hash = "sha256-SZv2GrGe3NTlQq+GYJJDxbT0QOtbsGwrp9oML6pSUyI="; 59 packages = nextcloud29Packages; 60 }; 61 62 nextcloud30 = generic { 63 + version = "30.0.10"; 64 + hash = "sha256-40ldF8X1yRZFQtk/Y21pasyPOLYL7HDPGtLnnHbZlbo="; 65 packages = nextcloud30Packages; 66 }; 67 68 nextcloud31 = generic { 69 + version = "31.0.4"; 70 + hash = "sha256-pHVBVm1casb2Pk9hfifaKVFW2kfaos0i7uNAD9KtElE="; 71 packages = nextcloud31Packages; 72 }; 73
+7 -7
pkgs/servers/nextcloud/packages/29.json
··· 240 ] 241 }, 242 "notes": { 243 - "hash": "sha256-dpMCehjhPQoOA+MVdLeGc370hmqWzmsMczgV08m/cO4=", 244 - "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.11.0/notes-v4.11.0.tar.gz", 245 - "version": "4.11.0", 246 - "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into third-party apps (currently, there are notes apps for [Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios) and the [console](https://git.danielmoch.com/nncli/about) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 247 "homepage": "https://github.com/nextcloud/notes", 248 "licenses": [ 249 "agpl" ··· 340 ] 341 }, 342 "sociallogin": { 343 - "hash": "sha256-M2sITpieWvl2WPjxWHtyyZRNQPagYLahVaJcDoiTsh8=", 344 - "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v5.9.2/release.tar.gz", 345 - "version": "5.9.2", 346 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. 
For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* 
[Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. 
To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. 
For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 347 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 348 "licenses": [
··· 240 ] 241 }, 242 "notes": { 243 + "hash": "sha256-UdqK6DiC67YPcy84wFEZaT8AQLDhhNndLiEesQeBY7M=", 244 + "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.12.0/notes-v4.12.0.tar.gz", 245 + "version": "4.12.0", 246 + "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into apps ([Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios), as well as [3rd-party apps](https://github.com/nextcloud/notes/wiki#3rd-party-clients) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 247 "homepage": "https://github.com/nextcloud/notes", 248 "licenses": [ 249 "agpl" ··· 340 ] 341 }, 342 "sociallogin": { 343 + "hash": "sha256-DNf48YmVJ49v+lynTCIBTZhPi/S1mjyIF5OWf+UVKeY=", 344 + "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v6.0.1/release.tar.gz", 345 + "version": "6.0.1", 346 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. 
For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* 
[Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. 
To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. 
For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 347 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 348 "licenses": [
+13 -13
pkgs/servers/nextcloud/packages/30.json
··· 40 ] 41 }, 42 "contacts": { 43 - "hash": "sha256-suiKZfa+nL9xMFkkZwlrrGiicoIf5zyxpNXS3q7nCC8=", 44 - "url": "https://github.com/nextcloud-releases/contacts/releases/download/v7.0.5/contacts-v7.0.5.tar.gz", 45 - "version": "7.0.5", 46 "description": "The Nextcloud contacts app is a user interface for Nextcloud's CardDAV server. Easily sync contacts from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Mail and Calendar – more to come.\n* 🎉 **Never forget a birthday!** You can sync birthdays and other recurring events with your Nextcloud Calendar.\n* 👥 **Sharing of Adressbooks!** You want to share your contacts with your friends or coworkers? No problem!\n* 🙈 **We’re not reinventing the wheel!** Based on the great and open SabreDAV library.", 47 "homepage": "https://github.com/nextcloud/contacts#readme", 48 "licenses": [ ··· 190 ] 191 }, 192 "mail": { 193 - "hash": "sha256-PeDfYIaU1HNONCI/aNwsMv0gBUArATj/dXKUW52ejW8=", 194 - "url": "https://github.com/nextcloud-releases/mail/releases/download/v4.3.6/mail-v4.3.6.tar.gz", 195 - "version": "4.3.6", 196 "description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files – more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. 
Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 We’re not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).", 197 "homepage": "https://github.com/nextcloud/mail#readme", 198 "licenses": [ ··· 240 ] 241 }, 242 "notes": { 243 - "hash": "sha256-dpMCehjhPQoOA+MVdLeGc370hmqWzmsMczgV08m/cO4=", 244 - "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.11.0/notes-v4.11.0.tar.gz", 245 - "version": "4.11.0", 246 - "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. 
Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into third-party apps (currently, there are notes apps for [Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios) and the [console](https://git.danielmoch.com/nncli/about) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 247 "homepage": "https://github.com/nextcloud/notes", 248 "licenses": [ 249 "agpl" ··· 340 ] 341 }, 342 "sociallogin": { 343 - "hash": "sha256-M2sITpieWvl2WPjxWHtyyZRNQPagYLahVaJcDoiTsh8=", 344 - "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v5.9.2/release.tar.gz", 345 - "version": "5.9.2", 346 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. 
For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* [Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma 
separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure 
Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 347 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 348 "licenses": [
··· 40 ] 41 }, 42 "contacts": { 43 + "hash": "sha256-3G1di/PnOAIML2vwKglmuMApvn8+nXYjdqnySSSoLDI=", 44 + "url": "https://github.com/nextcloud-releases/contacts/releases/download/v7.0.6/contacts-v7.0.6.tar.gz", 45 + "version": "7.0.6", 46 "description": "The Nextcloud contacts app is a user interface for Nextcloud's CardDAV server. Easily sync contacts from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Mail and Calendar – more to come.\n* 🎉 **Never forget a birthday!** You can sync birthdays and other recurring events with your Nextcloud Calendar.\n* 👥 **Sharing of Adressbooks!** You want to share your contacts with your friends or coworkers? No problem!\n* 🙈 **We’re not reinventing the wheel!** Based on the great and open SabreDAV library.", 47 "homepage": "https://github.com/nextcloud/contacts#readme", 48 "licenses": [ ··· 190 ] 191 }, 192 "mail": { 193 + "hash": "sha256-AV0vrDU4zeg7AQQpJkj5mHQatxCa2RMON5tY4Q/OjyM=", 194 + "url": "https://github.com/nextcloud-releases/mail/releases/download/v5.0.0/mail-v5.0.0.tar.gz", 195 + "version": "5.0.0", 196 "description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files – more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. 
Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 We’re not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).", 197 "homepage": "https://github.com/nextcloud/mail#readme", 198 "licenses": [ ··· 240 ] 241 }, 242 "notes": { 243 + "hash": "sha256-UdqK6DiC67YPcy84wFEZaT8AQLDhhNndLiEesQeBY7M=", 244 + "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.12.0/notes-v4.12.0.tar.gz", 245 + "version": "4.12.0", 246 + "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. 
Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into apps ([Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios), as well as [3rd-party apps](https://github.com/nextcloud/notes/wiki#3rd-party-clients) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 247 "homepage": "https://github.com/nextcloud/notes", 248 "licenses": [ 249 "agpl" ··· 340 ] 341 }, 342 "sociallogin": { 343 + "hash": "sha256-DNf48YmVJ49v+lynTCIBTZhPi/S1mjyIF5OWf+UVKeY=", 344 + "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v6.0.1/release.tar.gz", 345 + "version": "6.0.1", 346 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. 
For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* [Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma 
separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure 
Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 347 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 348 "licenses": [
+13 -13
pkgs/servers/nextcloud/packages/31.json
··· 40 ] 41 }, 42 "contacts": { 43 - "hash": "sha256-suiKZfa+nL9xMFkkZwlrrGiicoIf5zyxpNXS3q7nCC8=", 44 - "url": "https://github.com/nextcloud-releases/contacts/releases/download/v7.0.5/contacts-v7.0.5.tar.gz", 45 - "version": "7.0.5", 46 "description": "The Nextcloud contacts app is a user interface for Nextcloud's CardDAV server. Easily sync contacts from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Mail and Calendar – more to come.\n* 🎉 **Never forget a birthday!** You can sync birthdays and other recurring events with your Nextcloud Calendar.\n* 👥 **Sharing of Adressbooks!** You want to share your contacts with your friends or coworkers? No problem!\n* 🙈 **We’re not reinventing the wheel!** Based on the great and open SabreDAV library.", 47 "homepage": "https://github.com/nextcloud/contacts#readme", 48 "licenses": [ ··· 190 ] 191 }, 192 "mail": { 193 - "hash": "sha256-PeDfYIaU1HNONCI/aNwsMv0gBUArATj/dXKUW52ejW8=", 194 - "url": "https://github.com/nextcloud-releases/mail/releases/download/v4.3.6/mail-v4.3.6.tar.gz", 195 - "version": "4.3.6", 196 "description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files – more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. 
Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 We’re not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).", 197 "homepage": "https://github.com/nextcloud/mail#readme", 198 "licenses": [ ··· 230 ] 231 }, 232 "notes": { 233 - "hash": "sha256-dpMCehjhPQoOA+MVdLeGc370hmqWzmsMczgV08m/cO4=", 234 - "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.11.0/notes-v4.11.0.tar.gz", 235 - "version": "4.11.0", 236 - "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. 
Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into third-party apps (currently, there are notes apps for [Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios) and the [console](https://git.danielmoch.com/nncli/about) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 237 "homepage": "https://github.com/nextcloud/notes", 238 "licenses": [ 239 "agpl" ··· 330 ] 331 }, 332 "sociallogin": { 333 - "hash": "sha256-M2sITpieWvl2WPjxWHtyyZRNQPagYLahVaJcDoiTsh8=", 334 - "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v5.9.2/release.tar.gz", 335 - "version": "5.9.2", 336 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. 
For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* [Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma 
separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure 
Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 337 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 338 "licenses": [
··· 40 ] 41 }, 42 "contacts": { 43 + "hash": "sha256-3G1di/PnOAIML2vwKglmuMApvn8+nXYjdqnySSSoLDI=", 44 + "url": "https://github.com/nextcloud-releases/contacts/releases/download/v7.0.6/contacts-v7.0.6.tar.gz", 45 + "version": "7.0.6", 46 "description": "The Nextcloud contacts app is a user interface for Nextcloud's CardDAV server. Easily sync contacts from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Mail and Calendar – more to come.\n* 🎉 **Never forget a birthday!** You can sync birthdays and other recurring events with your Nextcloud Calendar.\n* 👥 **Sharing of Adressbooks!** You want to share your contacts with your friends or coworkers? No problem!\n* 🙈 **We’re not reinventing the wheel!** Based on the great and open SabreDAV library.", 47 "homepage": "https://github.com/nextcloud/contacts#readme", 48 "licenses": [ ··· 190 ] 191 }, 192 "mail": { 193 + "hash": "sha256-AV0vrDU4zeg7AQQpJkj5mHQatxCa2RMON5tY4Q/OjyM=", 194 + "url": "https://github.com/nextcloud-releases/mail/releases/download/v5.0.0/mail-v5.0.0.tar.gz", 195 + "version": "5.0.0", 196 "description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files – more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. 
Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 We’re not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).", 197 "homepage": "https://github.com/nextcloud/mail#readme", 198 "licenses": [ ··· 230 ] 231 }, 232 "notes": { 233 + "hash": "sha256-UdqK6DiC67YPcy84wFEZaT8AQLDhhNndLiEesQeBY7M=", 234 + "url": "https://github.com/nextcloud-releases/notes/releases/download/v4.12.0/notes-v4.12.0.tar.gz", 235 + "version": "4.12.0", 236 + "description": "The Notes app is a distraction free notes taking app for [Nextcloud](https://www.nextcloud.com/). It provides categories for better organization and supports formatting using [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax. Notes are saved as files in your Nextcloud, so you can view and edit them with every Nextcloud client. 
Furthermore, a separate [REST API](https://github.com/nextcloud/notes/blob/master/docs/api/README.md) allows for an easy integration into apps ([Android](https://github.com/nextcloud/notes-android), [iOS](https://github.com/nextcloud/notes-ios), as well as [3rd-party apps](https://github.com/nextcloud/notes/wiki#3rd-party-clients) which allow convenient access to your Nextcloud notes). Further features include marking notes as favorites.", 237 "homepage": "https://github.com/nextcloud/notes", 238 "licenses": [ 239 "agpl" ··· 330 ] 331 }, 332 "sociallogin": { 333 + "hash": "sha256-DNf48YmVJ49v+lynTCIBTZhPi/S1mjyIF5OWf+UVKeY=", 334 + "url": "https://github.com/zorn-v/nextcloud-social-login/releases/download/v6.0.1/release.tar.gz", 335 + "version": "6.0.1", 336 "description": "# Social login\n\nMake possible create users and login via Telegram, OAuth or OpenID\n\nFor OAuth you must create app for certain providers. Login button appear at login page if app id specified. Settings are in \"Social login\" section of settings page.\n\n## Installation\n\nLogin to your NextCloud installation as an administrator and under \"Apps\" click \"Download and enable\" next to the \"Social Login\" app.\n\nSee below for setup and configuration instructions.\n\n\n\n## Custom OAuth2/OIDC groups\n\nYou can use groups from your custom provider. For that you should specify \"Groups claim\" in custom OAuth2/OIDC provider settings. That claim should be returned from provider in `id_token` or at user info endpoint. Format should be `array` or comma separated string. Eg (with claim named `roles`)\n\n```json\n{\"roles\": [\"admin\", \"user\"]}\n```\nor\n```json\n{\"roles\": \"admin,user\"}\n```\n\nAlso nested claims is supported. 
For example `resource_access.client-id.roles` for\n\n```json\n\"resource_access\": {\n \"client-id\": {\n \"roles\": [\n \"client-role-1\",\n \"client-role-2\"\n ]\n }\n}\n```\n\nThere is also support for setting the displayName:\n```\n{\"roles\": [{gid: 1, displayName: \"admin\"}, {gid: 2, displayName: \"user\"}]}\n```\n\n\nYou can use provider groups in two ways:\n\n1. Map provider groups to existing nextcloud groups\n2. Create provider groups in nextcloud and associate it to user (if appropriate option specified)\n\nIf you want sync groups on every login do not forget to check \"Update user profile every login\" setting\n\n## Examples for groups\n\n* You can find example how to configure WSO2IS for return roles claim with OIDC [here](https://medium.com/@dewni.matheesha/claim-mapping-and-retrieving-end-user-information-in-wso2is-cffd5f3937ff)\n* [GitLab OIDC allowing specific GitLab groups](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/gitlab.md)\n\n## Built-in OAuth providers\n\nYou can copy link of certain login button to get proper \"redirect url\" for OAuth app setting.\n\n* [Google](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/google.md)\n* [Amazon](https://developer.amazon.com/loginwithamazon/console/site/lwa/overview.html)\n* [Facebook](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/facebook.md)\n* [Twitter](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/twitter.md)\n* [GitHub](https://github.com/settings/developers)\n* [Discord](#configure-discord)\n* [Telegram](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/telegram.md)\n* PlexTv - you can use any title as app id\n* [Codeberg](https://github.com/zorn-v/nextcloud-social-login/blob/master/docs/sso/codeberg.md)\n\nDetails about \"Allow login only from specified domain\" google setting you can find here [#44](https://github.com/zorn-v/nextcloud-social-login/issues/44)\nYou can use comma 
separated list for multiple domains\n\n## Config\n\nYou can use `'social_login_auto_redirect' => true` setting in `config.php` for auto redirect unauthorized users to social login if only one provider is configured.\nIf you want to temporary disable this function (e.g. for login as local admin), you can add `noredir=1` query parameter in url for login page. Something like `https://cloud.domain.com/login?noredir=1`\n\nTo set options for http client, you can use\n```php\n 'social_login_http_client' => [\n 'timeout' => 45,\n 'proxy' => 'socks4://127.0.0.1:9050', // Check https://curl.se/libcurl/c/CURLOPT_PROXY.html for allowed variants\n ],\n```\nin `config.php`\n\n### Configurate a provider via CLI\n\nYou can configure everything from commandline by using the occ utility. To setup a oidc-provider replace the variables and URLs with values that match your deployment.\n```bash\nphp occ config:app:set sociallogin custom_providers --value='{\"custom_oidc\": [{\"name\": \"gitlab_oidc\", \"title\": \"Gitlab\", \"authorizeUrl\": \"https://gitlab.my-domain.org/oauth/authorize\", \"tokenUrl\": \"https://gitlab.my-domain.org/oauth/token\", \"userInfoUrl\": \"https://gitlab.my-domain.org/oauth/userinfo\", \"logoutUrl\": \"\", \"clientId\": \"$my_application_id\", \"clientSecret\": \"$my_super_secret_secret\", \"scope\": \"openid\", \"groupsClaim\": \"groups\", \"style\": \"gitlab\", \"defaultGroup\": \"\"}]}'\n```\nto do this with docker you just need to add `docker exec -t -uwww-data CONTAINER_NAME` in front of the command, or run it interactively from `docker exec -it -uwww-data CONTAINER_NAME sh`\n\nTo find out how to configure other providers, just configure them in the GUI and take a look at the database afterwards:\n```\nmysql -u nextcloud -p nextcloud\nPassword: <yourpassword>\n\n> SELECT * FROM oc_appconfig WHERE appid='sociallogin';\n```\n\nOr just run\n\n`docker exec -t -uwww-data CONTAINER_NAME php occ config:app:get sociallogin custom_providers`\n\n### Configure 
Discord\n\nTo properly configure discord you have to:\n\n1. Create new discord application on [DiscordApp developers](https://discordapp.com/developers/applications/me#top)\n2. Open tab `Settings -> OAuth2 -> General`. In `Redirects` add new redirection link looking like this: `https://nextcloud.mydomain.com/apps/sociallogin/oauth/discord`.\n3. Copy `CLIENT ID` and generate and copy `CLIENT SECRET`\n4. Open in Nextcloud `Settings -> Social Login` and paste `CLIENT ID` into field `App id` and `CLIENT SECRET` into `Secret`.\n5. Select default group for users created this way.\n6. For group mapping check [#395](https://github.com/zorn-v/nextcloud-social-login/pull/395)\n\n## Hint\n\n### About Callback(Reply) Url\nYou can copy link from specific login button on login page and paste it on provider's website as callback url. To make proper button visible, just fill certain provider settings with random data and change it later.\n\nSome users may get strange reply(Callback) url error from provider even if you pasted the right url, that's because your nextcloud server may generate http urls when you are actually using https.\nPlease set 'overwriteprotocol' => 'https', in your config.php file.", 337 "homepage": "https://github.com/zorn-v/nextcloud-social-login", 338 "licenses": [
-27
pkgs/servers/tracing/honeycomb/honeymarker/default.nix
··· 1 - { 2 - lib, 3 - buildGoModule, 4 - fetchFromGitHub, 5 - }: 6 - import ./versions.nix ( 7 - { version, sha256 }: 8 - buildGoModule { 9 - pname = "honeymarker"; 10 - inherit version; 11 - vendorHash = "sha256-ZuDobjC/nizZ7G0o/zVTQmDfDjcdBhfPcmkhgwFc7VU="; 12 - 13 - src = fetchFromGitHub { 14 - owner = "honeycombio"; 15 - repo = "honeymarker"; 16 - rev = "v${version}"; 17 - hash = sha256; 18 - }; 19 - 20 - meta = with lib; { 21 - description = "provides a simple CRUD interface for dealing with per-dataset markers on honeycomb.io"; 22 - homepage = "https://honeycomb.io/"; 23 - license = licenses.asl20; 24 - maintainers = [ maintainers.iand675 ]; 25 - }; 26 - } 27 - )
···
-6
pkgs/servers/tracing/honeycomb/honeymarker/versions.nix
··· 1 - generic: { 2 - v0_2_1 = generic { 3 - version = "0.2.1"; 4 - sha256 = "sha256-tiwX94CRvXnUYpiux94XhOj2abn1Uc+wjcDOmw79ab4="; 5 - }; 6 - }
···
-27
pkgs/servers/tracing/honeycomb/honeytail/default.nix
··· 1 - { 2 - lib, 3 - buildGoModule, 4 - fetchFromGitHub, 5 - }: 6 - import ./versions.nix ( 7 - { version, sha256 }: 8 - buildGoModule { 9 - pname = "honeytail"; 10 - inherit version; 11 - vendorHash = "sha256-LtiiLGLjhbfT49A6Fw5CbSbnmTHMxtcUssr+ayCVrvY="; 12 - 13 - src = fetchFromGitHub { 14 - owner = "honeycombio"; 15 - repo = "honeytail"; 16 - rev = "v${version}"; 17 - hash = sha256; 18 - }; 19 - 20 - meta = with lib; { 21 - description = "agent for ingesting log file data into honeycomb.io and making it available for exploration"; 22 - homepage = "https://honeycomb.io/"; 23 - license = licenses.asl20; 24 - maintainers = [ maintainers.iand675 ]; 25 - }; 26 - } 27 - )
···
-6
pkgs/servers/tracing/honeycomb/honeytail/versions.nix
··· 1 - generic: { 2 - v1_6_0 = generic { 3 - version = "1.6.0"; 4 - sha256 = "sha256-S0hIgNNzF1eNe+XJs+PT7EUIl5oJCXu+B/zQago4sf8="; 5 - }; 6 - }
···
-27
pkgs/servers/tracing/honeycomb/honeyvent/default.nix
··· 1 - { 2 - lib, 3 - buildGoModule, 4 - fetchFromGitHub, 5 - }: 6 - import ./versions.nix ( 7 - { version, sha256 }: 8 - buildGoModule { 9 - pname = "honeyvent"; 10 - inherit version; 11 - vendorHash = null; 12 - 13 - src = fetchFromGitHub { 14 - owner = "honeycombio"; 15 - repo = "honeyvent"; 16 - rev = "v${version}"; 17 - hash = sha256; 18 - }; 19 - 20 - meta = with lib; { 21 - description = "CLI for sending individual events to honeycomb.io"; 22 - homepage = "https://honeycomb.io/"; 23 - license = licenses.asl20; 24 - maintainers = [ maintainers.iand675 ]; 25 - }; 26 - } 27 - )
···
-6
pkgs/servers/tracing/honeycomb/honeyvent/versions.nix
··· 1 - generic: { 2 - v1_1_0 = generic { 3 - version = "1.1.0"; 4 - sha256 = "sha256-yFQEOshjaH6fRCQ7IZChANI9guZlTXk35p1NzQvxUdI="; 5 - }; 6 - }
···
+6 -2
pkgs/stdenv/generic/check-meta.nix
··· 108 109 hasUnfreeLicense = attrs: hasLicense attrs && isUnfree attrs.meta.license; 110 111 - hasNoMaintainers = attrs: attrs ? meta.maintainers && (length attrs.meta.maintainers) == 0; 112 113 isMarkedBroken = attrs: attrs.meta.broken or false; 114 ··· 368 ]; 369 sourceProvenance = listOf attrs; 370 maintainers = listOf (attrsOf any); # TODO use the maintainer type from lib/tests/maintainer-module.nix 371 priority = int; 372 pkgConfigModules = listOf str; 373 inherit platforms; ··· 534 { 535 valid = "warn"; 536 reason = "maintainerless"; 537 - errormsg = "has no maintainers"; 538 } 539 # ----- 540 else
··· 108 109 hasUnfreeLicense = attrs: hasLicense attrs && isUnfree attrs.meta.license; 110 111 + hasNoMaintainers = 112 + attrs: 113 + (attrs ? meta.maintainers && (length attrs.meta.maintainers) == 0) 114 + && (attrs ? meta.teams && (length attrs.meta.teams) == 0); 115 116 isMarkedBroken = attrs: attrs.meta.broken or false; 117 ··· 371 ]; 372 sourceProvenance = listOf attrs; 373 maintainers = listOf (attrsOf any); # TODO use the maintainer type from lib/tests/maintainer-module.nix 374 + teams = listOf (attrsOf any); # TODO similar to maintainers, use a teams type 375 priority = int; 376 pkgConfigModules = listOf str; 377 inherit platforms; ··· 538 { 539 valid = "warn"; 540 reason = "maintainerless"; 541 + errormsg = "has no maintainers or teams"; 542 } 543 # ----- 544 else
+3 -21
pkgs/top-level/all-packages.nix
··· 2025 2026 birdtray = libsForQt5.callPackage ../applications/misc/birdtray { }; 2027 2028 - charles = charles4; 2029 inherit (callPackages ../applications/networking/charles { }) 2030 charles3 2031 charles4 2032 ; 2033 2034 quaternion-qt5 = ··· 6070 stdenv = gcc10Stdenv; 6071 }; 6072 6073 - fstar = callPackage ../development/compilers/fstar { 6074 - ocamlPackages = ocaml-ng.ocamlPackages_4_14; 6075 - z3 = z3_4_8_5; 6076 - }; 6077 - 6078 dotnetPackages = recurseIntoAttrs (callPackage ./dotnet-packages.nix { }); 6079 6080 gopro-tool = callPackage ../by-name/go/gopro-tool/package.nix { ··· 8419 botan2 8420 botan3 8421 ; 8422 - 8423 - box2d = callPackage ../development/libraries/box2d { }; 8424 8425 c-ares = callPackage ../development/libraries/c-ares { }; 8426 ··· 16944 isabelle = callPackage ../by-name/is/isabelle/package.nix { 16945 polyml = polyml.overrideAttrs { 16946 pname = "polyml-for-isabelle"; 16947 - version = "2024"; 16948 configureFlags = [ 16949 "--enable-intinf-as-int" 16950 "--with-gmp" 16951 "--disable-shared" 16952 ]; 16953 buildFlags = [ "compiler" ]; 16954 - src = fetchFromGitHub { 16955 - owner = "polyml"; 16956 - repo = "polyml"; 16957 - rev = "v5.9.1"; 16958 - hash = "sha256-72wm8dt+Id59A5058mVE5P9TkXW5/LZRthZoxUustVA="; 16959 - }; 16960 }; 16961 16962 java = openjdk21; ··· 18061 zrythm = callPackage ../applications/audio/zrythm { 18062 inherit (plasma5Packages) breeze-icons; 18063 }; 18064 - 18065 - honeymarker = callPackage ../servers/tracing/honeycomb/honeymarker { }; 18066 - 18067 - honeytail = callPackage ../servers/tracing/honeycomb/honeytail { }; 18068 - 18069 - honeyvent = callPackage ../servers/tracing/honeycomb/honeyvent { }; 18070 18071 swift-corelibs-libdispatch = swiftPackages.Dispatch; 18072
··· 2025 2026 birdtray = libsForQt5.callPackage ../applications/misc/birdtray { }; 2027 2028 + charles = charles5; 2029 inherit (callPackages ../applications/networking/charles { }) 2030 charles3 2031 charles4 2032 + charles5 2033 ; 2034 2035 quaternion-qt5 = ··· 6071 stdenv = gcc10Stdenv; 6072 }; 6073 6074 dotnetPackages = recurseIntoAttrs (callPackage ./dotnet-packages.nix { }); 6075 6076 gopro-tool = callPackage ../by-name/go/gopro-tool/package.nix { ··· 8415 botan2 8416 botan3 8417 ; 8418 8419 c-ares = callPackage ../development/libraries/c-ares { }; 8420 ··· 16938 isabelle = callPackage ../by-name/is/isabelle/package.nix { 16939 polyml = polyml.overrideAttrs { 16940 pname = "polyml-for-isabelle"; 16941 + version = "2025"; 16942 configureFlags = [ 16943 "--enable-intinf-as-int" 16944 "--with-gmp" 16945 "--disable-shared" 16946 ]; 16947 buildFlags = [ "compiler" ]; 16948 }; 16949 16950 java = openjdk21; ··· 18049 zrythm = callPackage ../applications/audio/zrythm { 18050 inherit (plasma5Packages) breeze-icons; 18051 }; 18052 18053 swift-corelibs-libdispatch = swiftPackages.Dispatch; 18054
+1
pkgs/top-level/python-aliases.nix
··· 694 scikits-samplerate = throw "scikits-samplerate has been removed, it was unsed and unmaintained since 2015"; # added 2024-05-23 695 selectors2 = throw "selectors2 has been removed: archived by upstream."; # added 2024-07-27 696 selectors34 = throw "selectors34 has been removed: functionality provided by Python itself; archived by upstream."; # added 2021-06-10 697 sequoia = throw "python3Packages.sequoia was replaced by pysequoia - built from a dedicated repository, with a new API."; # added 2023-06-24 698 setuptools_dso = setuptools-dso; # added 2024-03-03 699 setuptools_scm = setuptools-scm; # added 2021-06-03
··· 694 scikits-samplerate = throw "scikits-samplerate has been removed, it was unsed and unmaintained since 2015"; # added 2024-05-23 695 selectors2 = throw "selectors2 has been removed: archived by upstream."; # added 2024-07-27 696 selectors34 = throw "selectors34 has been removed: functionality provided by Python itself; archived by upstream."; # added 2021-06-10 697 + sentry-sdk_2 = sentry-sdk; # added 2025-04-20 698 sequoia = throw "python3Packages.sequoia was replaced by pysequoia - built from a dedicated repository, with a new API."; # added 2023-06-24 699 setuptools_dso = setuptools-dso; # added 2024-03-03 700 setuptools_scm = setuptools-scm; # added 2021-06-03
+8 -5
pkgs/top-level/python-packages.nix
··· 7274 7275 kaggle = callPackage ../development/python-modules/kaggle { }; 7276 7277 kaitaistruct = callPackage ../development/python-modules/kaitaistruct { }; 7278 7279 kajiki = callPackage ../development/python-modules/kajiki { }; ··· 15538 15539 sentinels = callPackage ../development/python-modules/sentinels { }; 15540 15541 - sentry-sdk = sentry-sdk_1; 15542 - 15543 - sentry-sdk_1 = callPackage ../development/python-modules/sentry-sdk/1.nix { }; 15544 - 15545 - sentry-sdk_2 = callPackage ../development/python-modules/sentry-sdk/default.nix { }; 15546 15547 sepaxml = callPackage ../development/python-modules/sepaxml { }; 15548
··· 7274 7275 kaggle = callPackage ../development/python-modules/kaggle { }; 7276 7277 + kahip = toPythonModule ( 7278 + pkgs.kahip.override { 7279 + pythonSupport = true; 7280 + python3Packages = self; 7281 + } 7282 + ); 7283 + 7284 kaitaistruct = callPackage ../development/python-modules/kaitaistruct { }; 7285 7286 kajiki = callPackage ../development/python-modules/kajiki { }; ··· 15545 15546 sentinels = callPackage ../development/python-modules/sentinels { }; 15547 15548 + sentry-sdk = callPackage ../development/python-modules/sentry-sdk/default.nix { }; 15549 15550 sepaxml = callPackage ../development/python-modules/sepaxml { }; 15551