Merge staging-next into staging

authored by github-actions[bot] and committed by GitHub 6234dd88 6f30d456

+943 -250
+16
nixos/doc/manual/from_md/release-notes/rl-2211.section.xml
··· 133 133 </listitem> 134 134 <listitem> 135 135 <para> 136 + <link xlink:href="https://hbase.apache.org/">HBase 137 + cluster</link>, a distributed, scalable, big data store. 138 + Available as 139 + <link xlink:href="options.html#opt-services.hadoop.hbase.enable">services.hadoop.hbase</link>. 140 + </para> 141 + </listitem> 142 + <listitem> 143 + <para> 136 144 <link xlink:href="https://github.com/leetronics/infnoise">infnoise</link>, 137 145 a hardware True Random Number Generator dongle. Available as 138 146 <link xlink:href="options.html#opt-services.infnoise.enable">services.infnoise</link>. ··· 255 263 <link xlink:href="https://www.barco.com/de/support/knowledge-base/4380-can-i-use-linux-os-with-clickshare-base-units">According 256 264 to Barco</link> many of their base unit models can be used 257 265 with Google Chrome and the Google Cast extension. 266 + </para> 267 + </listitem> 268 + <listitem> 269 + <para> 270 + <literal>services.hbase</literal> has been renamed to 271 + <literal>services.hbase-standalone</literal>. For production 272 + HBase clusters, use <literal>services.hadoop.hbase</literal> 273 + instead. 258 274 </para> 259 275 </listitem> 260 276 <listitem>
+5
nixos/doc/manual/release-notes/rl-2211.section.md
··· 58 58 59 59 - [dragonflydb](https://dragonflydb.io/), a modern replacement for Redis and Memcached. Available as [services.dragonflydb](#opt-services.dragonflydb.enable). 60 60 61 + - [HBase cluster](https://hbase.apache.org/), a distributed, scalable, big data store. Available as [services.hadoop.hbase](options.html#opt-services.hadoop.hbase.enable). 62 + 61 63 - [infnoise](https://github.com/leetronics/infnoise), a hardware True Random Number Generator dongle. 62 64 Available as [services.infnoise](options.html#opt-services.infnoise.enable). 63 65 ··· 99 101 - The Barco ClickShare driver/client package `pkgs.clickshare-csc1` and the option `programs.clickshare-csc1.enable` have been removed, 100 102 as it requires `qt4`, which reached its end-of-life 2015 and will no longer be supported by nixpkgs. 101 103 [According to Barco](https://www.barco.com/de/support/knowledge-base/4380-can-i-use-linux-os-with-clickshare-base-units) many of their base unit models can be used with Google Chrome and the Google Cast extension. 104 + 105 + - `services.hbase` has been renamed to `services.hbase-standalone`. 106 + For production HBase clusters, use `services.hadoop.hbase` instead. 102 107 103 108 - PHP 7.4 is no longer supported due to upstream not supporting this 104 109 version for the entire lifecycle of the 22.11 release.
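
Both release-note entries above describe the same additions: a distributed HBase module under services.hadoop and the rename of the old standalone service. As a minimal sketch of a node carrying the master role, assuming HDFS and ZooKeeper are already reachable (the hostnames here are placeholders; the module itself and a NixOS test appear later in this diff):

  {
    services.hadoop = {
      coreSite."fs.defaultFS" = "hdfs://namenode:8020";   # placeholder namenode address
      hbase = {
        zookeeperQuorum = "zookeeper";                    # placeholder ZooKeeper host
        master = {
          enable = true;
          initHDFS = true;      # create and chown hbase.rootdir on HDFS before first start
        };
      };
    };
  }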
+1 -1
nixos/modules/module-list.nix
··· 357 357 ./services/databases/dgraph.nix 358 358 ./services/databases/firebird.nix 359 359 ./services/databases/foundationdb.nix 360 - ./services/databases/hbase.nix 360 + ./services/databases/hbase-standalone.nix 361 361 ./services/databases/influxdb.nix 362 362 ./services/databases/influxdb2.nix 363 363 ./services/databases/memcached.nix
+2 -1
nixos/modules/services/cluster/hadoop/conf.nix
··· 33 33 mkdir -p $out/ 34 34 cp ${siteXml "core-site.xml" (coreSite // coreSiteInternal)}/* $out/ 35 35 cp ${siteXml "hdfs-site.xml" (hdfsSiteDefault // hdfsSite // hdfsSiteInternal)}/* $out/ 36 + cp ${siteXml "hbase-site.xml" (hbaseSiteDefault // hbaseSite // hbaseSiteInternal)}/* $out/ 36 37 cp ${siteXml "mapred-site.xml" (mapredSiteDefault // mapredSite)}/* $out/ 37 38 cp ${siteXml "yarn-site.xml" (yarnSiteDefault // yarnSite // yarnSiteInternal)}/* $out/ 38 39 cp ${siteXml "httpfs-site.xml" httpfsSite}/* $out/ ··· 40 41 cp ${pkgs.writeTextDir "hadoop-user-functions.sh" userFunctions}/* $out/ 41 42 cp ${pkgs.writeTextDir "hadoop-env.sh" hadoopEnv}/* $out/ 42 43 cp ${log4jProperties} $out/log4j.properties 43 - ${lib.concatMapStringsSep "\n" (dir: "cp -r ${dir}/* $out/") extraConfDirs} 44 + ${lib.concatMapStringsSep "\n" (dir: "cp -f -r ${dir}/* $out/") extraConfDirs} 44 45 '')
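
Two things happen in this hunk: the generated configuration directory now also contains hbase-site.xml, built from three attrsets merged with `//` (right side wins), and `cp -f` lets the directories referenced at the end of the hunk (extraConfDirs) overwrite files already copied into $out instead of failing on the read-only copies. A minimal sketch of the `//` precedence, using values taken from the defaults defined in the hbase module below:

  let
    hbaseSiteDefault  = { "hbase.cluster.distributed" = "true"; };
    hbaseSite         = { "hbase.cluster.distributed" = "false"; };  # user override
    hbaseSiteInternal = { "hbase.rootdir" = "/hbase"; };             # set by the module itself
  in
    hbaseSiteDefault // hbaseSite // hbaseSiteInternal
  # => { "hbase.cluster.distributed" = "false"; "hbase.rootdir" = "/hbase"; }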
+1 -1
nixos/modules/services/cluster/hadoop/default.nix
··· 5 5 in 6 6 with lib; 7 7 { 8 - imports = [ ./yarn.nix ./hdfs.nix ]; 8 + imports = [ ./yarn.nix ./hdfs.nix ./hbase.nix ]; 9 9 10 10 options.services.hadoop = { 11 11 coreSite = mkOption {
+196
nixos/modules/services/cluster/hadoop/hbase.nix
··· 1 + { config, lib, pkgs, ...}: 2 + 3 + with lib; 4 + let 5 + cfg = config.services.hadoop; 6 + hadoopConf = "${import ./conf.nix { inherit cfg pkgs lib; }}/"; 7 + mkIfNotNull = x: mkIf (x != null) x; 8 + in 9 + { 10 + options.services.hadoop = { 11 + 12 + gatewayRole.enableHbaseCli = mkEnableOption "HBase CLI tools"; 13 + 14 + hbaseSiteDefault = mkOption { 15 + default = { 16 + "hbase.regionserver.ipc.address" = "0.0.0.0"; 17 + "hbase.master.ipc.address" = "0.0.0.0"; 18 + "hbase.master.info.bindAddress" = "0.0.0.0"; 19 + "hbase.regionserver.info.bindAddress" = "0.0.0.0"; 20 + 21 + "hbase.cluster.distributed" = "true"; 22 + }; 23 + type = types.attrsOf types.anything; 24 + description = '' 25 + Default options for hbase-site.xml 26 + ''; 27 + }; 28 + hbaseSite = mkOption { 29 + default = {}; 30 + type = with types; attrsOf anything; 31 + example = literalExpression '' 32 + ''; 33 + description = '' 34 + Additional options and overrides for hbase-site.xml 35 + <link xlink:href="https://github.com/apache/hbase/blob/rel/2.4.11/hbase-common/src/main/resources/hbase-default.xml"/> 36 + ''; 37 + }; 38 + hbaseSiteInternal = mkOption { 39 + default = {}; 40 + type = with types; attrsOf anything; 41 + internal = true; 42 + description = '' 43 + Internal option to add configs to hbase-site.xml based on module options 44 + ''; 45 + }; 46 + 47 + hbase = { 48 + 49 + package = mkOption { 50 + type = types.package; 51 + default = pkgs.hbase; 52 + defaultText = literalExpression "pkgs.hbase"; 53 + description = "HBase package"; 54 + }; 55 + 56 + rootdir = mkOption { 57 + description = '' 58 + This option will set "hbase.rootdir" in hbase-site.xml and determine 59 + the directory shared by region servers and into which HBase persists. 60 + The URL should be 'fully-qualified' to include the filesystem scheme. 61 + If a core-site.xml is provided, the FS scheme defaults to the value 62 + of "fs.defaultFS". 63 + 64 + Filesystems other than HDFS (like S3, QFS, Swift) are also supported. 65 + ''; 66 + type = types.str; 67 + example = "hdfs://nameservice1/hbase"; 68 + default = "/hbase"; 69 + }; 70 + zookeeperQuorum = mkOption { 71 + description = '' 72 + This option will set "hbase.zookeeper.quorum" in hbase-site.xml. 73 + Comma separated list of servers in the ZooKeeper ensemble. 74 + ''; 75 + type = with types; nullOr commas; 76 + example = "zk1.internal,zk2.internal,zk3.internal"; 77 + default = null; 78 + }; 79 + master = { 80 + enable = mkEnableOption "HBase Master"; 81 + initHDFS = mkEnableOption "initialization of the hbase directory on HDFS"; 82 + 83 + openFirewall = mkOption { 84 + type = types.bool; 85 + default = false; 86 + description = '' 87 + Open firewall ports for HBase master. 88 + ''; 89 + }; 90 + }; 91 + regionServer = { 92 + enable = mkEnableOption "HBase RegionServer"; 93 + 94 + overrideHosts = mkOption { 95 + type = types.bool; 96 + default = true; 97 + description = '' 98 + Remove /etc/hosts entries for "127.0.0.2" and "::1" defined in nixos/modules/config/networking.nix 99 + Regionservers must be able to resolve their hostnames to their IP addresses, through PTR records 100 + or /etc/hosts entries. 101 + 102 + ''; 103 + }; 104 + 105 + openFirewall = mkOption { 106 + type = types.bool; 107 + default = false; 108 + description = '' 109 + Open firewall ports for HBase master. 
110 + ''; 111 + }; 112 + }; 113 + }; 114 + }; 115 + 116 + config = mkMerge [ 117 + (mkIf cfg.hbase.master.enable { 118 + services.hadoop.gatewayRole = { 119 + enable = true; 120 + enableHbaseCli = mkDefault true; 121 + }; 122 + 123 + systemd.services.hbase-master = { 124 + description = "HBase master"; 125 + wantedBy = [ "multi-user.target" ]; 126 + 127 + preStart = mkIf cfg.hbase.master.initHDFS '' 128 + HADOOP_USER_NAME=hdfs ${cfg.package}/bin/hdfs --config ${hadoopConf} dfsadmin -safemode wait 129 + HADOOP_USER_NAME=hdfs ${cfg.package}/bin/hdfs --config ${hadoopConf} dfs -mkdir -p ${cfg.hbase.rootdir} 130 + HADOOP_USER_NAME=hdfs ${cfg.package}/bin/hdfs --config ${hadoopConf} dfs -chown hbase ${cfg.hbase.rootdir} 131 + ''; 132 + 133 + serviceConfig = { 134 + User = "hbase"; 135 + SyslogIdentifier = "hbase-master"; 136 + ExecStart = "${cfg.hbase.package}/bin/hbase --config ${hadoopConf} " + 137 + "master start"; 138 + Restart = "always"; 139 + }; 140 + }; 141 + 142 + services.hadoop.hbaseSiteInternal."hbase.rootdir" = cfg.hbase.rootdir; 143 + 144 + networking.firewall.allowedTCPPorts = (mkIf cfg.hbase.master.openFirewall [ 145 + 16000 16010 146 + ]); 147 + 148 + }) 149 + 150 + (mkIf cfg.hbase.regionServer.enable { 151 + services.hadoop.gatewayRole = { 152 + enable = true; 153 + enableHbaseCli = mkDefault true; 154 + }; 155 + 156 + systemd.services.hbase-regionserver = { 157 + description = "HBase RegionServer"; 158 + wantedBy = [ "multi-user.target" ]; 159 + serviceConfig = { 160 + User = "hbase"; 161 + SyslogIdentifier = "hbase-regionserver"; 162 + ExecStart = "${cfg.hbase.package}/bin/hbase --config /etc/hadoop-conf/ " + 163 + "regionserver start"; 164 + Restart = "always"; 165 + }; 166 + }; 167 + 168 + services.hadoop.hbaseSiteInternal."hbase.rootdir" = cfg.hbase.rootdir; 169 + 170 + networking = { 171 + firewall.allowedTCPPorts = (mkIf cfg.hbase.regionServer.openFirewall [ 172 + 16020 16030 173 + ]); 174 + hosts = mkIf cfg.hbase.regionServer.overrideHosts { 175 + "127.0.0.2" = mkForce [ ]; 176 + "::1" = mkForce [ ]; 177 + }; 178 + }; 179 + }) 180 + 181 + (mkIf cfg.gatewayRole.enable { 182 + 183 + environment.systemPackages = mkIf cfg.gatewayRole.enableHbaseCli [ cfg.hbase.package ]; 184 + 185 + services.hadoop.hbaseSiteInternal = with cfg.hbase; { 186 + "hbase.zookeeper.quorum" = mkIfNotNull zookeeperQuorum; 187 + }; 188 + 189 + users.users.hbase = { 190 + description = "Hadoop HBase user"; 191 + group = "hadoop"; 192 + isSystemUser = true; 193 + }; 194 + }) 195 + ]; 196 + }
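
Beyond the role toggles, the module exposes hbaseSite for arbitrary hbase-site.xml overrides and gatewayRole.enableHbaseCli for putting the hbase CLI on gateway hosts. A small sketch of a region-server node built from the options defined above; the ZooKeeper hostnames are placeholders and the handler-count property is only an illustrative override:

  {
    services.hadoop = {
      coreSite."fs.defaultFS" = "hdfs://namenode:8020";   # placeholder namenode address
      hbase = {
        zookeeperQuorum = "zk1.internal,zk2.internal,zk3.internal";
        regionServer = {
          enable = true;
          openFirewall = true;  # opens 16020 and 16030, per the firewall rule in this hunk
        };
      };
      # extra hbase-site.xml keys are layered in through hbaseSite
      hbaseSite."hbase.regionserver.handler.count" = "60";
    };
  }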
+2 -2
nixos/modules/services/cluster/hadoop/hdfs.nix
··· 158 158 50010 # datanode.address 159 159 50020 # datanode.ipc.address 160 160 ]; 161 - extraConfig.services.hadoop.hdfsSiteInternal."dfs.datanode.data.dir" = let d = cfg.hdfs.datanode.dataDirs; in 162 - if (d!= null) then (concatMapStringsSep "," (x: "["+x.type+"]file://"+x.path) cfg.hdfs.datanode.dataDirs) else d; 161 + extraConfig.services.hadoop.hdfsSiteInternal."dfs.datanode.data.dir" = mkIf (cfg.hdfs.datanode.dataDirs!= null) 162 + (concatMapStringsSep "," (x: "["+x.type+"]file://"+x.path) cfg.hdfs.datanode.dataDirs); 163 163 }) 164 164 165 165 (hadoopServiceConfig {
+4 -4
nixos/modules/services/cluster/hadoop/yarn.nix
··· 178 178 179 179 services.hadoop.gatewayRole.enable = true; 180 180 181 - services.hadoop.yarnSiteInternal = with cfg.yarn.nodemanager; { 182 - "yarn.nodemanager.local-dirs" = localDir; 181 + services.hadoop.yarnSiteInternal = with cfg.yarn.nodemanager; mkMerge [ ({ 182 + "yarn.nodemanager.local-dirs" = mkIf (localDir!= null) (concatStringsSep "," localDir); 183 183 "yarn.scheduler.maximum-allocation-vcores" = resource.maximumAllocationVCores; 184 184 "yarn.scheduler.maximum-allocation-mb" = resource.maximumAllocationMB; 185 185 "yarn.nodemanager.resource.cpu-vcores" = resource.cpuVCores; 186 186 "yarn.nodemanager.resource.memory-mb" = resource.memoryMB; 187 - } // mkIf useCGroups { 187 + }) (mkIf useCGroups { 188 188 "yarn.nodemanager.linux-container-executor.cgroups.hierarchy" = "/hadoop-yarn"; 189 189 "yarn.nodemanager.linux-container-executor.resources-handler.class" = "org.apache.hadoop.yarn.server.nodemanager.util.CgroupsLCEResourcesHandler"; 190 190 "yarn.nodemanager.linux-container-executor.cgroups.mount" = "true"; 191 191 "yarn.nodemanager.linux-container-executor.cgroups.mount-path" = "/run/wrappers/yarn-nodemanager/cgroup"; 192 - }; 192 + })]; 193 193 194 194 networking.firewall.allowedTCPPortRanges = [ 195 195 (mkIf (cfg.yarn.nodemanager.openFirewall) {from = 1024; to = 65535;})
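
This hunk, like the hdfs.nix change just above it, fixes how conditional settings are combined. mkIf returns a wrapper attrset (_type = "if" plus condition/content), so splicing it onto plain settings with `//` hands the module system a value it misreads and the unconditional keys get lost; wrapping the pieces in mkMerge lets the module system resolve the conditional part properly (the hdfs.nix hunk makes the same kind of change, using mkIf so the key is omitted instead of being defined as null). A minimal sketch of the two patterns, where useCGroups stands in for the nodemanager option used here:

  # broken: `//` merges mkIf's wrapper keys (_type, condition, content) into the settings
  { "yarn.nodemanager.resource.cpu-vcores" = "8"; }
    // lib.mkIf useCGroups { "yarn.nodemanager.linux-container-executor.cgroups.mount" = "true"; }

  # fixed: give the module system separate pieces to merge
  lib.mkMerge [
    { "yarn.nodemanager.resource.cpu-vcores" = "8"; }
    (lib.mkIf useCGroups { "yarn.nodemanager.linux-container-executor.cgroups.mount" = "true"; })
  ]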
+12 -13
nixos/modules/services/databases/hbase.nix → nixos/modules/services/databases/hbase-standalone.nix
··· 3 3 with lib; 4 4 5 5 let 6 - cfg = config.services.hbase; 7 - opt = options.services.hbase; 6 + cfg = config.services.hbase-standalone; 7 + opt = options.services.hbase-standalone; 8 8 9 9 buildProperty = configAttr: 10 10 (builtins.concatStringsSep "\n" ··· 32 32 33 33 in { 34 34 35 + imports = [ 36 + (mkRenamedOptionModule [ "services" "hbase" ] [ "services" "hbase-standalone" ]) 37 + ]; 38 + 35 39 ###### interface 36 40 37 41 options = { 42 + services.hbase-standalone = { 38 43 39 - services.hbase = { 40 - 41 - enable = mkOption { 42 - type = types.bool; 43 - default = false; 44 - description = lib.mdDoc '' 45 - Whether to run HBase. 46 - ''; 47 - }; 44 + enable = mkEnableOption '' 45 + HBase master in standalone mode with embedded regionserver and zookeper. 46 + Do not use this configuration for production nor for evaluating HBase performance. 47 + ''; 48 48 49 49 package = mkOption { 50 50 type = types.package; ··· 108 108 }; 109 109 110 110 }; 111 - 112 111 }; 113 112 114 113 ###### implementation 115 114 116 - config = mkIf config.services.hbase.enable { 115 + config = mkIf cfg.enable { 117 116 118 117 systemd.tmpfiles.rules = [ 119 118 "d '${cfg.dataDir}' - ${cfg.user} ${cfg.group} - -"
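
Because of the mkRenamedOptionModule added here, configurations that still set services.hbase keep evaluating and are mapped to the new name with a rename warning; the release notes above point production clusters at services.hadoop.hbase instead. A before/after sketch, with an illustrative value for dataDir (dataDir, user and group are pre-existing options of this module):

  # before: still accepted, but the module system emits a rename warning
  services.hbase.enable = true;

  # after
  services.hbase-standalone = {
    enable = true;
    dataDir = "/var/lib/hbase";   # illustrative value only
  };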
+1
nixos/tests/hadoop/default.nix
··· 4 4 all = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop.nix { inherit package; }; 5 5 hdfs = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hdfs.nix { inherit package; }; 6 6 yarn = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./yarn.nix { inherit package; }; 7 + hbase = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hbase.nix { inherit package; }; 7 8 }
+84
nixos/tests/hadoop/hbase.nix
··· 1 + # Test a minimal hbase cluster 2 + { pkgs, ... }: 3 + import ../make-test-python.nix ({ hadoop ? pkgs.hadoop, hbase ? pkgs.hbase, ... }: 4 + with pkgs.lib; 5 + { 6 + name = "hadoop-hbase"; 7 + 8 + nodes = let 9 + coreSite = { 10 + "fs.defaultFS" = "hdfs://namenode:8020"; 11 + }; 12 + defOpts = { 13 + enable = true; 14 + openFirewall = true; 15 + }; 16 + zookeeperQuorum = "zookeeper"; 17 + in { 18 + zookeeper = { ... }: { 19 + services.zookeeper.enable = true; 20 + networking.firewall.allowedTCPPorts = [ 2181 ]; 21 + }; 22 + namenode = { ... }: { 23 + services.hadoop = { 24 + hdfs = { 25 + namenode = defOpts // { formatOnInit = true; }; 26 + }; 27 + inherit coreSite; 28 + }; 29 + }; 30 + datanode = { ... }: { 31 + virtualisation.diskSize = 8192; 32 + services.hadoop = { 33 + hdfs.datanode = defOpts; 34 + inherit coreSite; 35 + }; 36 + }; 37 + 38 + master = { ... }:{ 39 + services.hadoop = { 40 + inherit coreSite; 41 + hbase = { 42 + inherit zookeeperQuorum; 43 + master = defOpts // { initHDFS = true; }; 44 + }; 45 + }; 46 + }; 47 + regionserver = { ... }:{ 48 + services.hadoop = { 49 + inherit coreSite; 50 + hbase = { 51 + inherit zookeeperQuorum; 52 + regionServer = defOpts; 53 + }; 54 + }; 55 + }; 56 + }; 57 + 58 + testScript = '' 59 + start_all() 60 + 61 + # wait for HDFS cluster 62 + namenode.wait_for_unit("hdfs-namenode") 63 + namenode.wait_for_unit("network.target") 64 + namenode.wait_for_open_port(8020) 65 + namenode.wait_for_open_port(9870) 66 + datanode.wait_for_unit("hdfs-datanode") 67 + datanode.wait_for_unit("network.target") 68 + datanode.wait_for_open_port(9864) 69 + datanode.wait_for_open_port(9866) 70 + datanode.wait_for_open_port(9867) 71 + 72 + # wait for ZK 73 + zookeeper.wait_for_unit("zookeeper") 74 + zookeeper.wait_for_open_port(2181) 75 + 76 + # wait for HBase to start up 77 + master.wait_for_unit("hbase-master") 78 + regionserver.wait_for_unit("hbase-regionserver") 79 + 80 + assert "1 active master, 0 backup masters, 1 servers" in master.succeed("echo status | HADOOP_USER_NAME=hbase hbase shell -n") 81 + regionserver.wait_until_succeeds("echo \"create 't1','f1'\" | HADOOP_USER_NAME=hbase hbase shell -n") 82 + assert "NAME => 'f1'" in regionserver.succeed("echo \"describe 't1'\" | HADOOP_USER_NAME=hbase hbase shell -n") 83 + ''; 84 + })
+1 -1
nixos/tests/hadoop/yarn.nix
··· 19 19 enable = true; 20 20 openFirewall = true; 21 21 }; 22 - yarnSite = options.services.hadoop.yarnSite.default // { 22 + yarnSite = { 23 23 "yarn.resourcemanager.hostname" = "resourcemanager"; 24 24 "yarn.nodemanager.log-dirs" = "/tmp/userlogs"; 25 25 };
+2 -2
nixos/tests/hbase.nix
··· 1 1 import ./make-test-python.nix ({ pkgs, lib, package ? pkgs.hbase, ... }: 2 2 { 3 - name = "hbase"; 3 + name = "hbase-standalone"; 4 4 5 5 meta = with lib.maintainers; { 6 6 maintainers = [ illustris ]; ··· 8 8 9 9 nodes = { 10 10 hbase = { pkgs, ... }: { 11 - services.hbase = { 11 + services.hbase-standalone = { 12 12 enable = true; 13 13 inherit package; 14 14 # Needed for standalone mode in hbase 2+
+2 -2
pkgs/applications/audio/kid3/default.nix
··· 27 27 28 28 stdenv.mkDerivation rec { 29 29 pname = "kid3"; 30 - version = "3.9.1"; 30 + version = "3.9.2"; 31 31 32 32 src = fetchurl { 33 33 url = "https://download.kde.org/stable/${pname}/${version}/${pname}-${version}.tar.xz"; 34 - sha256 = "sha256-N/HDwfJn7Py4y/GZcIDbeoMEqG+SuRGO23ITZMot8cc="; 34 + sha256 = "sha256-R4Xv+PmzKZQF1tFtSQTFjaisGug2EKM6mPVoGutNnok="; 35 35 }; 36 36 37 37 nativeBuildInputs = [
+26 -12
pkgs/applications/graphics/nsxiv/default.nix
··· 1 1 { lib 2 2 , stdenv 3 - , fetchFromGitHub 3 + , fetchFromGitea 4 + , fetchpatch 4 5 , giflib 5 6 , imlib2 6 7 , libXft 7 8 , libexif 8 9 , libwebp 10 + , libinotify-kqueue 9 11 , conf ? null 10 12 }: 11 13 12 14 stdenv.mkDerivation rec { 13 15 pname = "nsxiv"; 14 - version = "29"; 16 + version = "30"; 15 17 16 - src = fetchFromGitHub { 18 + src = fetchFromGitea { 19 + domain = "codeberg.org"; 17 20 owner = "nsxiv"; 18 - repo = pname; 21 + repo = "nsxiv"; 19 22 rev = "v${version}"; 20 - hash = "sha256-JUF2cF6QeAXk6G76uMu3reaMgxp2RcqHDbamkNufwqE="; 23 + hash = "sha256-swzTdQ6ow1At4bKRORqz6fb0Ej92yU9rlI/OgcinPu4="; 21 24 }; 22 25 26 + patches = [ 27 + # Fix build failure when _SC_PHYS_PAGES is not defined 28 + (fetchpatch { 29 + url = "https://codeberg.org/nsxiv/nsxiv/commit/1a50bff9f300f84e93a6e7035657e6029e7e8183.patch"; 30 + hash = "sha256-PpUqGVWaJ06EVu3tBKVzOh8HYvT6wAG3bvY6wUD+dTM="; 31 + }) 32 + ]; 33 + 23 34 buildInputs = [ 24 35 giflib 25 36 imlib2 26 37 libXft 27 38 libexif 28 39 libwebp 29 - ]; 40 + ] ++ lib.optional stdenv.isDarwin libinotify-kqueue; 30 41 31 42 preBuild = lib.optionalString (conf!=null) '' 32 43 cp ${(builtins.toFile "config.def.h" conf)} config.def.h 33 44 ''; 34 45 35 - makeFlags = [ 36 - "PREFIX=${placeholder "out"}" 37 - ]; 46 + NIX_LDFLAGS = lib.optionalString stdenv.isDarwin "-linotify"; 47 + 48 + makeFlags = [ "CC:=$(CC)" ]; 49 + 50 + installFlags = [ "PREFIX=$(out)" ]; 51 + 52 + installTargets = [ "install-all" ]; 38 53 39 54 meta = with lib; { 40 - homepage = "https://nsxiv.github.io/nsxiv/"; 55 + homepage = "https://nsxiv.codeberg.page/"; 41 56 description = "New Suckless X Image Viewer"; 42 57 longDescription = '' 43 58 nsxiv is a fork of now unmaintained sxiv with the purpose of being a ··· 54 69 - Display image name/path in X title 55 70 ''; 56 71 license = licenses.gpl2Plus; 57 - maintainers = with maintainers; [ AndersonTorres ]; 72 + maintainers = with maintainers; [ AndersonTorres sikmir ]; 58 73 platforms = platforms.unix; 59 - broken = stdenv.isDarwin; 60 74 }; 61 75 }
+29 -23
pkgs/applications/networking/cluster/hadoop/default.nix
··· 26 26 assert elem stdenv.system [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]; 27 27 28 28 let 29 - common = { pname, versions, untarDir ? "${pname}-${version}", hash, jdk, openssl ? null, nativeLibs ? [ ], libPatches ? "", tests }: 29 + common = { pname, platformAttrs, untarDir ? "${pname}-${version}", jdk, openssl ? null, nativeLibs ? [ ], libPatches ? "", tests }: 30 30 stdenv.mkDerivation rec { 31 31 inherit pname jdk libPatches untarDir openssl; 32 - version = versions.${stdenv.system} or (throw "Unsupported system: ${stdenv.system}"); 32 + version = platformAttrs.${stdenv.system}.version or (throw "Unsupported system: ${stdenv.system}"); 33 33 src = fetchurl { 34 34 url = "mirror://apache/hadoop/common/hadoop-${version}/hadoop-${version}" + optionalString stdenv.isAarch64 "-aarch64" + ".tar.gz"; 35 - hash = hash.${stdenv.system}; 35 + inherit (platformAttrs.${stdenv.system}) hash; 36 36 }; 37 37 doCheck = true; 38 38 ··· 51 51 makeWrapper "$n" "$out/bin/$(basename $n)"\ 52 52 --set-default JAVA_HOME ${jdk.home}\ 53 53 --set-default HADOOP_HOME $out/lib/${untarDir}\ 54 - --set-default HADOOP_CONF_DIR /etc/hadoop-conf/\ 54 + --run "test -d /etc/hadoop-conf && export HADOOP_CONF_DIR=\''${HADOOP_CONF_DIR-'/etc/hadoop-conf/'}"\ 55 + --set-default HADOOP_CONF_DIR $out/lib/${untarDir}/etc/hadoop/\ 55 56 --prefix PATH : "${makeBinPath [ bash coreutils which]}"\ 56 57 --prefix JAVA_LIBRARY_PATH : "${makeLibraryPath buildInputs}" 57 58 done ··· 62 63 63 64 passthru = { inherit tests; }; 64 65 65 - meta = { 66 + meta = recursiveUpdate { 66 67 homepage = "https://hadoop.apache.org/"; 67 68 description = "Framework for distributed processing of large data sets across clusters of computers"; 68 69 license = licenses.asl20; ··· 80 81 computers, each of which may be prone to failures. 
81 82 ''; 82 83 maintainers = with maintainers; [ illustris ]; 83 - platforms = attrNames hash; 84 - }; 84 + platforms = attrNames platformAttrs; 85 + } (attrByPath [ stdenv.system "meta" ] {} platformAttrs); 85 86 }; 86 87 in 87 88 { ··· 89 90 # https://cwiki.apache.org/confluence/display/HADOOP/Hadoop+Java+Versions 90 91 hadoop_3_3 = common rec { 91 92 pname = "hadoop"; 92 - versions = rec { 93 - x86_64-linux = "3.3.3"; 94 - x86_64-darwin = x86_64-linux; 95 - aarch64-linux = "3.3.1"; 96 - aarch64-darwin = aarch64-linux; 97 - }; 98 - untarDir = "${pname}-${version}"; 99 - hash = rec { 100 - x86_64-linux = "sha256-+nHGG7qkJxKa7wn+wCizTdVCxlrZD9zOxefvk9g7h2Q="; 101 - x86_64-darwin = x86_64-linux; 102 - aarch64-linux = "sha256-v1Om2pk0wsgKBghRD2wgTSHJoKd3jkm1wPKAeDcKlgI="; 103 - aarch64-darwin = aarch64-linux; 93 + platformAttrs = rec { 94 + x86_64-linux = { 95 + version = "3.3.3"; 96 + hash = "sha256-+nHGG7qkJxKa7wn+wCizTdVCxlrZD9zOxefvk9g7h2Q="; 97 + }; 98 + x86_64-darwin = x86_64-linux; 99 + aarch64-linux = { 100 + version = "3.3.1"; 101 + hash = "sha256-v1Om2pk0wsgKBghRD2wgTSHJoKd3jkm1wPKAeDcKlgI="; 102 + meta.knownVulnerabilities = [ "CVE-2021-37404" "CVE-2021-33036" ]; 103 + }; 104 + aarch64-darwin = aarch64-linux; 104 105 }; 106 + untarDir = "${pname}-${platformAttrs.${stdenv.system}.version}"; 105 107 jdk = jdk11_headless; 106 108 inherit openssl; 107 109 # TODO: Package and add Intel Storage Acceleration Library ··· 122 124 }; 123 125 hadoop_3_2 = common rec { 124 126 pname = "hadoop"; 125 - versions.x86_64-linux = "3.2.3"; 126 - hash.x86_64-linux = "sha256-Q2/a1LcKutpJoGySB0qlCcYE2bvC/HoG/dp9nBikuNU="; 127 + platformAttrs.x86_64-linux = { 128 + version = "3.2.3"; 129 + hash = "sha256-Q2/a1LcKutpJoGySB0qlCcYE2bvC/HoG/dp9nBikuNU="; 130 + }; 127 131 jdk = jdk8_headless; 128 132 # not using native libs because of broken openssl_1_0_2 dependency 129 133 # can be manually overriden ··· 131 135 }; 132 136 hadoop2 = common rec { 133 137 pname = "hadoop"; 134 - versions.x86_64-linux = "2.10.2"; 135 - hash.x86_64-linux = "sha256-xhA4zxqIRGNhIeBnJO9dLKf/gx/Bq+uIyyZwsIafEyo="; 138 + platformAttrs.x86_64-linux = { 139 + version = "2.10.2"; 140 + hash = "sha256-xhA4zxqIRGNhIeBnJO9dLKf/gx/Bq+uIyyZwsIafEyo="; 141 + }; 136 142 jdk = jdk8_headless; 137 143 tests = nixosTests.hadoop2; 138 144 };
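
The hadoop expression now keeps version, hash and optional per-platform metadata together in platformAttrs, and recursiveUpdate layers the platform-specific meta (here the aarch64-linux knownVulnerabilities) over the common meta. A minimal sketch of that merge, assuming nixpkgs' lib is in scope and using field values from the hunk:

  lib.recursiveUpdate
    { description = "Apache Hadoop"; platforms = [ "x86_64-linux" "aarch64-linux" ]; }
    { knownVulnerabilities = [ "CVE-2021-37404" "CVE-2021-33036" ]; }
  # => { description = "Apache Hadoop";
  #      platforms = [ "x86_64-linux" "aarch64-linux" ];
  #      knownVulnerabilities = [ "CVE-2021-37404" "CVE-2021-33036" ]; }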
+4 -4
pkgs/applications/networking/cluster/helm/plugins/helm-diff.nix
··· 2 2 3 3 buildGoModule rec { 4 4 pname = "helm-diff"; 5 - version = "3.1.3"; 5 + version = "3.5.0"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "databus23"; 9 9 repo = pname; 10 10 rev = "v${version}"; 11 - sha256 = "sha256-h26EOjKNrlcrs2DAYj0NmDRgNRKozjfw5DtxUgHNTa4="; 11 + sha256 = "sha256-evFdMM2AilKQPdSCUzKo6RuC4OC4zfjj+JzFvtkSrdk="; 12 12 }; 13 13 14 - vendorSha256 = "sha256-+n/QBuZqtdgUkaBG7iqSuBfljn+AdEzDoIo5SI8ErQA="; 14 + vendorSha256 = "sha256-9i4ryBpaK7mMbsOpIaaZWBRjewD1MtTpf4zJ0yU0KMg="; 15 15 16 16 # NOTE: Remove the install and upgrade hooks. 17 17 postPatch = '' ··· 27 27 28 28 meta = with lib; { 29 29 description = "A Helm plugin that shows a diff"; 30 - inherit (src.meta) homepage; 30 + homepage = "https://github.com/databus23/helm-diff"; 31 31 license = licenses.asl20; 32 32 maintainers = with maintainers; [ yurrriq ]; 33 33 };
+1 -1
pkgs/applications/networking/cluster/helm/plugins/helm-git.nix
··· 39 39 40 40 meta = with lib; { 41 41 description = "The Helm downloader plugin that provides GIT protocol support"; 42 - inherit (src.meta) homepage; 42 + homepage = "https://github.com/aslafy-z/helm-git"; 43 43 license = licenses.mit; 44 44 maintainers = with maintainers; [ flokli ]; 45 45 };
+1 -1
pkgs/applications/networking/cluster/helm/plugins/helm-s3.nix
··· 30 30 31 31 meta = with lib; { 32 32 description = "A Helm plugin that shows a diff"; 33 - inherit (src.meta) homepage; 33 + homepage = "https://github.com/hypnoglow/helm-s3"; 34 34 license = licenses.mit; 35 35 maintainers = with maintainers; [ yurrriq ]; 36 36 };
+2 -2
pkgs/applications/networking/cluster/helm/plugins/helm-secrets.nix
··· 36 36 37 37 meta = with lib; { 38 38 description = "A Helm plugin that helps manage secrets"; 39 - inherit (src.meta) homepage; 39 + homepage = "https://github.com/jkroepke/helm-secrets"; 40 40 license = licenses.asl20; 41 41 maintainers = with maintainers; [ yurrriq ]; 42 - platforms = platforms.all; 42 + platforms = platforms.unix; 43 43 }; 44 44 }
+2 -2
pkgs/applications/networking/ftp/filezilla/default.nix
··· 18 18 19 19 stdenv.mkDerivation rec { 20 20 pname = "filezilla"; 21 - version = "3.60.1"; 21 + version = "3.60.2"; 22 22 23 23 src = fetchurl { 24 24 url = "https://download.filezilla-project.org/client/FileZilla_${version}_src.tar.bz2"; 25 - hash = "sha256-gflsY2OMrxg44MY+WHT2AZISCWXYJSlKiUoit9QgZq8="; 25 + hash = "sha256-5AfbrRaZU/+VFFK8vxONlTo6MCNfirsD0nHHEsx+V5I="; 26 26 }; 27 27 28 28 configureFlags = [
+2 -2
pkgs/applications/networking/gopher/geomyidae/default.nix
··· 3 3 4 4 stdenv.mkDerivation rec { 5 5 pname = "geomyidae"; 6 - version = "0.50.1"; 6 + version = "0.51"; 7 7 8 8 src = fetchurl { 9 9 url = "gopher://bitreich.org/9/scm/geomyidae/tag/geomyidae-v${version}.tar.gz"; 10 - sha512 = "2a71b12f51c2ef8d6e791089f9eea49eb90a36be45b874d4234eba1e673186be945711be1f92508190f5c0a6f502f132c4b7cb82caf805a39a3f31903032ac47"; 10 + sha512 = "3lGAa7BCrspGBcQqjduBkIACpf3u/CkeSCBnaJ3rrz3OIidn4o4dNwZNe7u8swaJxN2dhDSKKeVT3RnFQUaXdg=="; 11 11 }; 12 12 13 13 buildInputs = [ libressl ];
+13 -9
pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix
··· 1 1 { lib 2 2 , copyDesktopItems 3 3 , electron_18 4 + , buildGoModule 4 5 , esbuild 5 6 , fetchFromGitHub 6 7 , libdeltachat ··· 35 36 "${electron_18}/Applications/Electron.app/Contents/MacOS/Electron" 36 37 else 37 38 "${electron_18}/bin/electron"; 38 - esbuild' = esbuild.overrideAttrs (old: rec { 39 - version = "0.12.29"; 40 - src = fetchFromGitHub { 41 - owner = "evanw"; 42 - repo = "esbuild"; 43 - rev = "v${version}"; 44 - hash = "sha256-oU++9E3StUoyrMVRMZz8/1ntgPI62M1NoNz9sH/N5Bg="; 45 - }; 46 - }); 39 + esbuild' = esbuild.override { 40 + buildGoModule = args: buildGoModule (args // rec { 41 + version = "0.12.29"; 42 + src = fetchFromGitHub { 43 + owner = "evanw"; 44 + repo = "esbuild"; 45 + rev = "v${version}"; 46 + hash = "sha256-oU++9E3StUoyrMVRMZz8/1ntgPI62M1NoNz9sH/N5Bg="; 47 + }; 48 + vendorSha256 = "sha256-QPkBR+FscUc3jOvH7olcGUhM6OW4vxawmNJuRQxPuGs="; 49 + }); 50 + }; 47 51 in nodePackages.deltachat-desktop.override rec { 48 52 pname = "deltachat-desktop"; 49 53 version = "1.30.1";
+17 -1
pkgs/applications/window-managers/fvwm/2.6.nix
··· 17 17 , libxslt 18 18 , perl 19 19 , pkg-config 20 + , python3Packages 20 21 , readline 21 22 , enableGestures ? false 22 23 }: ··· 32 33 hash = "sha256-sBVOrrl2WrZ2wWN/r1kDUtR+tPwXgDoSJDaxGeFkXJI="; 33 34 }; 34 35 35 - nativeBuildInputs = [ autoreconfHook pkg-config ]; 36 + nativeBuildInputs = [ 37 + autoreconfHook 38 + pkg-config 39 + python3Packages.wrapPython 40 + ]; 36 41 37 42 buildInputs = [ 38 43 cairo ··· 48 53 librsvg 49 54 libxslt 50 55 perl 56 + python3Packages.python 51 57 readline 52 58 ] ++ lib.optional enableGestures libstroke; 53 59 60 + pythonPath = [ 61 + python3Packages.pyxdg 62 + ]; 63 + 54 64 configureFlags = [ 55 65 "--enable-mandoc" 56 66 "--disable-htmldoc" 57 67 ]; 68 + 69 + postFixup = '' 70 + wrapPythonPrograms 71 + ''; 72 + 73 + enableParallelBuilding = true; 58 74 59 75 meta = with lib; { 60 76 homepage = "http://fvwm.org";
+13 -2
pkgs/applications/window-managers/fvwm/3.nix
··· 25 25 , libxslt 26 26 , perl 27 27 , pkg-config 28 - , python3 28 + , python3Packages 29 29 , readline 30 30 , sharutils 31 31 }: ··· 45 45 autoreconfHook 46 46 asciidoctor 47 47 pkg-config 48 + python3Packages.wrapPython 48 49 ]; 49 50 50 51 buildInputs = [ ··· 69 70 libstroke 70 71 libxslt 71 72 perl 72 - python3 73 + python3Packages.python 73 74 readline 74 75 sharutils 75 76 ]; 76 77 78 + pythonPath = [ 79 + python3Packages.pyxdg 80 + ]; 81 + 77 82 configureFlags = [ 78 83 "--enable-mandoc" 79 84 ]; 85 + 86 + postFixup = '' 87 + wrapPythonPrograms 88 + ''; 89 + 90 + enableParallelBuilding = true; 80 91 81 92 meta = with lib; { 82 93 homepage = "http://fvwm.org";
+1 -1
pkgs/development/compilers/go/1.17.nix
··· 25 25 assert threadsCross != null -> stdenv.targetPlatform.isWindows; 26 26 27 27 let 28 - go_bootstrap = buildPackages.callPackage ./bootstrap.nix { }; 28 + go_bootstrap = buildPackages.callPackage ./bootstrap116.nix { }; 29 29 30 30 goBootstrap = runCommand "go-bootstrap" { } '' 31 31 mkdir $out
+1 -1
pkgs/development/compilers/go/1.18.nix
··· 25 25 assert threadsCross != null -> stdenv.targetPlatform.isWindows; 26 26 27 27 let 28 - go_bootstrap = buildPackages.callPackage ./bootstrap.nix { }; 28 + go_bootstrap = buildPackages.callPackage ./bootstrap116.nix { }; 29 29 30 30 goBootstrap = runCommand "go-bootstrap" { } '' 31 31 mkdir $out
+1 -1
pkgs/development/compilers/go/1.19.nix
··· 25 25 assert threadsCross != null -> stdenv.targetPlatform.isWindows; 26 26 27 27 let 28 - go_bootstrap = buildPackages.callPackage ./bootstrap.nix { }; 28 + go_bootstrap = buildPackages.callPackage ./bootstrap116.nix { }; 29 29 30 30 goBootstrap = runCommand "go-bootstrap" { } '' 31 31 mkdir $out
pkgs/development/compilers/go/bootstrap.nix → pkgs/development/compilers/go/bootstrap116.nix
+15
pkgs/development/compilers/go/bootstrap117.nix
··· 1 + { callPackage }: 2 + callPackage ./binary.nix { 3 + version = "1.17.13"; 4 + hashes = { 5 + # Use `print-hashes.sh ${version}` to generate the list below 6 + darwin-amd64 = "c101beaa232e0f448fab692dc036cd6b4677091ff89c4889cc8754b1b29c6608"; 7 + darwin-arm64 = "e4ccc9c082d91eaa0b866078b591fc97d24b91495f12deb3dd2d8eda4e55a6ea"; 8 + linux-386 = "5e02f35aecc6b89679f631e0edf12c49922dd31c8140cf8dd725c5797a9f2425"; 9 + linux-amd64 = "4cdd2bc664724dc7db94ad51b503512c5ae7220951cac568120f64f8e94399fc"; 10 + linux-arm64 = "914daad3f011cc2014dea799bb7490442677e4ad6de0b2ac3ded6cee7e3f493d"; 11 + linux-armv6l = "260431d7deeb8893c21e71fcbbb1fde3258616d8eba584c8d72060228ab42c86"; 12 + linux-ppc64le = "bd0763fb130f8412672ffe1e4a8e65888ebe2419e5caa9a67ac21e8c298aa254"; 13 + linux-s390x = "08f6074e1e106cbe5d78622357db71a93648c7a4c4e4b02e3b5f2a1828914c76"; 14 + }; 15 + }
+9 -8
pkgs/development/compilers/go/print-hashes.sh
··· 1 - #!/usr/bin/env bash 1 + #!/usr/bin/env nix-shell 2 + #! nix-shell -i bash -p curl jq 3 + # shellcheck shell=bash 2 4 set -euo pipefail 3 5 4 6 BASEURL=https://go.dev/dl/ 5 7 VERSION=${1:-} 6 8 7 - if [[ -z $VERSION ]] 8 - then 9 - echo "No version supplied" 10 - exit -1 9 + if [[ -z ${VERSION} ]]; then 10 + echo "No version supplied" 11 + exit 1 11 12 fi 12 13 13 - curl -s "${BASEURL}?mode=json&include=all" | \ 14 - jq '.[] | select(.version == "go'${VERSION}'")' | \ 15 - jq -r '.files[] | select(.kind == "archive" and (.os == "linux" or .os == "darwin")) | (.os + "-" + .arch + " = \"" + .sha256 + "\";")' 14 + curl -s "${BASEURL}?mode=json&include=all" | 15 + jq '.[] | select(.version == "go'"${VERSION}"'")' | 16 + jq -r '.files[] | select(.kind == "archive" and (.os == "linux" or .os == "darwin")) | (.os + "-" + .arch + " = \"" + .sha256 + "\";")'
+1 -1
pkgs/development/libraries/exiv2/default.nix
··· 118 118 disallowedReferences = [ stdenv.cc.cc ]; 119 119 120 120 meta = with lib; { 121 - homepage = "https://www.exiv2.org/"; 121 + homepage = "https://exiv2.org"; 122 122 description = "A library and command-line utility to manage image metadata"; 123 123 platforms = platforms.all; 124 124 license = licenses.gpl2Plus;
+31 -12
pkgs/development/libraries/libiptcdata/default.nix
··· 1 - { lib, stdenv, fetchurl, libiconv }: 1 + { lib 2 + , stdenv 3 + , fetchFromGitHub 4 + , autoreconfHook 5 + , libiconv 6 + , libintl 7 + }: 2 8 3 9 stdenv.mkDerivation rec { 4 10 pname = "libiptcdata"; 5 - version = "1.0.4"; 11 + version = "1.0.5"; 12 + 13 + src = fetchFromGitHub { 14 + owner = "ianw"; 15 + repo = pname; 16 + rev = "release_${builtins.replaceStrings ["."] ["_"] version}"; 17 + sha256 = "sha256-ZjokepDAHiSEwXrkvM9qUAPcpIiRQoOsv7REle7roPU="; 18 + }; 19 + 20 + postPatch = '' 21 + # gtk-doc doesn't build without network access 22 + sed -i '/GTK_DOC_CHECK/d;/docs/d' configure.ac 23 + sed -i 's/docs//' Makefile.am 24 + ''; 25 + 26 + nativeBuildInputs = [ 27 + autoreconfHook 28 + ]; 6 29 7 30 buildInputs = lib.optionals stdenv.isDarwin [ 8 31 libiconv 32 + libintl 9 33 ]; 10 34 11 - src = fetchurl { 12 - url = "mirror://sourceforge/libiptcdata/${pname}-${version}.tar.gz"; 13 - sha256 = "03pfvkmmx762iydq0q207x2028d275pbdysfsgpmrr0ywy63pxkr"; 14 - }; 15 - 16 - meta = { 35 + meta = with lib; { 17 36 description = "Library for reading and writing the IPTC metadata in images and other files"; 18 - homepage = "http://libiptcdata.sourceforge.net/"; 19 - license = lib.licenses.gpl2Plus; 20 - platforms = lib.platforms.unix; 21 - maintainers = with lib.maintainers; [ wegank ]; 37 + homepage = "https://github.com/ianw/libiptcdata"; 38 + license = licenses.gpl2Plus; 39 + platforms = platforms.unix; 40 + maintainers = with maintainers; [ wegank ]; 22 41 }; 23 42 }
+2 -2
pkgs/development/libraries/libpulsar/default.nix
··· 51 51 in 52 52 stdenv.mkDerivation rec { 53 53 pname = "libpulsar"; 54 - version = "2.9.1"; 54 + version = "2.10.1"; 55 55 56 56 src = fetchurl { 57 - hash = "sha512-NKHiL7D/Lmnn6ICpQyUmmQYQETz4nZPJU9/4LMRDUQ3Pck6qDh+t6CRk+b9UQ2Vb0jvPIGTjEsSp2nC7TJk3ug=="; 57 + hash = "sha256-qMj76jnxRH68DE6JkZjQrLSNzgXGnO7HjPjlaFavaUY="; 58 58 url = "mirror://apache/pulsar/pulsar-${version}/apache-pulsar-${version}-src.tar.gz"; 59 59 }; 60 60
+2 -2
pkgs/development/libraries/tachyon/default.nix
··· 9 9 10 10 stdenv.mkDerivation rec { 11 11 pname = "tachyon"; 12 - version = "0.99.4"; 12 + version = "0.99.5"; 13 13 src = fetchurl { 14 14 url = "http://jedi.ks.uiuc.edu/~johns/tachyon/files/${version}/${pname}-${version}.tar.gz"; 15 - sha256 = "sha256-vJvDHhLDp5rpH9KhXUtQaqfjyai0e3NMKOEkbhYuaA0="; 15 + sha256 = "sha256-CSA8ECMRFJ9d9cw2dAn5bHJXQmZtGcJNtbqZTVqBpvU="; 16 16 }; 17 17 buildInputs = lib.optionals stdenv.isDarwin [ 18 18 Carbon
+1 -1
pkgs/development/libraries/tinyxml-2/default.nix
··· 15 15 16 16 meta = { 17 17 description = "A simple, small, efficient, C++ XML parser"; 18 - homepage = "http://www.grinninglizard.com/tinyxml2/index.html"; 18 + homepage = "https://www.grinninglizard.com/tinyxml2/index.html"; 19 19 platforms = lib.platforms.unix; 20 20 license = lib.licenses.zlib; 21 21 };
+2 -2
pkgs/development/nim-packages/flatty/default.nix
··· 2 2 3 3 buildNimPackage rec { 4 4 pname = "flatty"; 5 - version = "0.2.3"; 5 + version = "0.3.4"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "treeform"; 9 9 repo = pname; 10 10 rev = version; 11 - hash = "sha256-1tPLtnlGtE4SF5/ti/2svvYHpEy/0Za5N4YAOHFOyjA="; 11 + hash = "sha256-ZmhjehmEJHm5qNlsGQvyYLajUdwhWt1+AtRppRrNtgA="; 12 12 }; 13 13 14 14 doCheck = true;
+46
pkgs/development/python-modules/aioecowitt/default.nix
··· 1 + { lib 2 + , aiohttp 3 + , buildPythonPackage 4 + , fetchFromGitHub 5 + , meteocalc 6 + , pytest-aiohttp 7 + , pytestCheckHook 8 + , pythonOlder 9 + }: 10 + 11 + buildPythonPackage rec { 12 + pname = "aioecowitt"; 13 + version = "2022.7.0"; 14 + format = "setuptools"; 15 + 16 + disabled = pythonOlder "3.9"; 17 + 18 + src = fetchFromGitHub { 19 + owner = "home-assistant-libs"; 20 + repo = pname; 21 + rev = "refs/tags/${version}"; 22 + hash = "sha256-GALBhapE31CM2mqBrgcdQf5SJV+edN3kj35r0cf7BcU="; 23 + }; 24 + 25 + propagatedBuildInputs = [ 26 + aiohttp 27 + meteocalc 28 + ]; 29 + 30 + checkInputs = [ 31 + pytest-aiohttp 32 + pytestCheckHook 33 + ]; 34 + 35 + pythonImportsCheck = [ 36 + "aioecowitt" 37 + ]; 38 + 39 + meta = with lib; { 40 + description = "Wrapper for the EcoWitt protocol"; 41 + homepage = "https://github.com/home-assistant-libs/aioecowitt"; 42 + changelog = "https://github.com/home-assistant-libs/aioecowitt/releases/tag/${version}"; 43 + license = with licenses; [ asl20 ]; 44 + maintainers = with maintainers; [ fab ]; 45 + }; 46 + }
+36 -19
pkgs/development/python-modules/clustershell/default.nix
··· 1 - { stdenv, lib, buildPythonPackage, fetchPypi, pyyaml, openssh 2 - , nose, bc, hostname, coreutils, bash, gnused 1 + { stdenv 2 + , lib 3 + , buildPythonPackage 4 + , fetchPypi 5 + , pyyaml 6 + , openssh 7 + , nose 8 + , bc 9 + , hostname 10 + , bash 3 11 }: 4 12 5 13 buildPythonPackage rec { ··· 10 18 inherit pname version; 11 19 sha256 = "ff6fba688a06e5e577315d899f0dab3f4fe479cef99d444a4e651af577b7d081"; 12 20 }; 13 - 14 - propagatedBuildInputs = [ pyyaml ]; 15 21 16 22 postPatch = '' 17 23 substituteInPlace lib/ClusterShell/Worker/Ssh.py \ ··· 20 26 21 27 substituteInPlace lib/ClusterShell/Worker/fastsubprocess.py \ 22 28 --replace '"/bin/sh"' '"${bash}/bin/sh"' 29 + 30 + for f in tests/*; do 31 + substituteInPlace $f \ 32 + --replace '/bin/hostname' '${hostname}/bin/hostname' \ 33 + --replace '/bin/sleep' 'sleep' \ 34 + --replace '/bin/echo' 'echo' \ 35 + --replace '/bin/uname' 'uname' \ 36 + --replace '/bin/false' 'false' \ 37 + --replace '/bin/true' 'true' \ 38 + --replace '/usr/bin/printf' 'printf' 39 + done 40 + 41 + # Fix warnings 42 + substituteInPlace lib/ClusterShell/Task.py \ 43 + --replace "notifyAll" "notify_all" 44 + substituteInPlace tests/TaskPortTest.py lib/ClusterShell/Task.py \ 45 + --replace "currentThread" "current_thread" 23 46 ''; 24 47 25 - checkInputs = [ nose bc hostname coreutils gnused ]; 48 + propagatedBuildInputs = [ pyyaml ]; 49 + 50 + checkInputs = [ 51 + bc 52 + hostname 53 + nose 54 + ]; 55 + 56 + pythonImportsCheck = [ "ClusterShell" ]; 26 57 27 58 # Many tests want to open network connections 28 59 # https://github.com/cea-hpc/clustershell#test-suite 29 60 # 30 61 # Several tests fail on Darwin 31 62 checkPhase = '' 32 - for f in tests/*; do 33 - substituteInPlace $f \ 34 - --replace '/bin/hostname' '${hostname}/bin/hostname' \ 35 - --replace '/bin/sleep' '${coreutils}/bin/sleep' \ 36 - --replace '"sleep' '"${coreutils}/bin/sleep' \ 37 - --replace '/bin/echo' '${coreutils}/bin/echo' \ 38 - --replace '/bin/uname' '${coreutils}/bin/uname' \ 39 - --replace '/bin/false' '${coreutils}/bin/false' \ 40 - --replace '/bin/true' '${coreutils}/bin/true' \ 41 - --replace '/usr/bin/printf' '${coreutils}/bin/printf' \ 42 - --replace '"sed' '"${gnused}/bin/sed' \ 43 - --replace ' sed ' ' ${gnused}/bin/sed ' 44 - done 45 - 46 63 rm tests/CLIClushTest.py 47 64 rm tests/TreeWorkerTest.py 48 65 rm tests/TaskDistantMixin.py
+3 -3
pkgs/development/python-modules/dvc-render/default.nix
··· 13 13 14 14 buildPythonPackage rec { 15 15 pname = "dvc-render"; 16 - version = "0.0.8"; 16 + version = "0.0.9"; 17 17 format = "pyproject"; 18 18 19 19 disabled = pythonOlder "3.7"; ··· 22 22 owner = "iterative"; 23 23 repo = pname; 24 24 rev = "refs/tags/${version}"; 25 - hash = "sha256-pn1dmCyDxbMgXwUj9o//X3FZ/x0jz5ZKdTcEuKkeJ1s="; 25 + hash = "sha256-ZUIyNg+PTj5CWC65RqB1whnB+pUp1yNJQj43iSBcyvU="; 26 26 }; 27 27 28 28 SETUPTOOLS_SCM_PRETEND_VERSION = version; ··· 58 58 description = "Library for rendering DVC plots"; 59 59 homepage = "https://github.com/iterative/dvc-render"; 60 60 license = licenses.asl20; 61 - maintainers = with maintainers; [ fab ]; 61 + maintainers = with maintainers; [ fab anthonyroussel ]; 62 62 }; 63 63 }
+36
pkgs/development/python-modules/meteocalc/default.nix
··· 1 + { lib 2 + , buildPythonPackage 3 + , fetchFromGitHub 4 + , pytestCheckHook 5 + , pythonOlder 6 + }: 7 + 8 + buildPythonPackage rec { 9 + pname = "meteocalc"; 10 + version = "1.1.0"; 11 + format = "setuptools"; 12 + 13 + disabled = pythonOlder "3.7"; 14 + 15 + src = fetchFromGitHub { 16 + owner = "malexer"; 17 + repo = pname; 18 + rev = version; 19 + hash = "sha256-WuIW6hROQkjMfbCLUouECIrp4s6oCd2/N79hsrTbVTk="; 20 + }; 21 + 22 + checkInputs = [ 23 + pytestCheckHook 24 + ]; 25 + 26 + pythonImportsCheck = [ 27 + "meteocalc" 28 + ]; 29 + 30 + meta = with lib; { 31 + description = "Module for calculation of meteorological variables"; 32 + homepage = "https://github.com/malexer/meteocalc"; 33 + license = licenses.mit; 34 + maintainers = with maintainers; [ fab ]; 35 + }; 36 + }
+53
pkgs/development/python-modules/pysigma-backend-elasticsearch/default.nix
··· 1 + { lib 2 + , buildPythonPackage 3 + , fetchFromGitHub 4 + , poetry-core 5 + , pysigma 6 + , pytestCheckHook 7 + , pythonOlder 8 + , requests 9 + }: 10 + 11 + buildPythonPackage rec { 12 + pname = "pysigma-backend-elasticsearch"; 13 + version = "0.1.0"; 14 + format = "pyproject"; 15 + 16 + disabled = pythonOlder "3.8"; 17 + 18 + src = fetchFromGitHub { 19 + owner = "SigmaHQ"; 20 + repo = "pySigma-backend-elasticsearch"; 21 + rev = "v${version}"; 22 + hash = "sha256-BEvYz0jTJifsNBrA4r16JkiFaERDj/zWKd9MbhcuCS8="; 23 + }; 24 + 25 + nativeBuildInputs = [ 26 + poetry-core 27 + ]; 28 + 29 + propagatedBuildInputs = [ 30 + pysigma 31 + ]; 32 + 33 + checkInputs = [ 34 + pytestCheckHook 35 + requests 36 + ]; 37 + 38 + pythonImportsCheck = [ 39 + "sigma.backends.elasticsearch" 40 + ]; 41 + 42 + disabledTests = [ 43 + # Tests requires network access 44 + "test_connect_lucene" 45 + ]; 46 + 47 + meta = with lib; { 48 + description = "Library to support Elasticsearch for pySigma"; 49 + homepage = "https://github.com/SigmaHQ/pySigma-backend-elasticsearch"; 50 + license = with licenses; [ lgpl21Only ]; 51 + maintainers = with maintainers; [ fab ]; 52 + }; 53 + }
+2 -2
pkgs/development/python-modules/pysigma-backend-insightidr/default.nix
··· 10 10 11 11 buildPythonPackage rec { 12 12 pname = "pysigma-backend-insightidr"; 13 - version = "0.1.6"; 13 + version = "0.1.7"; 14 14 format = "pyproject"; 15 15 16 16 disabled = pythonOlder "3.8"; ··· 19 19 owner = "SigmaHQ"; 20 20 repo = "pySigma-backend-insightidr"; 21 21 rev = "refs/tags/v${version}"; 22 - hash = "sha256-Sg+AYoEbCmcqxw5dl8wmQcI+lFrAfFgDnQjiQh6r9Yc="; 22 + hash = "sha256-5uWSXUKLSJbkJHvMmFGrS/yukBO/ax8seg+0ZqAHdaE="; 23 23 }; 24 24 25 25 nativeBuildInputs = [
+55
pkgs/development/python-modules/pysigma-backend-opensearch/default.nix
··· 1 + { lib 2 + , buildPythonPackage 3 + , fetchFromGitHub 4 + , poetry-core 5 + , pysigma 6 + , pysigma-backend-elasticsearch 7 + , pytestCheckHook 8 + , pythonOlder 9 + , requests 10 + }: 11 + 12 + buildPythonPackage rec { 13 + pname = "pysigma-backend-opensearch"; 14 + version = "0.1.2"; 15 + format = "pyproject"; 16 + 17 + disabled = pythonOlder "3.8"; 18 + 19 + src = fetchFromGitHub { 20 + owner = "SigmaHQ"; 21 + repo = "pySigma-backend-opensearch"; 22 + rev = "v${version}"; 23 + hash = "sha256-5+/LOi7GHu8h9WhjpZ7bBc4aM41NiXrSrdGhbXdYMvw="; 24 + }; 25 + 26 + nativeBuildInputs = [ 27 + poetry-core 28 + ]; 29 + 30 + propagatedBuildInputs = [ 31 + pysigma 32 + pysigma-backend-elasticsearch 33 + ]; 34 + 35 + checkInputs = [ 36 + pytestCheckHook 37 + requests 38 + ]; 39 + 40 + pythonImportsCheck = [ 41 + "sigma.backends.opensearch" 42 + ]; 43 + 44 + disabledTests = [ 45 + # Tests requires network access 46 + "test_connect_lucene" 47 + ]; 48 + 49 + meta = with lib; { 50 + description = "Library to support OpenSearch for pySigma"; 51 + homepage = "https://github.com/SigmaHQ/pySigma-backend-opensearch"; 52 + license = with licenses; [ lgpl21Only ]; 53 + maintainers = with maintainers; [ fab ]; 54 + }; 55 + }
+49
pkgs/development/python-modules/pysigma-backend-qradar/default.nix
··· 1 + { lib 2 + , buildPythonPackage 3 + , fetchFromGitHub 4 + , poetry-core 5 + , pysigma 6 + , pysigma-pipeline-sysmon 7 + , pytestCheckHook 8 + , pythonOlder 9 + , requests 10 + }: 11 + 12 + buildPythonPackage rec { 13 + pname = "pysigma-backend-qradar"; 14 + version = "0.1.9"; 15 + format = "pyproject"; 16 + 17 + disabled = pythonOlder "3.8"; 18 + 19 + src = fetchFromGitHub { 20 + owner = "nNipsx-Sec"; 21 + repo = "pySigma-backend-qradar"; 22 + rev = "v${version}"; 23 + hash = "sha256-b3e8cVrVFZgihhEk6QlUnRZigglczHUa/XeMvMzNYLk="; 24 + }; 25 + 26 + nativeBuildInputs = [ 27 + poetry-core 28 + ]; 29 + 30 + propagatedBuildInputs = [ 31 + pysigma 32 + ]; 33 + 34 + checkInputs = [ 35 + pysigma-pipeline-sysmon 36 + pytestCheckHook 37 + ]; 38 + 39 + pythonImportsCheck = [ 40 + "sigma.backends.qradar" 41 + ]; 42 + 43 + meta = with lib; { 44 + description = "Library to support Qradar for pySigma"; 45 + homepage = "https://github.com/nNipsx-Sec/pySigma-backend-qradar"; 46 + license = with licenses; [ lgpl21Only ]; 47 + maintainers = with maintainers; [ fab ]; 48 + }; 49 + }
+2 -2
pkgs/development/python-modules/pysigma-backend-splunk/default.nix
··· 10 10 11 11 buildPythonPackage rec { 12 12 pname = "pysigma-backend-splunk"; 13 - version = "0.3.5"; 13 + version = "0.3.6"; 14 14 format = "pyproject"; 15 15 16 16 disabled = pythonOlder "3.8"; ··· 19 19 owner = "SigmaHQ"; 20 20 repo = "pySigma-backend-splunk"; 21 21 rev = "refs/tags/v${version}"; 22 - hash = "sha256-W6wnxQPrtMDG5jctB7CMXr4kPIhZievFTBJCeSeDCWw="; 22 + hash = "sha256-6XvKytODJll9BPeAhk6girwLibNJk+QEn2AV/WNqnyI="; 23 23 }; 24 24 25 25 nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/pysigma-pipeline-crowdstrike/default.nix
··· 9 9 10 10 buildPythonPackage rec { 11 11 pname = "pysigma-pipeline-crowdstrike"; 12 - version = "0.1.6"; 12 + version = "0.1.7"; 13 13 format = "pyproject"; 14 14 15 15 disabled = pythonOlder "3.8"; ··· 18 18 owner = "SigmaHQ"; 19 19 repo = "pySigma-pipeline-crowdstrike"; 20 20 rev = "v${version}"; 21 - hash = "sha256-5xX7NwM+Us0ToJa2miAw9KsCt2T+TqBnqHtwphZNxJI="; 21 + hash = "sha256-cALpOAn+zf1w7KYExBVFebRwNYMjiQPE7fdB1x7gHZw="; 22 22 }; 23 23 24 24 nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/pysigma-pipeline-sysmon/default.nix
··· 9 9 10 10 buildPythonPackage rec { 11 11 pname = "pysigma-pipeline-sysmon"; 12 - version = "0.1.6"; 12 + version = "1.0.0"; 13 13 format = "pyproject"; 14 14 15 15 disabled = pythonOlder "3.8"; ··· 18 18 owner = "SigmaHQ"; 19 19 repo = "pySigma-pipeline-sysmon"; 20 20 rev = "v${version}"; 21 - hash = "sha256-hKXnM3iqt6PnV+cMV3gEleBChd263sy2DovpIKg22fs="; 21 + hash = "sha256-OwWUt1O8436kmuaqv8Ec6485NLkVztLjGIWF2SPRtKA="; 22 22 }; 23 23 24 24 nativeBuildInputs = [
+2 -7
pkgs/development/python-modules/pysigma-pipeline-windows/default.nix
··· 9 9 10 10 buildPythonPackage rec { 11 11 pname = "pysigma-pipeline-windows"; 12 - version = "0.1.1"; 12 + version = "1.0.0"; 13 13 format = "pyproject"; 14 14 15 15 disabled = pythonOlder "3.8"; ··· 18 18 owner = "SigmaHQ"; 19 19 repo = "pySigma-pipeline-windows"; 20 20 rev = "refs/tags/v${version}"; 21 - hash = "sha256-ATDWhHY9tjuQbfIFgoGhz8qsluH9hTSI9zdPmP8GPWE="; 21 + hash = "sha256-/DUhgvcGhk28HRDCi96K1NH6MHNB1WogfuMVCgNbot4="; 22 22 }; 23 23 24 24 nativeBuildInputs = [ ··· 28 28 propagatedBuildInputs = [ 29 29 pysigma 30 30 ]; 31 - 32 - postPatch = '' 33 - substituteInPlace pyproject.toml \ 34 - --replace 'pysigma = "^0.5.0"' 'pysigma = "^0.6.0"' 35 - ''; 36 31 37 32 checkInputs = [ 38 33 pytestCheckHook
+2 -2
pkgs/development/python-modules/pysigma/default.nix
··· 11 11 12 12 buildPythonPackage rec { 13 13 pname = "pysigma"; 14 - version = "0.6.8"; 14 + version = "0.7.3"; 15 15 format = "pyproject"; 16 16 17 17 disabled = pythonOlder "3.8"; ··· 20 20 owner = "SigmaHQ"; 21 21 repo = "pySigma"; 22 22 rev = "refs/tags/v${version}"; 23 - hash = "sha256-Jq37/9w1GHr+GZ8Fvftmfz2XkL3zUpTjbDlokrUoLtw="; 23 + hash = "sha256-yXzYNBD39dPd2f5Gvmo5p0cVlTy38q1jnBL+HjAxmB8="; 24 24 }; 25 25 26 26 nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/regenmaschine/default.nix
··· 14 14 15 15 buildPythonPackage rec { 16 16 pname = "regenmaschine"; 17 - version = "2022.07.3"; 17 + version = "2022.08.0"; 18 18 format = "pyproject"; 19 19 20 20 disabled = pythonOlder "3.8"; ··· 23 23 owner = "bachya"; 24 24 repo = pname; 25 25 rev = "refs/tags/${version}"; 26 - sha256 = "sha256-z7FrVnGQjpTjdIX/gatP/ZjzOLaj2D8XsQ+UTYBOHgE="; 26 + sha256 = "sha256-JPJ+8h3r1C2fHxVPsQgk0ZuG7VqKfBb4qthAG+GCvcE="; 27 27 }; 28 28 29 29 nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/volvooncall/default.nix
··· 16 16 17 17 buildPythonPackage rec { 18 18 pname = "volvooncall"; 19 - version = "0.10.0"; 19 + version = "0.10.1"; 20 20 21 21 disabled = pythonOlder "3.8"; 22 22 ··· 26 26 owner = "molobrakos"; 27 27 repo = "volvooncall"; 28 28 rev = "v${version}"; 29 - hash = "sha256-HLSanXJs1yPSgYo4oX0zJtrV5sKkxV2yLPhc2dVRHY8="; 29 + hash = "sha256-udYvgKj7Rlc/hA86bbeBfnoVRjKkXT4TwpceWz226cU="; 30 30 }; 31 31 32 32 propagatedBuildInputs = [
+51
pkgs/development/python-modules/zadnegoale/default.nix
··· 1 + { lib 2 + , aiohttp 3 + , aioresponses 4 + , buildPythonPackage 5 + , fetchFromGitHub 6 + , dacite 7 + , orjson 8 + , pytest-asyncio 9 + , pytest-error-for-skips 10 + , pytestCheckHook 11 + , pythonOlder 12 + }: 13 + 14 + buildPythonPackage rec { 15 + pname = "zadnegoale"; 16 + version = "0.6.5"; 17 + format = "setuptools"; 18 + 19 + disabled = pythonOlder "3.8"; 20 + 21 + src = fetchFromGitHub { 22 + owner = "bieniu"; 23 + repo = pname; 24 + rev = "refs/tags/${version}"; 25 + hash = "sha256-ubBN4jvueNgReNbS+RXNDNHID0MF/rvQnb0+F4/DZaU="; 26 + }; 27 + 28 + propagatedBuildInputs = [ 29 + aiohttp 30 + dacite 31 + orjson 32 + ]; 33 + 34 + checkInputs = [ 35 + aioresponses 36 + pytest-asyncio 37 + pytest-error-for-skips 38 + pytestCheckHook 39 + ]; 40 + 41 + pythonImportsCheck = [ 42 + "zadnegoale" 43 + ]; 44 + 45 + meta = with lib; { 46 + description = "Python wrapper for getting allergen concentration data from Żadnego Ale servers"; 47 + homepage = "https://github.com/bieniu/zadnegoale"; 48 + license = licenses.asl20; 49 + maintainers = with maintainers; [ fab ]; 50 + }; 51 + }
+1 -1
pkgs/development/tools/build-managers/bmake/default.nix
··· 105 105 license = licenses.bsd3; 106 106 maintainers = with maintainers; [ thoughtpolice AndersonTorres ]; 107 107 platforms = platforms.unix; 108 - broken = stdenv.isAarch64; # ofborg complains 108 + broken = with stdenv; isAarch64 && !isDarwin; # ofborg complains 109 109 }; 110 110 111 111 passthru.tests.bmakeMusl = pkgsMusl.bmake;
+4 -4
pkgs/development/tools/golangci-lint/default.nix
··· 1 - { stdenv, buildGoModule, fetchFromGitHub, lib, installShellFiles }: 1 + { buildGoModule, fetchFromGitHub, lib, installShellFiles }: 2 2 3 3 buildGoModule rec { 4 4 pname = "golangci-lint"; 5 - version = "1.47.3"; 5 + version = "1.48.0"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "golangci"; 9 9 repo = "golangci-lint"; 10 10 rev = "v${version}"; 11 - sha256 = "sha256-uY8D3VqcaLEi/QChH/kfY9SF3H2wmiScm3m6OGeWTu8="; 11 + sha256 = "sha256-6nXn1+LsjiXjCeHhvVjyU1F6IJ8YP1Oj+5tDRhiMuUc="; 12 12 }; 13 13 14 - vendorSha256 = "sha256-F7arWygCbh9Z6zemPt+0T6wWMcP2Wg5A1qC6A7mYngI="; 14 + vendorSha256 = "sha256-4ZqO4NEZfIhl/hWcB0HeRbp2jQ/WhMBpTLmP2W7X7xM="; 15 15 16 16 doCheck = false; 17 17
+25
pkgs/development/tools/kubectx/bump-golang-x-sys.patch
··· 1 + diff --git a/go.mod b/go.mod 2 + index c523783..1ef8d00 100644 3 + --- a/go.mod 4 + +++ b/go.mod 5 + @@ -9,6 +9,7 @@ require ( 6 + github.com/imdario/mergo v0.3.9 // indirect 7 + github.com/mattn/go-isatty v0.0.12 8 + github.com/pkg/errors v0.9.1 9 + + golang.org/x/sys v0.0.0-20220731174439-a90be440212d // indirect 10 + gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c 11 + k8s.io/apimachinery v0.21.0-alpha.1 12 + k8s.io/client-go v0.21.0-alpha.1 13 + diff --git a/go.sum b/go.sum 14 + index 8f16b5a..7426c68 100644 15 + --- a/go.sum 16 + +++ b/go.sum 17 + @@ -293,6 +293,8 @@ golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7w 18 + golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 19 + golang.org/x/sys v0.0.0-20201112073958-5cba982894dd h1:5CtCZbICpIOFdgO940moixOPjc0178IU44m4EjOO5IY= 20 + golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 21 + +golang.org/x/sys v0.0.0-20220731174439-a90be440212d h1:Sv5ogFZatcgIMMtBSTTAgMYsicp25MXBubjXNDKwm80= 22 + +golang.org/x/sys v0.0.0-20220731174439-a90be440212d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 23 + golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 24 + golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 25 + golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+5 -2
pkgs/development/tools/kubectx/default.nix
··· 11 11 sha256 = "sha256-WY0zFt76mvdzk/s2Rzqys8n+DVw6qg7V6Y8JncOUVCM="; 12 12 }; 13 13 14 - vendorSha256 = "sha256-4sQaqC0BOsDfWH3cHy2EMQNMq6qiAcbV+RwxCdcSxsg="; 14 + patches = [ 15 + ./bump-golang-x-sys.patch 16 + ]; 17 + 18 + vendorSha256 = "sha256-p4KUBmJw6hWG1J2qwg4QBbh6Vo1cr/HQz0IqytIDJjU="; 15 19 16 20 nativeBuildInputs = [ installShellFiles ]; 17 21 ··· 24 28 license = licenses.asl20; 25 29 homepage = "https://github.com/ahmetb/kubectx"; 26 30 maintainers = with maintainers; [ jlesquembre ]; 27 - platforms = with platforms; unix; 28 31 }; 29 32 }
+4 -4
pkgs/development/tools/revive/default.nix
··· 2 2 3 3 buildGoModule rec { 4 4 pname = "revive"; 5 - version = "1.2.1"; 5 + version = "1.2.2"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "mgechev"; 9 9 repo = pname; 10 10 rev = "v${version}"; 11 - sha256 = "sha256-xZakVuw+QKzFh6wsnZbltLEEwyb9WcMvVWEzKnS9aWc="; 11 + sha256 = "sha256-kHnRzjhrUPXgDTBu95Ytrhx5lO5ogm9PMzeANfLcWHQ="; 12 12 # populate values that require us to use git. By doing this in postFetch we 13 13 # can delete .git afterwards and maintain better reproducibility of the src. 14 14 leaveDotGit = true; ··· 18 18 rm -rf $out/.git 19 19 ''; 20 20 }; 21 - vendorSha256 = "sha256-Fpl5i+qMvJ/CDh8X0gps9C/BxF7/Uvln+3DpVOXE0WQ="; 21 + vendorSha256 = "sha256-sa4OkTSRyoPFXTGmjpiqBug+EKgxkcJrNxQwbTRfN2A="; 22 22 23 23 ldflags = [ 24 24 "-s" ··· 35 35 36 36 # The following tests fail when built by nix: 37 37 # 38 - # $ nix log /nix/store/build-revive.1.2.1.drv | grep FAIL 38 + # $ nix log /nix/store/build-revive.1.2.2.drv | grep FAIL 39 39 # 40 40 # --- FAIL: TestAll (0.01s) 41 41 # --- FAIL: TestTimeEqual (0.00s)
+3 -1
pkgs/servers/hbase/default.nix
··· 21 21 installPhase = '' 22 22 mkdir -p $out 23 23 cp -R * $out 24 - wrapProgram $out/bin/hbase --set-default JAVA_HOME ${jdk.home} 24 + wrapProgram $out/bin/hbase --set-default JAVA_HOME ${jdk.home} \ 25 + --run "test -d /etc/hadoop-conf && export HBASE_CONF_DIR=\''${HBASE_CONF_DIR-'/etc/hadoop-conf/'}" \ 26 + --set-default HBASE_CONF_DIR "$out/conf/" 25 27 ''; 26 28 27 29 passthru = { inherit tests; };
+10 -20
pkgs/tools/graphics/mangohud/default.nix
··· 1 1 { lib 2 2 , stdenv 3 3 , fetchFromGitHub 4 - , fetchpatch 5 4 , fetchurl 6 5 , substituteAll 7 6 , coreutils 8 7 , curl 9 - , gawk 10 8 , glxinfo 11 9 , gnugrep 12 10 , gnused 13 - , lsof 14 11 , xdg-utils 15 12 , dbus 16 13 , hwdata 17 14 , libX11 18 15 , mangohud32 19 16 , vulkan-headers 17 + , appstream 20 18 , glslang 21 19 , makeWrapper 20 + , Mako 22 21 , meson 23 22 , ninja 24 23 , pkg-config 25 - , python3Packages 26 24 , unzip 27 25 , vulkan-loader 28 26 , libXNVCtrl ··· 43 41 src = fetchFromGitHub { 44 42 owner = "ocornut"; 45 43 repo = "imgui"; 46 - rev = "v${version}"; 47 - hash = "sha256-rRkayXk3xz758v6vlMSaUu5fui6NR8Md3njhDB0gJ18="; 44 + rev = "refs/tags/v${version}"; 45 + sha256 = "sha256-rRkayXk3xz758v6vlMSaUu5fui6NR8Md3njhDB0gJ18="; 48 46 }; 49 47 patch = fetchurl { 50 48 url = "https://wrapdb.mesonbuild.com/v2/imgui_${version}-1/get_patch"; 51 - hash = "sha256-bQC0QmkLalxdj4mDEdqvvOFtNwz2T1MpTDuMXGYeQ18="; 49 + sha256 = "sha256-bQC0QmkLalxdj4mDEdqvvOFtNwz2T1MpTDuMXGYeQ18="; 52 50 }; 53 51 }; 54 52 in stdenv.mkDerivation rec { 55 53 pname = "mangohud"; 56 - version = "0.6.7-1"; 54 + version = "0.6.8"; 57 55 58 56 src = fetchFromGitHub { 59 57 owner = "flightlessmango"; 60 58 repo = "MangoHud"; 61 - rev = "v${version}"; 59 + rev = "refs/tags/v${version}"; 62 60 fetchSubmodules = true; 63 - sha256 = "sha256-60cZYo+d679KRggLBGbpLYM5Iu1XySEEGp+MxZs6wF0="; 61 + sha256 = "sha256-jfmgN90kViHa7vMOjo2x4bNY2QbLk93uYEvaA4DxYvg="; 64 62 }; 65 63 66 64 outputs = [ "out" "doc" "man" ]; ··· 81 79 path = lib.makeBinPath [ 82 80 coreutils 83 81 curl 84 - gawk 85 82 glxinfo 86 83 gnugrep 87 84 gnused 88 - lsof 89 85 xdg-utils 90 86 ]; 91 87 92 88 libdbus = dbus.lib; 93 89 inherit hwdata libX11; 94 90 }) 95 - 96 - (fetchpatch { 97 - name = "allow-system-nlohmann-json.patch"; 98 - url = "https://github.com/flightlessmango/MangoHud/commit/e1ffa0f85820abea44639438fca2152290c87ee8.patch"; 99 - sha256 = "sha256-CaJb0RpXmNGCBidMXM39VJVLIXb6NbN5HXWkH/5Sfvo="; 100 - }) 101 91 ] ++ lib.optional (stdenv.hostPlatform.system == "x86_64-linux") [ 102 92 # Support 32bit OpenGL applications by appending the mangohud32 103 93 # lib path to LD_LIBRARY_PATH. ··· 124 114 ]; 125 115 126 116 nativeBuildInputs = [ 117 + appstream 127 118 glslang 128 119 makeWrapper 120 + Mako 129 121 meson 130 122 ninja 131 123 pkg-config 132 - python3Packages.Mako 133 - python3Packages.python 134 124 unzip 135 125 vulkan-loader 136 126 ];
+2 -20
pkgs/tools/graphics/mangohud/hardcode-dependencies.patch
··· 1 - From 56a191f6db6d530c2bc89d9d3395b4c9768d108f Mon Sep 17 00:00:00 2001 2 - From: Atemu <atemu.main@gmail.com> 3 - Date: Tue, 17 May 2022 16:58:08 +0200 4 - Subject: [PATCH 1/2] hardcode dependencies 5 - 6 - --- 7 - src/dbus.cpp | 2 +- 8 - src/loaders/loader_x11.cpp | 2 +- 9 - src/logging.cpp | 7 +++++++ 10 - src/pci_ids.cpp | 6 ++---- 11 - 4 files changed, 11 insertions(+), 6 deletions(-) 12 - 13 1 diff --git a/src/dbus.cpp b/src/dbus.cpp 14 2 index 3b3cccb..1405725 100644 15 3 --- a/src/dbus.cpp ··· 34 22 -std::shared_ptr<libx11_loader> g_x11(new libx11_loader("libX11.so.6")); 35 23 +std::shared_ptr<libx11_loader> g_x11(new libx11_loader("@libX11@/lib/libX11.so.6")); 36 24 diff --git a/src/logging.cpp b/src/logging.cpp 37 - index b27f21e..48f5e03 100644 25 + index 1668226..f0c8df5 100644 38 26 --- a/src/logging.cpp 39 27 +++ b/src/logging.cpp 40 - @@ -22,7 +22,14 @@ string exec(string command) { 28 + @@ -24,7 +24,11 @@ string exec(string command) { 41 29 #endif 42 30 std::array<char, 128> buffer; 43 31 std::string result; 44 32 + 45 33 + char* originalPath = getenv("PATH"); 46 34 + setenv("PATH", "@path@", 1); 47 - + 48 35 std::unique_ptr<FILE, decltype(&pclose)> pipe(popen(command.c_str(), "r"), pclose); 49 - + 50 36 + setenv("PATH", originalPath, 1); 51 - + 52 37 if (!pipe) { 53 38 return "popen failed!"; 54 39 } ··· 70 55 } 71 56 72 57 std::string line; 73 - -- 74 - 2.36.0 75 -
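The @path@ and @libX11@ tokens in this patch are not filled in by the patch itself; they are rewritten by the substituteAll call in the package expression before the patch is applied, each @name@ being replaced by the value of the matching attribute. A minimal sketch of that pairing, reduced to the attributes visible in the default.nix hunk above:

  # substituteAll rewrites every @name@ token in the patch file with the value
  # of the matching attribute, producing a store path that goes into `patches`.
  patches = [
    (substituteAll {
      src = ./hardcode-dependencies.patch;
      path = lib.makeBinPath [ coreutils curl glxinfo gnugrep gnused xdg-utils ];
      libdbus = dbus.lib;
      inherit hwdata libX11;
    })
  ];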
+4 -16
pkgs/tools/graphics/mangohud/opengl32-nix-workaround.patch
··· 1 - From 1ac93cbf0eed951af6967a81f731a0f418ea0b3d Mon Sep 17 00:00:00 2001 2 - From: Atemu <atemu.main@gmail.com> 3 - Date: Tue, 17 May 2022 16:58:45 +0200 4 - Subject: [PATCH 2/2] opengl32 nix workaround 5 - 6 - --- 7 - bin/mangohud.in | 2 +- 8 - 1 file changed, 1 insertion(+), 1 deletion(-) 9 - 10 1 diff --git a/bin/mangohud.in b/bin/mangohud.in 11 - index 8ec21de..f65304a 100755 2 + index e13da99..086443c 100755 12 3 --- a/bin/mangohud.in 13 4 +++ b/bin/mangohud.in 14 5 @@ -23,6 +23,6 @@ fi 15 6 # figure out whether the 32 or 64 bit version should be used, and will search 16 7 # for it in the correct directory 17 - LD_PRELOAD="${LD_PRELOAD}:${MANGOHUD_LIB_NAME}" 18 - -LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:@ld_libdir_mangohud@" 19 - +LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:@ld_libdir_mangohud@:@mangohud32@/lib/mangohud" 8 + LD_PRELOAD="${LD_PRELOAD}${LD_PRELOAD:+:}${MANGOHUD_LIB_NAME}" 9 + -LD_LIBRARY_PATH="${LD_LIBRARY_PATH}${LD_LIBRARY_PATH:+:}@ld_libdir_mangohud@" 10 + +LD_LIBRARY_PATH="${LD_LIBRARY_PATH}${LD_LIBRARY_PATH:+:}@ld_libdir_mangohud@:@mangohud32@/lib/mangohud" 20 11 21 12 exec env MANGOHUD=1 LD_LIBRARY_PATH="${LD_LIBRARY_PATH}" LD_PRELOAD="${LD_PRELOAD}" "$@" 22 - -- 23 - 2.36.0 24 -
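The updated lines use ${VAR}${VAR:+:} so the colon separator is only emitted when the variable is already non-empty, which avoids a leading ':' in LD_PRELOAD and LD_LIBRARY_PATH. The same idiom can be reused in a wrapper of one's own; a hedged sketch (the with-mangohud name and the libMangoHud.so preload name are illustrative, the lib/mangohud subdirectory follows the patch above):

  # Hypothetical wrapper reusing the "append with optional separator" idiom;
  # ''${ escapes the shell expansion inside the Nix '' string.
  pkgs.writeShellScriptBin "with-mangohud" ''
    export LD_LIBRARY_PATH="''${LD_LIBRARY_PATH}''${LD_LIBRARY_PATH:+:}${pkgs.mangohud}/lib/mangohud"
    export LD_PRELOAD="''${LD_PRELOAD}''${LD_PRELOAD:+:}libMangoHud.so"
    exec "$@"
  ''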
+2 -2
pkgs/tools/misc/remind/default.nix
··· 16 16 in 17 17 tcl.mkTclDerivation rec { 18 18 pname = "remind"; 19 - version = "04.00.00"; 19 + version = "04.00.01"; 20 20 21 21 src = fetchurl { 22 22 url = "https://dianne.skoll.ca/projects/remind/download/remind-${version}.tar.gz"; 23 - sha256 = "sha256-I7bmsO3EAUnmo2KoIy5myxXuZB8tzs5kCEXpG550x8Y="; 23 + sha256 = "sha256-NhXS7WpFbKuRFiAWDSy+g9PoYVQcpugLppQEF/S5WeI="; 24 24 }; 25 25 26 26 propagatedBuildInputs = tclLibraries;
+2 -2
pkgs/tools/misc/reredirect/default.nix
··· 2 2 3 3 stdenv.mkDerivation rec { 4 4 pname = "reredirect"; 5 - version = "0.2"; 5 + version = "0.3"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "jerome-pouiller"; 9 9 repo = "reredirect"; 10 10 rev = "v${version}"; 11 - sha256 = "0aqzs940kwvw80lhkszx8spcdh9ilsx5ncl9vnp611hwlryfw7kk"; 11 + sha256 = "sha256-RHRamDo7afnJ4DlOVAqM8lQAC60YESGSMKa8Io2vcX0="; 12 12 }; 13 13 14 14 makeFlags = [ "PREFIX=${placeholder "out"}" ];
+2 -2
pkgs/tools/networking/mu/default.nix
··· 14 14 15 15 stdenv.mkDerivation rec { 16 16 pname = "mu"; 17 - version = "1.8.7"; 17 + version = "1.8.8"; 18 18 19 19 src = fetchFromGitHub { 20 20 owner = "djcb"; 21 21 repo = "mu"; 22 22 rev = "v${version}"; 23 - sha256 = "/RGq/q0N623RR4HuAmyfFx1/OYZBrmK1ddhiT4bgy3k="; 23 + hash = "sha256-kgskeQM6zESkjDWmgGqhZlGnH8naZ5k0sw+70ZzW2/E="; 24 24 }; 25 25 26 26 postPatch = ''
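Alongside the version bump, the fetcher switches from the legacy bare sha256 attribute to the SRI-style hash attribute; both are accepted by fetchFromGitHub, the SRI form simply names its algorithm explicitly. Roughly equivalent spellings, using the digest from the hunk above:

  # SRI form, preferred in newer expressions:
  hash = "sha256-kgskeQM6zESkjDWmgGqhZlGnH8naZ5k0sw+70ZzW2/E=";
  # Legacy bare form of the same digest:
  # sha256 = "kgskeQM6zESkjDWmgGqhZlGnH8naZ5k0sw+70ZzW2/E=";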
+2 -2
pkgs/tools/networking/smartdns/default.nix
··· 2 2 3 3 stdenv.mkDerivation rec { 4 4 pname = "smartdns"; 5 - version = "36.1"; 5 + version = "37"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "pymumu"; 9 9 repo = pname; 10 10 rev = "Release${version}"; 11 - sha256 = "sha256-5pAt7IjgbCCGaHeSoQvuoc6KPD9Yn5iXL1CAawgBeY0="; 11 + sha256 = "sha256-zmcLXhqgwP9SKTyUUhOehQRJk6uX7iwgu9WP2TVhsR8="; 12 12 }; 13 13 14 14 buildInputs = [ openssl ];
+6 -7
pkgs/tools/security/buttercup-desktop/default.nix
··· 2 2 3 3 let 4 4 pname = "buttercup-desktop"; 5 - version = "2.14.2"; 6 - name = "${pname}-${version}"; 5 + version = "2.16.0"; 7 6 src = fetchurl { 8 7 url = "https://github.com/buttercup/buttercup-desktop/releases/download/v${version}/Buttercup-linux-x86_64.AppImage"; 9 - sha256 = "sha256-ZZaolebDGqRk4BHP5PxFxBsMgOQAxUoIMTlhxM58k0Y="; 8 + sha256 = "sha256-o6KdbwD0VdCTYLEfar7Jt7MRZUayGHyasnmtU8Cqg3E="; 10 9 }; 11 - appimageContents = appimageTools.extractType2 { inherit name src; }; 10 + appimageContents = appimageTools.extractType2 { inherit pname src version; }; 12 11 13 12 in appimageTools.wrapType2 { 14 - inherit name src; 13 + inherit pname src version; 15 14 16 15 extraPkgs = pkgs: (appimageTools.defaultFhsEnvArgs.multiPkgs pkgs) ++ [ pkgs.libsecret ]; 17 16 18 17 extraInstallCommands = '' 19 - mv $out/bin/${name} $out/bin/buttercup-desktop 18 + mv $out/bin/${pname}-${version} $out/bin/${pname} 20 19 install -m 444 -D ${appimageContents}/buttercup.desktop -t $out/share/applications 21 20 substituteInPlace $out/share/applications/buttercup.desktop \ 22 - --replace 'Exec=AppRun' 'Exec=buttercup-desktop' 21 + --replace 'Exec=AppRun' 'Exec=${pname}' 23 22 cp -r ${appimageContents}/usr/share/icons $out/share 24 23 ''; 25 24
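appimageTools.wrapType2 is now fed pname and version separately instead of a pre-combined name, so the generated wrapper ends up as $out/bin/${pname}-${version} and is then renamed to the bare pname. A hypothetical package following the same convention (name, URL, and hash are placeholders, not a real package):

  # Hypothetical AppImage package in the same pname/version style.
  appimageTools.wrapType2 rec {
    pname = "example-app";        # placeholder
    version = "1.0.0";            # placeholder
    src = fetchurl {
      url = "https://example.org/ExampleApp-${version}.AppImage";  # placeholder
      sha256 = lib.fakeSha256;    # placeholder
    };
    extraInstallCommands = ''
      mv $out/bin/${pname}-${version} $out/bin/${pname}
    '';
  }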
+7 -4
pkgs/tools/security/sigma-cli/default.nix
··· 5 5 6 6 python3.pkgs.buildPythonApplication rec { 7 7 pname = "sigma-cli"; 8 - version = "0.4.3"; 8 + version = "0.5.0"; 9 9 format = "pyproject"; 10 10 11 11 src = fetchFromGitHub { 12 12 owner = "SigmaHQ"; 13 13 repo = pname; 14 14 rev = "refs/tags/v${version}"; 15 - hash = "sha256-3LFakeS3aQaacm7HqeAJPMJhi3Wf8zbJc//SEWUA1Rg="; 15 + hash = "sha256-i0rin4TLoqo+F2nWG4kcFp3x/cRtkMzAo5Ldyo0Si5w="; 16 16 }; 17 17 18 18 nativeBuildInputs = with python3.pkgs; [ ··· 23 23 click 24 24 prettytable 25 25 pysigma 26 + pysigma-backend-elasticsearch 27 + pysigma-backend-insightidr 28 + pysigma-backend-opensearch 29 + pysigma-backend-qradar 26 30 pysigma-backend-splunk 27 - pysigma-backend-insightidr 28 31 pysigma-pipeline-crowdstrike 29 32 pysigma-pipeline-sysmon 30 33 pysigma-pipeline-windows ··· 37 40 postPatch = '' 38 41 substituteInPlace pyproject.toml \ 39 42 --replace 'prettytable = "^3.1.1"' 'prettytable = "*"' \ 40 - --replace 'pysigma = "^0.5.0"' 'pysigma = "*"' 43 + --replace 'pysigma = "^0.7.2"' 'pysigma = "*"' 41 44 ''; 42 45 43 46 pythonImportsCheck = [
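The added pysigma-backend-* dependencies are ordinary Python modules that get wired into python-packages.nix further down in this commit. A heavily hedged sketch of what one such module file roughly looks like (repository owner and name, version, hash, and the poetry-core build backend are all assumptions; the real definitions live under pkgs/development/python-modules/):

  # Hypothetical minimal module for one of the new backends; every concrete
  # value here is an assumption, not taken from the actual package file.
  { lib, buildPythonPackage, fetchFromGitHub, poetry-core, pysigma }:

  buildPythonPackage rec {
    pname = "pysigma-backend-elasticsearch";
    version = "0.0.0";                         # placeholder
    format = "pyproject";

    src = fetchFromGitHub {
      owner = "SigmaHQ";                       # assumption
      repo = "pySigma-backend-elasticsearch";  # assumption
      rev = "refs/tags/v${version}";
      hash = lib.fakeHash;                     # placeholder
    };

    nativeBuildInputs = [ poetry-core ];
    propagatedBuildInputs = [ pysigma ];
  }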
+2 -3
pkgs/top-level/all-packages.nix
··· 8572 8572 mangohud = callPackage ../tools/graphics/mangohud { 8573 8573 libXNVCtrl = linuxPackages.nvidia_x11.settings.libXNVCtrl; 8574 8574 mangohud32 = pkgsi686Linux.mangohud; 8575 + inherit (python3Packages) Mako; 8575 8576 }; 8576 8577 8577 8578 manix = callPackage ../tools/nix/manix { ··· 16237 16238 16238 16239 kubeaudit = callPackage ../tools/security/kubeaudit { }; 16239 16240 16240 - kubectx = callPackage ../development/tools/kubectx { 16241 - buildGoModule = buildGo117Module; 16242 - }; 16241 + kubectx = callPackage ../development/tools/kubectx { }; 16243 16242 16244 16243 kube-linter = callPackage ../development/tools/kube-linter { }; 16245 16244
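The arguments passed explicitly to callPackage here (libXNVCtrl, mangohud32, and now Mako) are also the knobs a consumer can re-inject through .override. A hypothetical overlay sketch:

  # Hypothetical overlay re-injecting one of the explicit callPackage
  # arguments, e.g. to take the 32-bit helper from a customised i686 set.
  final: prev: {
    mangohud = prev.mangohud.override {
      mangohud32 = final.pkgsi686Linux.mangohud;
    };
  }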
+12
pkgs/top-level/python-packages.nix
··· 287 287 288 288 aioeagle = callPackage ../development/python-modules/aioeagle { }; 289 289 290 + aioecowitt = callPackage ../development/python-modules/aioecowitt { }; 291 + 290 292 aioemonitor = callPackage ../development/python-modules/aioemonitor { }; 291 293 292 294 aioesphomeapi = callPackage ../development/python-modules/aioesphomeapi { }; ··· 5459 5461 5460 5462 meteoalertapi = callPackage ../development/python-modules/meteoalertapi { }; 5461 5463 5464 + meteocalc = callPackage ../development/python-modules/meteocalc { }; 5465 + 5462 5466 meteofrance-api = callPackage ../development/python-modules/meteofrance-api { }; 5463 5467 5464 5468 mezzanine = callPackage ../development/python-modules/mezzanine { }; ··· 8196 8200 8197 8201 pysigma = callPackage ../development/python-modules/pysigma { }; 8198 8202 8203 + pysigma-backend-elasticsearch = callPackage ../development/python-modules/pysigma-backend-elasticsearch { }; 8204 + 8205 + pysigma-backend-opensearch = callPackage ../development/python-modules/pysigma-backend-opensearch { }; 8206 + 8207 + pysigma-backend-qradar = callPackage ../development/python-modules/pysigma-backend-qradar { }; 8208 + 8199 8209 pysigma-backend-splunk = callPackage ../development/python-modules/pysigma-backend-splunk { }; 8200 8210 8201 8211 pysigma-backend-insightidr = callPackage ../development/python-modules/pysigma-backend-insightidr { }; ··· 11694 11704 z3 = (toPythonModule (pkgs.z3.override { 11695 11705 inherit python; 11696 11706 })).python; 11707 + 11708 + zadnegoale = callPackage ../development/python-modules/zadnegoale { }; 11697 11709 11698 11710 zake = callPackage ../development/python-modules/zake { }; 11699 11711
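Once registered here, the new attributes are reachable from python3Packages like any other module. A small usage sketch; the particular combination of modules is only illustrative:

  # Illustrative environment pulling in a few of the modules added above.
  pkgs.python3.withPackages (ps: [
    ps.aioecowitt
    ps.meteocalc
    ps.zadnegoale
    ps.pysigma-backend-elasticsearch
  ])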