Patch summary: remove the `untarDir` passthru attribute from the Hadoop package — install the distribution directly into `$out` instead of `$out/lib/${untarDir}` — and update every reference in the NixOS hadoop modules and tests accordingly.

hadoop, nixos/hadoop: remove untarDir

+33 -34
+9 -9
nixos/modules/services/cluster/hadoop/default.nix
··· 67 67 mapredSiteDefault = mkOption { 68 68 default = { 69 69 "mapreduce.framework.name" = "yarn"; 70 - "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}"; 71 - "mapreduce.map.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}"; 72 - "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}"; 70 + "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}"; 71 + "mapreduce.map.env" = "HADOOP_MAPRED_HOME=${cfg.package}"; 72 + "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=${cfg.package}"; 73 73 }; 74 74 defaultText = literalExpression '' 75 75 { 76 76 "mapreduce.framework.name" = "yarn"; 77 - "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}"; 78 - "mapreduce.map.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}"; 79 - "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}"; 77 + "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}"; 78 + "mapreduce.map.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}"; 79 + "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}"; 80 80 } 81 81 ''; 82 82 type = types.attrsOf types.anything; ··· 154 154 }; 155 155 156 156 log4jProperties = mkOption { 157 - default = "${cfg.package}/lib/${cfg.package.untarDir}/etc/hadoop/log4j.properties"; 157 + default = "${cfg.package}/etc/hadoop/log4j.properties"; 158 158 defaultText = literalExpression '' 159 - "''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}/etc/hadoop/log4j.properties" 159 + "''${config.${opt.package}}/etc/hadoop/log4j.properties" 160 160 ''; 161 161 type = types.path; 162 162 example = literalExpression '' 163 - "''${pkgs.hadoop}/lib/''${pkgs.hadoop.untarDir}/etc/hadoop/log4j.properties"; 163 + "''${pkgs.hadoop}/etc/hadoop/log4j.properties"; 164 164 ''; 165 165 description = lib.mdDoc "log4j.properties file added to HADOOP_CONF_DIR"; 166 166 };
+1 -1
nixos/modules/services/cluster/hadoop/yarn.nix
··· 160 160 umount /run/wrappers/yarn-nodemanager/cgroup/cpu || true 161 161 rm -rf /run/wrappers/yarn-nodemanager/ || true 162 162 mkdir -p /run/wrappers/yarn-nodemanager/{bin,etc/hadoop,cgroup/cpu} 163 - cp ${cfg.package}/lib/${cfg.package.untarDir}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/ 163 + cp ${cfg.package}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/ 164 164 chgrp hadoop /run/wrappers/yarn-nodemanager/bin/container-executor 165 165 chmod 6050 /run/wrappers/yarn-nodemanager/bin/container-executor 166 166 cp ${hadoopConf}/container-executor.cfg /run/wrappers/yarn-nodemanager/etc/hadoop/
+1 -1
nixos/tests/hadoop/hadoop.nix
··· 249 249 assert "standby" in client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState") 250 250 client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat") 251 251 252 - assert "Estimated value of Pi is" in client.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~lib/hadoop-*/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10") 252 + assert "Estimated value of Pi is" in client.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10") 253 253 assert "SUCCEEDED" in client.succeed("yarn application -list -appStates FINISHED") 254 254 ''; 255 255 })
+22 -23
pkgs/applications/networking/cluster/hadoop/default.nix
··· 29 29 30 30 let 31 31 common = { 32 - pname, platformAttrs, untarDir ? "${pname}-${version}", jdk 33 - , nativeLibs ? [ ], libPatchesGenerator ? (_: ""), tests 32 + pname, platformAttrs, jdk, nativeLibs ? [ ] 33 + , libPatchesGenerator ? (_: ""), tests 34 34 }: 35 35 stdenv.mkDerivation (finalAttrs: { 36 - inherit pname jdk untarDir; 36 + inherit pname jdk; 37 37 libPatches = libPatchesGenerator finalAttrs; 38 38 version = platformAttrs.${stdenv.system}.version or (throw "Unsupported system: ${stdenv.system}"); 39 39 src = fetchurl { ··· 51 51 }; 52 52 53 53 nativeBuildInputs = [ makeWrapper ] 54 - ++ optionals (stdenv.isLinux && (nativeLibs != [ ] || (libPatches finalAttrs) != "")) [ autoPatchelfHook ]; 54 + ++ optionals (stdenv.isLinux && (nativeLibs != [ ] || (libPatchesGenerator finalAttrs) != "")) [ autoPatchelfHook ]; 55 55 buildInputs = [ openssl ] ++ nativeLibs; 56 56 57 57 installPhase = '' 58 - mkdir -p $out/{lib/${finalAttrs.untarDir}/conf,bin,lib} 59 - mv * $out/lib/${finalAttrs.untarDir} 58 + mkdir $out 59 + mv * $out/ 60 60 '' + optionalString stdenv.isLinux '' 61 61 for n in $(find ${finalAttrs.containerExecutor}/bin -type f); do 62 - ln -sf "$n" $out/lib/${finalAttrs.untarDir}/bin 62 + ln -sf "$n" $out/bin 63 63 done 64 64 '' + '' 65 - for n in $(find $out/lib/${finalAttrs.untarDir}/bin -type f ! -name "*.*"); do 66 - makeWrapper "$n" "$out/bin/$(basename $n)"\ 65 + for n in $(find $out/bin -type f ! -name "*.*"); do 66 + wrapProgram "$n"\ 67 67 --set-default JAVA_HOME ${finalAttrs.jdk.home}\ 68 - --set-default HADOOP_HOME $out/lib/${finalAttrs.untarDir}\ 68 + --set-default HADOOP_HOME $out/\ 69 69 --run "test -d /etc/hadoop-conf && export HADOOP_CONF_DIR=\''${HADOOP_CONF_DIR-'/etc/hadoop-conf/'}"\ 70 - --set-default HADOOP_CONF_DIR $out/lib/${finalAttrs.untarDir}/etc/hadoop/\ 70 + --set-default HADOOP_CONF_DIR $out/etc/hadoop/\ 71 71 --prefix PATH : "${makeBinPath [ bash coreutils which]}"\ 72 72 --prefix JAVA_LIBRARY_PATH : "${makeLibraryPath finalAttrs.buildInputs}" 73 73 done 74 74 '' + optionalString sparkSupport '' 75 75 # Add the spark shuffle service jar to YARN 76 - cp ${spark.src}/yarn/spark-${spark.version}-yarn-shuffle.jar $out/lib/${finalAttrs.untarDir}/share/hadoop/yarn/ 76 + cp ${spark.src}/yarn/spark-${spark.version}-yarn-shuffle.jar $out/share/hadoop/yarn/ 77 77 '' + (finalAttrs.libPatches); 78 78 79 79 passthru = { inherit tests; }; ··· 101 101 }); 102 102 nativeLibs = [ stdenv.cc.cc.lib protobuf zlib snappy libtirpc ]; 103 103 libPatchesGenerator = finalAttrs: ('' 104 - ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/${finalAttrs.untarDir}/lib/native/libsasl2.so.2 105 - ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/${finalAttrs.untarDir}/lib/native/ 106 - ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/ 107 - ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/ 108 - ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/ 104 + ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/native/libsasl2.so.2 105 + ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/native/ 106 + ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/native/ 107 + ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/native/ 108 + ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/native/ 109 109 '' + optionalString stdenv.isLinux '' 110 110 # libjvm.so for Java >=11 111 - patchelf --add-rpath ${finalAttrs.jdk.home}/lib/server $out/lib/${finalAttrs.untarDir}/lib/native/libnativetask.so.1.0.0 111 + patchelf --add-rpath ${finalAttrs.jdk.home}/lib/server $out/lib/native/libnativetask.so.1.0.0 112 112 # Java 8 has libjvm.so at a different path 113 - patchelf --add-rpath ${finalAttrs.jdk.home}/jre/lib/amd64/server $out/lib/${finalAttrs.untarDir}/lib/native/libnativetask.so.1.0.0 113 + patchelf --add-rpath ${finalAttrs.jdk.home}/jre/lib/amd64/server $out/lib/native/libnativetask.so.1.0.0 114 114 # NixOS/nixpkgs#193370 115 115 # This workaround is needed to use protobuf 3.19 116 116 # for hadoop 3.3 117 - patchelf --replace-needed libprotobuf.so.18 libprotobuf.so $out/lib/${finalAttrs.untarDir}/lib/native/libhdfspp.so 117 + patchelf --replace-needed libprotobuf.so.18 libprotobuf.so $out/lib/native/libhdfspp.so 118 118 # for hadoop 3.2 119 - patchelf --replace-needed libprotobuf.so.8 libprotobuf.so $out/lib/${finalAttrs.untarDir}/lib/native/libhdfspp.so 119 + patchelf --replace-needed libprotobuf.so.8 libprotobuf.so $out/lib/native/libhdfspp.so 120 120 patchelf --replace-needed libcrypto.so.1.1 libcrypto.so \ 121 - $out/lib/${finalAttrs.untarDir}/lib/native/{libhdfspp.so.0.1.0,examples/{pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}} 121 + $out/lib/native/{libhdfspp.so.0.1.0,examples/{pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}} 122 122 ''); 123 123 in 124 124 { ··· 138 138 }; 139 139 aarch64-darwin = aarch64-linux; 140 140 }; 141 - untarDir = "${pname}-${platformAttrs.${stdenv.system}.version}"; 142 141 jdk = jdk11_headless; 143 142 inherit nativeLibs libPatchesGenerator; 144 143 # TODO: Package and add Intel Storage Acceleration Library