spark: provided-hadoop -> nixpkgs hadoop

Spark should use the Hadoop package provided by nixpkgs, to keep it
consistent with a bigger Nix Hadoop + Spark setup.
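
With hadoop now an argument of the Spark expression, one Hadoop derivation can serve the whole stack. A minimal sketch of such a setup, assuming spark is wired through callPackage as usual (the attribute names here are illustrative, not part of this commit):

let
  pkgs = import <nixpkgs> { };
  # One Hadoop derivation shared by the whole cluster stack.
  clusterHadoop = pkgs.hadoop;
in {
  hadoop = clusterHadoop;
  # .override swaps in the hadoop argument this commit introduces.
  spark = pkgs.spark.override { hadoop = clusterHadoop; };
}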

+10 -13
pkgs/applications/networking/cluster/spark/default.nix
···
-{ stdenv, fetchzip, makeWrapper, jre, pythonPackages, coreutils
+{ stdenv, fetchzip, makeWrapper, jre, pythonPackages, coreutils, hadoop
 , RSupport? true, R
 , mesosSupport ? true, mesos
 , version
 }:

 let
-  versionMap = {
-    "2.2.1" = {
-      hadoopVersion = "hadoop2.7";
-      sparkSha256 = "10nxsf9a6hj1263sxv0cbdqxdb8mb4cl6iqq32ljq9ydvk32s99c";
-    };
-  };
+  sha256 = {
+    "1.6.3" = "142hw73wf20d846l83ydx0yg7qj5qxywm4h7qrhwnd7lsy2sbnjf";
+    "2.2.1" = "10nxsf9a6hj1263sxv0cbdqxdb8mb4cl6iqq32ljq9ydvk32s99c";
+  }.${version};
 in

-with versionMap.${version};
-
 with stdenv.lib;

 stdenv.mkDerivation rec {
···
   name = "spark-${version}";

   src = fetchzip {
-    url = "mirror://apache/spark/${name}/${name}-bin-${hadoopVersion}.tgz";
-    sha256 = sparkSha256;
+    inherit sha256;
+    url = "mirror://apache/spark/${name}/${name}-bin-without-hadoop.tgz";
   };

   buildInputs = [ makeWrapper jre pythonPackages.python pythonPackages.numpy ]
     ++ optional RSupport R
     ++ optional mesosSupport mesos;

-  untarDir = "${name}-bin-${hadoopVersion}";
+  untarDir = "${name}-bin-without-hadoop";
   installPhase = ''
     mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java}
     mv * $out/lib/${untarDir}
···
     cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
     export JAVA_HOME="${jre}"
     export SPARK_HOME="$out/lib/${untarDir}"
+    export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath)
     export PYSPARK_PYTHON="${pythonPackages.python}/bin/${pythonPackages.python.executable}"
     export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
     ${optionalString RSupport
···
     homepage = "http://spark.apache.org";
     license = stdenv.lib.licenses.asl20;
     platforms = stdenv.lib.platforms.all;
-    maintainers = with maintainers; [ thoughtpolice offline ];
+    maintainers = with maintainers; [ thoughtpolice offline kamilchm ];
     repositories.git = git://git.apache.org/spark.git;
   };
 }
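
Because the -bin-without-hadoop tarball ships no Hadoop jars, the generated spark-env.sh now fills SPARK_DIST_CLASSPATH from the hadoop derivation's bin/hadoop classpath output, so Spark runs against exactly the Hadoop that Nix built. A hedged sketch of picking one of the versions in the sha256 map above (the .override call assumes the usual callPackage wiring; any argument in the diff's header can be swapped this way):

with import <nixpkgs> { };

# Hypothetical: build Spark 1.6.3, the release this commit adds to the
# sha256 map, against the stock nixpkgs Hadoop.
spark.override {
  version = "1.6.3";
  RSupport = false;  # any argument from the expression's header works here
}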