spark: provided-hadoop -> nixpkgs hadoop

spark should use the hadoop package provided by nixpkgs, to keep it
consistent within a larger Nix-based hadoop + spark setup

+10 -13
pkgs/applications/networking/cluster/spark/default.nix
··· 1 - { stdenv, fetchzip, makeWrapper, jre, pythonPackages, coreutils 1 + { stdenv, fetchzip, makeWrapper, jre, pythonPackages, coreutils, hadoop 2 2 , RSupport? true, R 3 3 , mesosSupport ? true, mesos 4 4 , version 5 5 }: 6 6 7 7 let 8 - versionMap = { 9 - "2.2.1" = { 10 - hadoopVersion = "hadoop2.7"; 11 - sparkSha256 = "10nxsf9a6hj1263sxv0cbdqxdb8mb4cl6iqq32ljq9ydvk32s99c"; 12 - }; 13 - }; 8 + sha256 = { 9 + "1.6.3" = "142hw73wf20d846l83ydx0yg7qj5qxywm4h7qrhwnd7lsy2sbnjf"; 10 + "2.2.1" = "10nxsf9a6hj1263sxv0cbdqxdb8mb4cl6iqq32ljq9ydvk32s99c"; 11 + }.${version}; 14 12 in 15 13 16 - with versionMap.${version}; 17 - 18 14 with stdenv.lib; 19 15 20 16 stdenv.mkDerivation rec { ··· 22 18 name = "spark-${version}"; 23 19 24 20 src = fetchzip { 25 - url = "mirror://apache/spark/${name}/${name}-bin-${hadoopVersion}.tgz"; 26 - sha256 = sparkSha256; 21 + inherit sha256; 22 + url = "mirror://apache/spark/${name}/${name}-bin-without-hadoop.tgz"; 27 23 }; 28 24 29 25 buildInputs = [ makeWrapper jre pythonPackages.python pythonPackages.numpy ] 30 26 ++ optional RSupport R 31 27 ++ optional mesosSupport mesos; 32 28 33 - untarDir = "${name}-bin-${hadoopVersion}"; 29 + untarDir = "${name}-bin-without-hadoop"; 34 30 installPhase = '' 35 31 mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java} 36 32 mv * $out/lib/${untarDir} ··· 42 38 cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF 43 39 export JAVA_HOME="${jre}" 44 40 export SPARK_HOME="$out/lib/${untarDir}" 41 + export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath) 45 42 export PYSPARK_PYTHON="${pythonPackages.python}/bin/${pythonPackages.python.executable}" 46 43 export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH" 47 44 ${optionalString RSupport ··· 63 60 homepage = "http://spark.apache.org"; 64 61 license = stdenv.lib.licenses.asl20; 65 62 platforms = stdenv.lib.platforms.all; 66 - maintainers = with maintainers; [ thoughtpolice offline ]; 63 + maintainers = with maintainers; [ thoughtpolice offline kamilchm ]; 67 64 
repositories.git = git://git.apache.org/spark.git; 68 65 }; 69 66 }