{ buildPythonPackage, fetchPypi, lib, py4j }:

buildPythonPackage rec {
  pname = "pyspark";
  version = "3.1.1";

  src = fetchPypi {
    inherit pname version;
    sha256 = "104abc146d4ffb72d4c683d25d7af5a6bf955d94590a76f542ee23185670aa7e";
  };

  # pypandoc is broken with pandoc2, so we just lose docs.
  postPatch = ''
    sed -i "s/'pypandoc'//" setup.py

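    # Loosen the exact py4j pin to the compatible >=0.10.9,<0.11 range.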
    substituteInPlace setup.py --replace py4j==0.10.9 'py4j>=0.10.9,<0.11'
  '';

  propagatedBuildInputs = [ py4j ];

  # Tests assume a running Spark instance.
  doCheck = false;

  meta = with lib; {
    description = "Python bindings for Apache Spark";
    homepage = "https://github.com/apache/spark/tree/master/python";
    license = licenses.asl20;
    maintainers = [ maintainers.shlevy ];
  };
}