lol

python310Packages.pyspark: 3.3.0 -> 3.3.1

+24 -3
pkgs/development/python-modules/pyspark/default.nix
 { lib
 , buildPythonPackage
 , fetchPypi
+, numpy
+, pandas
 , py4j
+, pyarrow
+, pythonOlder
 }:

 buildPythonPackage rec {
   pname = "pyspark";
-  version = "3.3.0";
+  version = "3.3.1";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";

   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-fr6OlQVke00STVqC/KYN/TiRAhz4rWxeyId37uzpLPc=";
+    hash = "sha256-6Z+n3pK+QGiEv9gxwyuTBqOpneRM/Dmi7vtu0HRF1fo=";
   };

   # pypandoc is broken with pandoc2, so we just lose docs.
···
     py4j
   ];

+  passthru.optional-dependencies = {
+    ml = [
+      numpy
+    ];
+    mllib = [
+      numpy
+    ];
+    sql = [
+      numpy
+      pandas
+      pyarrow
+    ];
+  };
+
   # Tests assume running spark instance
   doCheck = false;
···
       binaryBytecode
     ];
     license = licenses.asl20;
-    maintainers = [ maintainers.shlevy ];
+    maintainers = with maintainers; [ shlevy ];
   };
 }