# For the moment we only support the CPU and GPU backends of jaxlib. The TPU
# backend will require some additional work. Those wheels are located here:
# https://storage.googleapis.com/jax-releases/libtpu_releases.html.

# For future reference, the easiest way to test the GPU backend is to run
# NIX_PATH=.. nix-shell -p python3 python3Packages.jax "python3Packages.jaxlib.override { cudaSupport = true; }"
# export XLA_FLAGS=--xla_gpu_force_compilation_parallelism=1
# python -c "from jax.lib import xla_bridge; assert xla_bridge.get_backend().platform == 'gpu'"
# python -c "from jax import random; random.PRNGKey(0)"
# python -c "from jax import random; x = random.normal(random.PRNGKey(0), (100, 100)); x @ x"
# There's no convenient way to test the GPU backend in the derivation since the
# nix build environment blocks access to the GPU. See also:
# * https://github.com/google/jax/issues/971#issuecomment-508216439
# * https://github.com/google/jax/issues/5723#issuecomment-913038780

{ addOpenGLRunpath, autoPatchelfHook, buildPythonPackage, config
, fetchurl, isPy39, lib, stdenv
# propagatedBuildInputs
, absl-py, flatbuffers, scipy, cudatoolkit_11
# Options:
, cudaSupport ? config.cudaSupport or false
}:

# The GPU wheel below is built against CUDA 11.1, so refuse older toolkits
# up front rather than failing at runtime with a missing-symbol error.
assert cudaSupport -> lib.versionAtLeast cudatoolkit_11.version "11.1";

let
  # Selects which prebuilt wheel to fetch in `src` below.
  device = if cudaSupport then "gpu" else "cpu";
in
buildPythonPackage rec {
  pname = "jaxlib";
  version = "0.1.71";
  format = "wheel";

  # At the time of writing (8/19/21), there are releases for 3.7-3.9. Supporting
  # all of them is a pain, so we focus on 3.9, the current nixpkgs python3
  # version.
  disabled = !isPy39;

  src = {
    cpu = fetchurl {
      url = "https://storage.googleapis.com/jax-releases/nocuda/jaxlib-${version}-cp39-none-manylinux2010_x86_64.whl";
      sha256 = "sha256:0rqhs6qabydizlv5d3rb20dbv6612rr7dqfniy9r6h4kazdinsn6";
    };
    gpu = fetchurl {
      url = "https://storage.googleapis.com/jax-releases/cuda111/jaxlib-${version}+cuda111-cp39-none-manylinux2010_x86_64.whl";
      sha256 = "sha256:065kyzjsk9m84d138p99iymdiiicm1qz8a3iwxz8rspl43rwrw89";
    };
  }.${device};

  # Prebuilt wheels are dynamically linked against things that nix can't find.
  # Run `autoPatchelfHook` to automagically fix them.
  nativeBuildInputs = [ autoPatchelfHook ] ++ lib.optional cudaSupport addOpenGLRunpath;
  # Dynamic link dependencies
  buildInputs = [ stdenv.cc.cc ];

  # jaxlib contains shared libraries that open other shared libraries via dlopen
  # and these implicit dependencies are not recognized by ldd or
  # autoPatchelfHook. That means we need to sneak them into rpath. This step
  # must be done after autoPatchelfHook and the automatic stripping of
  # artifacts. autoPatchelfHook runs in postFixup and auto-stripping runs in the
  # patchPhase. Dependencies:
  # * libcudart.so.11.0 -> cudatoolkit_11.lib
  # * libcublas.so.11 -> cudatoolkit_11
  # * libcuda.so.1 -> opengl driver in /run/opengl-driver/lib
  #
  # NOTE: `lib.optionalString` (not `lib.optional`, which builds a one-element
  # list) is the correct combinator for a string-valued phase attribute.
  preInstallCheck = lib.optionalString cudaSupport ''
    shopt -s globstar

    addOpenGLRunpath $out/**/*.so

    for file in $out/**/*.so; do
      rpath=$(patchelf --print-rpath $file)
      # For some reason `makeLibraryPath` on `cudatoolkit_11` maps to
      # <cudatoolkit_11.lib>/lib which is different from <cudatoolkit_11>/lib.
      patchelf --set-rpath "$rpath:${cudatoolkit_11}/lib:${lib.makeLibraryPath [ cudatoolkit_11.lib ]}" $file
    done
  '';

  # pip dependencies and optionally cudatoolkit.
  propagatedBuildInputs = [ absl-py flatbuffers scipy ] ++ lib.optional cudaSupport cudatoolkit_11;

  pythonImportsCheck = [ "jaxlib" ];

  meta = with lib; {
    description = "XLA library for JAX";
    homepage = "https://github.com/google/jax";
    license = licenses.asl20;
    maintainers = with maintainers; [ samuela ];
    platforms = [ "x86_64-linux" ];
  };
}