{ stdenv
, lib
, fetchurl
, buildPythonPackage
, isPy3k, pythonOlder, pythonAtLeast, astor
, gast
, google-pasta
, wrapt
, numpy
, six
, termcolor
, packaging
, protobuf
, absl-py
, grpcio
, mock
, scipy
, wheel
, opt-einsum
, backports_weakref
, tensorflow-estimator
, tensorboard
, cudaSupport ? false
, cudaPackages ? {}
, patchelfUnstable
, zlib
, python
, keras-applications
, keras-preprocessing
, addOpenGLRunpath
, astunparse
, flatbuffers
, h5py
, typing-extensions
}:

# We keep this binary build for two reasons:
# - the source build doesn't work on Darwin.
# - the source build is currently brittle and not easy to maintain

# unsupported combination
assert ! (stdenv.isDarwin && cudaSupport);

let
  # Maps "<platform>_py_<pyver>_<unit>" keys to { version, url, hash } attrsets
  # for the upstream-published wheels.
  packages = import ./binary-hashes.nix;
  inherit (cudaPackages) cudatoolkit cudnn;
in buildPythonPackage {
  pname = "tensorflow" + lib.optionalString cudaSupport "-gpu";
  inherit (packages) version;
  format = "wheel";

  # Select the prebuilt wheel matching the interpreter version, OS and
  # CPU/GPU variant, e.g. "linux_py_39_gpu".
  src = let
    pyVerNoDot = lib.strings.stringAsChars (x: if x == "." then "" else x) python.pythonVersion;
    platform = if stdenv.isDarwin then "mac" else "linux";
    unit = if cudaSupport then "gpu" else "cpu";
    key = "${platform}_py_${pyVerNoDot}_${unit}";
  in fetchurl packages.${key};

  propagatedBuildInputs = [
    astunparse
    flatbuffers
    typing-extensions
    packaging
    protobuf
    numpy
    scipy
    termcolor
    grpcio
    six
    astor
    absl-py
    gast
    opt-einsum
    google-pasta
    wrapt
    tensorflow-estimator
    tensorboard
    keras-applications
    keras-preprocessing
    h5py
  ] ++ lib.optional (!isPy3k) mock
  ++ lib.optionals (pythonOlder "3.4") [ backports_weakref ];

  # remove patchelfUnstable once patchelf 0.14 with https://github.com/NixOS/patchelf/pull/256 becomes the default
  nativeBuildInputs = [ wheel ] ++ lib.optionals cudaSupport [ addOpenGLRunpath patchelfUnstable ];

  # Unpack the wheel, strip dependency pins from its METADATA that don't
  # match what nixpkgs provides, and repack it under its original name.
  preConfigure = ''
    unset SOURCE_DATE_EPOCH

    # Make sure that dist and the wheel file are writable.
    chmod u+rwx -R ./dist

    pushd dist

    orig_name="$(echo ./*.whl)"
    wheel unpack --dest unpacked ./*.whl
    rm ./*.whl
    (
      cd unpacked/tensorflow*
      # Adjust dependency requirements:
      # - Relax flatbuffers, gast, protobuf, tensorboard, and tensorflow-estimator version requirements that don't match what we have packaged
      # - The purpose of python3Packages.libclang is not clear at the moment and we don't have it packaged yet
      # - keras and tensorflow-io-gcs-filesystem will be considered as optional for now.
      sed -i *.dist-info/METADATA \
        -e "/Requires-Dist: flatbuffers/d" \
        -e "/Requires-Dist: gast/d" \
        -e "/Requires-Dist: keras/d" \
        -e "/Requires-Dist: libclang/d" \
        -e "/Requires-Dist: protobuf/d" \
        -e "/Requires-Dist: tensorboard/d" \
        -e "/Requires-Dist: tensorflow-estimator/d" \
        -e "/Requires-Dist: tensorflow-io-gcs-filesystem/d"
    )
    wheel pack ./unpacked/tensorflow*
    mv *.whl "$orig_name" # avoid changes to the _os_arch.whl suffix

    popd
  '';

  # Note that we need to run *after* the fixup phase because the
  # libraries are loaded at runtime. If we run in preFixup then
  # patchelf --shrink-rpath will remove the cuda libraries.
  postFixup =
    let
      # rpaths we only need to add if CUDA is enabled.
      cudapaths = lib.optionals cudaSupport [
        cudatoolkit.out
        cudatoolkit.lib
        cudnn
      ];

      libpaths = [
        stdenv.cc.cc.lib
        zlib
      ];

      rpath = lib.makeLibraryPath (libpaths ++ cudapaths);
    in
    lib.optionalString stdenv.isLinux ''
      # This is an array containing all the directories in the tensorflow2
      # package that contain .so files.
      #
      # TODO: Create this list programmatically, and remove paths that aren't
      # actually needed.
      rrPathArr=(
        "$out/${python.sitePackages}/tensorflow/"
        "$out/${python.sitePackages}/tensorflow/core/kernels"
        "$out/${python.sitePackages}/tensorflow/compiler/tf2tensorrt/"
        "$out/${python.sitePackages}/tensorflow/compiler/tf2xla/ops/"
        "$out/${python.sitePackages}/tensorflow/lite/experimental/microfrontend/python/ops/"
        "$out/${python.sitePackages}/tensorflow/lite/python/interpreter_wrapper/"
        "$out/${python.sitePackages}/tensorflow/lite/python/optimize/"
        "$out/${python.sitePackages}/tensorflow/python/"
        "$out/${python.sitePackages}/tensorflow/python/framework/"
        "$out/${python.sitePackages}/tensorflow/python/autograph/impl/testing"
        "$out/${python.sitePackages}/tensorflow/python/data/experimental/service"
        "$out/${python.sitePackages}/tensorflow/python/profiler/internal"
        "${rpath}"
      )

      # Turn the bash array into a colon-separated list of RPATHs.
      rrPath=$(IFS=$':'; echo "''${rrPathArr[*]}")
      echo "about to run patchelf with the following rpath: $rrPath"

      find "$out" -type f \( -name '*.so' -or -name '*.so.*' \) | while read -r lib; do
        echo "about to patchelf $lib..."
        chmod a+rx "$lib"
        patchelf --set-rpath "$rrPath" "$lib"
        ${lib.optionalString cudaSupport ''
          addOpenGLRunpath "$lib"
        ''}
      done
    '';

  # Upstream has a pip hack that results in bin/tensorboard being in both tensorflow
  # and the propagated input tensorboard, which causes environment collisions.
  # Another possibility would be to have tensorboard only in the buildInputs
  # See https://github.com/NixOS/nixpkgs/pull/44381 for more information.
  postInstall = ''
    rm $out/bin/tensorboard
  '';

  pythonImportsCheck = [
    "tensorflow"
    "tensorflow.python"
    "tensorflow.python.framework"
  ];

  # Expose the CUDA package set so dependent packages can build against
  # the same cudatoolkit/cudnn versions.
  passthru = {
    inherit cudaPackages;
  };

  meta = with lib; {
    description = "Computation using data flow graphs for scalable machine learning";
    homepage = "https://www.tensorflow.org/";
    sourceProvenance = with sourceTypes; [ binaryNativeCode ];
    license = licenses.asl20;
    maintainers = with maintainers; [ jyp abbradar cdepillabout ];
    platforms = [ "x86_64-linux" "x86_64-darwin" ];
  };
}