{ lib
, stdenv
, addOpenGLRunpath
, autoPatchelfHook
, buildPythonPackage
, cudaPackages
, fetchurl
, pythonAtLeast
, pythonOlder
, pillow
, python
, torch-bin
}:

let
  pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion;
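  # binary-hashes.nix is a function of the version; it returns an attrset keyed by
  # "${system}-${pyVerNoDot}" (e.g. "x86_64-linux-311") whose values are the
  # argument sets passed to fetchurl below.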
  srcs = import ./binary-hashes.nix version;
  unsupported = throw "Unsupported system";
  version = "0.15.2";
in buildPythonPackage {
  inherit version;

  pname = "torchvision";

  format = "wheel";

  src = fetchurl srcs."${stdenv.system}-${pyVerNoDot}" or unsupported;

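  # Upstream publishes wheels for this release only for CPython 3.8 through 3.11.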
  disabled = (pythonOlder "3.8") || (pythonAtLeast "3.12");

  # Note that we don't rely on config.cudaSupport here, because the Linux wheels all come built with CUDA support.
  buildInputs = with cudaPackages; lib.optionals stdenv.isLinux [
    # $out/${sitePackages}/torchvision/_C.so wants libcudart.so.11.0 but torchvision.libs only ships
    # libcudart.$hash.so.11.0
    cuda_cudart
  ];

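  # autoPatchelfHook rewrites the RUNPATHs of the prebuilt shared objects so that
  # they resolve against the inputs above; addOpenGLRunpath additionally adds
  # /run/opengl-driver/lib so that driver-provided libraries such as libcuda.so
  # can be found at run time on NixOS.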
  nativeBuildInputs = lib.optionals stdenv.isLinux [
    autoPatchelfHook
    addOpenGLRunpath
  ];

  propagatedBuildInputs = [
    pillow
    torch-bin
  ];

  # The wheel binary is not stripped, because stripping it leads to
  # `ImportError: libtorch_cuda_cpp.so: ELF load command address/offset not properly aligned.`.
  dontStrip = true;

  pythonImportsCheck = [ "torchvision" ];

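  # torch-bin ships libtorch and its companion libraries inside its site-packages
  # directory; make them visible to autoPatchelfHook so torchvision's extension
  # modules can be patched to link against them.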
  preInstall = lib.optionalString stdenv.isLinux ''
    addAutoPatchelfSearchPath "${torch-bin}/${python.sitePackages}/torch"
  '';

  meta = with lib; {
    description = "PyTorch vision library";
    homepage = "https://pytorch.org/";
    changelog = "https://github.com/pytorch/vision/releases/tag/v${version}";
    # Includes CUDA and Intel MKL, but redistribution of the binaries is not restricted.
    # https://docs.nvidia.com/cuda/eula/index.html
    # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html
    license = licenses.bsd3;
    sourceProvenance = with sourceTypes; [ binaryNativeCode ];
    platforms = [ "aarch64-darwin" "x86_64-darwin" "x86_64-linux" ];
    maintainers = with maintainers; [ junjihashimoto ];
  };
}