Merge pull request #143525 from junjihashimoto/bump-pytorch

authored by Sandro and committed by GitHub 7ba1fac5 e8aa00d4

+245 -91
+24 -44
pkgs/development/libraries/science/math/libtorch/bin.nix
··· 2 , stdenv 3 , fetchzip 4 , lib 5 6 , addOpenGLRunpath 7 , patchelf 8 , fixDarwinDylibNames 9 10 , cudaSupport 11 - , cudatoolkit_11_1 12 - , cudnn_cudatoolkit_11_1 13 }: 14 15 let ··· 18 # this derivation. However, we should ensure on version bumps 19 # that the CUDA toolkit for `passthru.tests` is still 20 # up-to-date. 21 - version = "1.9.0"; 22 device = if cudaSupport then "cuda" else "cpu"; 23 srcs = import ./binary-hashes.nix version; 24 unavailable = throw "libtorch is not available for this platform"; 25 in stdenv.mkDerivation { 26 inherit version; 27 pname = "libtorch"; ··· 67 ''} 68 done 69 '' + lib.optionalString stdenv.isDarwin '' 70 - install_name_tool -change @rpath/libshm.dylib $out/lib/libshm.dylib $out/lib/libtorch_python.dylib 71 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libtorch_python.dylib 72 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libtorch_python.dylib 73 - install_name_tool -change @rpath/libtorch.dylib $out/lib/libtorch.dylib $out/lib/libtorch_python.dylib 74 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libtorch_python.dylib 75 - 76 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libtorch.dylib 77 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libtorch.dylib 78 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libtorch.dylib 79 - 80 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libtorch_cpu.dylib 81 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libtorch_cpu.dylib 82 - install_name_tool -change @rpath/libtensorpipe.dylib $out/lib/libtensorpipe.dylib $out/lib/libtorch_cpu.dylib 83 - 84 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libcaffe2_observers.dylib 85 - install_name_tool -change @rpath/libiomp5.dylib 
$out/lib/libiomp5.dylib $out/lib/libcaffe2_observers.dylib 86 - install_name_tool -change @rpath/libtorch.dylib $out/lib/libtorch.dylib $out/lib/libcaffe2_observers.dylib 87 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libcaffe2_observers.dylib 88 - 89 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libcaffe2_module_test_dynamic.dylib 90 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libcaffe2_module_test_dynamic.dylib 91 - install_name_tool -change @rpath/libtorch.dylib $out/lib/libtorch.dylib $out/lib/libcaffe2_module_test_dynamic.dylib 92 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libcaffe2_module_test_dynamic.dylib 93 - 94 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libcaffe2_detectron_ops.dylib 95 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libcaffe2_detectron_ops.dylib 96 - install_name_tool -change @rpath/libtorch.dylib $out/lib/libtorch.dylib $out/lib/libcaffe2_detectron_ops.dylib 97 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libcaffe2_detectron_ops.dylib 98 - 99 - install_name_tool -change @rpath/libc10.dylib $out/lib/libc10.dylib $out/lib/libshm.dylib 100 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libshm.dylib 101 - install_name_tool -change @rpath/libtorch.dylib $out/lib/libtorch.dylib $out/lib/libshm.dylib 102 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libshm.dylib 103 - 104 - install_name_tool -change @rpath/libiomp5.dylib $out/lib/libiomp5.dylib $out/lib/libtorch_global_deps.dylib 105 - install_name_tool -change @rpath/libtorch_cpu.dylib $out/lib/libtorch_cpu.dylib $out/lib/libtorch_global_deps.dylib 106 ''; 107 108 outputs = [ "out" "dev" ]; 109 110 passthru.tests.cmake = callPackage 
./test { 111 inherit cudaSupport; 112 - cudatoolkit = cudatoolkit_11_1; 113 - cudnn = cudnn_cudatoolkit_11_1; 114 }; 115 116 meta = with lib; { 117 description = "C++ API of the PyTorch machine learning framework"; 118 homepage = "https://pytorch.org/"; 119 - license = licenses.unfree; # Includes CUDA and Intel MKL. 120 - maintainers = with maintainers; [ ]; 121 - platforms = with platforms; linux ++ darwin; 122 }; 123 }
··· 2 , stdenv 3 , fetchzip 4 , lib 5 + , libcxx 6 7 , addOpenGLRunpath 8 , patchelf 9 , fixDarwinDylibNames 10 11 , cudaSupport 12 }: 13 14 let ··· 17 # this derivation. However, we should ensure on version bumps 18 # that the CUDA toolkit for `passthru.tests` is still 19 # up-to-date. 20 + version = "1.10.0"; 21 device = if cudaSupport then "cuda" else "cpu"; 22 srcs = import ./binary-hashes.nix version; 23 unavailable = throw "libtorch is not available for this platform"; 24 + libcxx-for-libtorch = if stdenv.hostPlatform.system == "x86_64-darwin" then libcxx else stdenv.cc.cc.lib; 25 in stdenv.mkDerivation { 26 inherit version; 27 pname = "libtorch"; ··· 67 ''} 68 done 69 '' + lib.optionalString stdenv.isDarwin '' 70 + for f in $out/lib/*.dylib; do 71 + otool -L $f 72 + done 73 + for f in $out/lib/*.dylib; do 74 + install_name_tool -id $out/lib/$(basename $f) $f || true 75 + for rpath in $(otool -L $f | grep rpath | awk '{print $1}');do 76 + install_name_tool -change $rpath $out/lib/$(basename $rpath) $f 77 + done 78 + if otool -L $f | grep /usr/lib/libc++ >& /dev/null; then 79 + install_name_tool -change /usr/lib/libc++.1.dylib ${libcxx-for-libtorch.outPath}/lib/libc++.1.0.dylib $f 80 + fi 81 + done 82 + for f in $out/lib/*.dylib; do 83 + otool -L $f 84 + done 85 ''; 86 87 outputs = [ "out" "dev" ]; 88 89 passthru.tests.cmake = callPackage ./test { 90 inherit cudaSupport; 91 }; 92 93 meta = with lib; { 94 description = "C++ API of the PyTorch machine learning framework"; 95 homepage = "https://pytorch.org/"; 96 + # Includes CUDA and Intel MKL, but redistributions of the binary are not limited. 97 + # https://docs.nvidia.com/cuda/eula/index.html 98 + # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html 99 + license = licenses.bsd3; 100 + maintainers = with maintainers; [ junjihashimoto ]; 101 + platforms = platforms.unix; 102 }; 103 }
+17 -12
pkgs/development/libraries/science/math/libtorch/binary-hashes.nix
··· 1 - version: { 2 - x86_64-darwin-cpu = { 3 - url = "https://download.pytorch.org/libtorch/cpu/libtorch-macos-${version}.zip"; 4 - hash = "sha256-TOJ+iQpqazta46y4IzIbfEGMjz/fz+pRDV8fKqriB6Q="; 5 - }; 6 - x86_64-linux-cpu = { 7 - url = "https://download.pytorch.org/libtorch/cpu/libtorch-cxx11-abi-shared-with-deps-${version}%2Bcpu.zip"; 8 - hash = "sha256-gZMNLCzW3j+eplBqWo6lVvuHS5iRqtMD8NL3MoszsVg="; 9 - }; 10 - x86_64-linux-cuda = { 11 - url = "https://download.pytorch.org/libtorch/cu111/libtorch-cxx11-abi-shared-with-deps-${version}%2Bcu111.zip"; 12 - hash = "sha256-dRu4F8k2SAbtghwrPJNyX0u3tsODCbXfi9EqUdf4xYc="; 13 }; 14 }
··· 1 + version : builtins.getAttr version { 2 + "1.10.0" = { 3 + x86_64-darwin-cpu = { 4 + name = "libtorch-macos-1.10.0.zip"; 5 + url = "https://download.pytorch.org/libtorch/cpu/libtorch-macos-1.10.0.zip"; 6 + hash = "sha256-HSisxHs466c6XwvZEbkV/1kVNBzJOy3uVw9Bh497Vk8="; 7 + }; 8 + x86_64-linux-cpu = { 9 + name = "libtorch-cxx11-abi-shared-with-deps-1.10.0-cpu.zip"; 10 + url = "https://download.pytorch.org/libtorch/cpu/libtorch-cxx11-abi-shared-with-deps-1.10.0%2Bcpu.zip"; 11 + hash = "sha256-wAtA+AZx3HjaFbsrbyfkSXjYM0BP8H5HwCgyHbgJXJ0="; 12 + }; 13 + x86_64-linux-cuda = { 14 + name = "libtorch-cxx11-abi-shared-with-deps-1.10.0-cu113.zip"; 15 + url = "https://download.pytorch.org/libtorch/cu113/libtorch-cxx11-abi-shared-with-deps-1.10.0%2Bcu113.zip"; 16 + hash = "sha256-jPylK4j0V8SEQ8cZU+O22P7kQ28wanIB0GkBzRGyTj8="; 17 + }; 18 }; 19 }
+38
pkgs/development/libraries/science/math/libtorch/prefetch.sh
···
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix-prefetch-scripts

# Prefetch the libtorch binary archives for a given upstream version and
# emit a Nix attrset fragment suitable for pasting into binary-hashes.nix.
#
# Usage: ./prefetch.sh <version>   (e.g. ./prefetch.sh 1.10.0)
# Output: binary-hashes-<version>.nix in the current directory.

set -euo pipefail

version=$1

bucket="https://download.pytorch.org/libtorch"
CUDA_VERSION=cu113

# Each entry: "<nix-key> <download-url> <store-name>"
url_and_key_list=(
  "x86_64-darwin-cpu $bucket/cpu/libtorch-macos-${version}.zip libtorch-macos-${version}.zip"
  "x86_64-linux-cpu $bucket/cpu/libtorch-cxx11-abi-shared-with-deps-${version}%2Bcpu.zip libtorch-cxx11-abi-shared-with-deps-${version}-cpu.zip"
  "x86_64-linux-cuda $bucket/${CUDA_VERSION}/libtorch-cxx11-abi-shared-with-deps-${version}%2B${CUDA_VERSION}.zip libtorch-cxx11-abi-shared-with-deps-${version}-${CUDA_VERSION}.zip"
)

hashfile="binary-hashes-$version.nix"
# Truncate any output left over from a previous run: the script only ever
# appends (>>), so without this a rerun would emit a duplicate attrset and
# produce invalid Nix.
rm -f -- "$hashfile"
# NOTE(review): the diff rendering collapsed whitespace, so the exact
# indentation inside these emitted strings may differ from upstream — verify
# against the committed binary-hashes.nix before relying on the formatting.
echo " \"$version\" = {" >> "$hashfile"

for url_and_key in "${url_and_key_list[@]}"; do
  key=$(echo "$url_and_key" | cut -d' ' -f1)
  url=$(echo "$url_and_key" | cut -d' ' -f2)
  name=$(echo "$url_and_key" | cut -d' ' -f3)

  echo "prefetching ${url}..."
  # --unpack: hash the unpacked archive (fetchzip semantics in bin.nix).
  hash=$(nix hash to-sri --type sha256 "$(nix-prefetch-url --unpack "$url" --name "$name")")

  echo " $key = {" >> "$hashfile"
  echo " name = \"$name\";" >> "$hashfile"
  echo " url = \"$url\";" >> "$hashfile"
  echo " hash = \"$hash\";" >> "$hashfile"
  echo " };" >> "$hashfile"

  echo
done

echo " };" >> "$hashfile"
echo "done."
+8 -2
pkgs/development/python-modules/pytorch/bin.nix
··· 18 pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion; 19 srcs = import ./binary-hashes.nix version; 20 unsupported = throw "Unsupported system"; 21 - version = "1.9.1"; 22 in buildPythonPackage { 23 inherit version; 24 ··· 59 done 60 ''; 61 62 pythonImportsCheck = [ "torch" ]; 63 64 meta = with lib; { 65 description = "Open source, prototype-to-production deep learning platform"; 66 homepage = "https://pytorch.org/"; 67 changelog = "https://github.com/pytorch/pytorch/releases/tag/v${version}"; 68 - license = licenses.unfree; # Includes CUDA and Intel MKL. 69 platforms = platforms.linux; 70 maintainers = with maintainers; [ junjihashimoto ]; 71 };
··· 18 pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion; 19 srcs = import ./binary-hashes.nix version; 20 unsupported = throw "Unsupported system"; 21 + version = "1.10.0"; 22 in buildPythonPackage { 23 inherit version; 24 ··· 59 done 60 ''; 61 62 + # The wheel-binary is not stripped to avoid the error of `ImportError: libtorch_cuda_cpp.so: ELF load command address/offset not properly aligned.`. 63 + dontStrip = true; 64 + 65 pythonImportsCheck = [ "torch" ]; 66 67 meta = with lib; { 68 description = "Open source, prototype-to-production deep learning platform"; 69 homepage = "https://pytorch.org/"; 70 changelog = "https://github.com/pytorch/pytorch/releases/tag/v${version}"; 71 + # Includes CUDA and Intel MKL, but redistributions of the binary are not limited. 72 + # https://docs.nvidia.com/cuda/eula/index.html 73 + # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html 74 + license = licenses.bsd3; 75 platforms = platforms.linux; 76 maintainers = with maintainers; [ junjihashimoto ]; 77 };
+10 -10
pkgs/development/python-modules/pytorch/binary-hashes.nix
··· 6 # To add a new version, run "prefetch.sh 'new-version'" to paste the generated file as follows. 7 8 version : builtins.getAttr version { 9 - "1.9.1" = { 10 x86_64-linux-37 = { 11 - name = "torch-1.9.1-cp37-cp37m-linux_x86_64.whl"; 12 - url = "https://download.pytorch.org/whl/cu111/torch-1.9.1%2Bcu111-cp37-cp37m-linux_x86_64.whl"; 13 - hash = "sha256-qzXbbpLX+ZlRv41oAyQRk3guU0n/6vuNzWw+nOieL6s="; 14 }; 15 x86_64-linux-38 = { 16 - name = "torch-1.9.1-cp38-cp38-linux_x86_64.whl"; 17 - url = "https://download.pytorch.org/whl/cu111/torch-1.9.1%2Bcu111-cp38-cp38-linux_x86_64.whl"; 18 - hash = "sha256-JUbcqugax08/iN1LKfXq0ohSpejmbKhbT0by7qMGAzw="; 19 }; 20 x86_64-linux-39 = { 21 - name = "torch-1.9.1-cp39-cp39-linux_x86_64.whl"; 22 - url = "https://download.pytorch.org/whl/cu111/torch-1.9.1%2Bcu111-cp39-cp39-linux_x86_64.whl"; 23 - hash = "sha256-wNLLtR9ZxKkVOTzwbAikM5H83pXyH+aPHVFyfrO4c1M="; 24 }; 25 }; 26 }
··· 6 # To add a new version, run "prefetch.sh 'new-version'" to paste the generated file as follows. 7 8 version : builtins.getAttr version { 9 + "1.10.0" = { 10 x86_64-linux-37 = { 11 + name = "torch-1.10.0-cp37-cp37m-linux_x86_64.whl"; 12 + url = "https://download.pytorch.org/whl/cu113/torch-1.10.0%2Bcu113-cp37-cp37m-linux_x86_64.whl"; 13 + hash = "sha256-KpDbklee2HXSqgrWr1U1nj8EJqUjBWp7SbACw8xtKtg="; 14 }; 15 x86_64-linux-38 = { 16 + name = "torch-1.10.0-cp38-cp38-linux_x86_64.whl"; 17 + url = "https://download.pytorch.org/whl/cu113/torch-1.10.0%2Bcu113-cp38-cp38-linux_x86_64.whl"; 18 + hash = "sha256-zM3cMriUG9A+3in/ChzOLytRETpe4ju4uXkxasIRQYM="; 19 }; 20 x86_64-linux-39 = { 21 + name = "torch-1.10.0-cp39-cp39-linux_x86_64.whl"; 22 + url = "https://download.pytorch.org/whl/cu113/torch-1.10.0%2Bcu113-cp39-cp39-linux_x86_64.whl"; 23 + hash = "sha256-w8UJDh4b5cgDu7ZSvDoKzNH4hiXEyRfvpycNOg+wJOg="; 24 }; 25 }; 26 }
+7 -6
pkgs/development/python-modules/pytorch/prefetch.sh
··· 1 #!/usr/bin/env nix-shell 2 #!nix-shell -i bash -p nix-prefetch-scripts 3 4 version=$1 5 6 - bucket="https://download.pytorch.org/whl/cu111" 7 8 url_and_key_list=( 9 - "x86_64-linux-37 $bucket/torch-${version}%2Bcu111-cp37-cp37m-linux_x86_64.whl torch-${version}-cp37-cp37m-linux_x86_64.whl" 10 - "x86_64-linux-38 $bucket/torch-${version}%2Bcu111-cp38-cp38-linux_x86_64.whl torch-${version}-cp38-cp38-linux_x86_64.whl" 11 - "x86_64-linux-39 $bucket/torch-${version}%2Bcu111-cp39-cp39-linux_x86_64.whl torch-${version}-cp39-cp39-linux_x86_64.whl" 12 ) 13 14 - hashfile=binary-hashes-"$version".nix 15 - rm -f $hashfile 16 echo " \"$version\" = {" >> $hashfile 17 18 for url_and_key in "${url_and_key_list[@]}"; do
··· 1 #!/usr/bin/env nix-shell 2 #!nix-shell -i bash -p nix-prefetch-scripts 3 4 + set -eou pipefail 5 + 6 version=$1 7 8 + bucket="https://download.pytorch.org/whl/cu113" 9 10 url_and_key_list=( 11 + "x86_64-linux-37 $bucket/torch-${version}%2Bcu113-cp37-cp37m-linux_x86_64.whl torch-${version}-cp37-cp37m-linux_x86_64.whl" 12 + "x86_64-linux-38 $bucket/torch-${version}%2Bcu113-cp38-cp38-linux_x86_64.whl torch-${version}-cp38-cp38-linux_x86_64.whl" 13 + "x86_64-linux-39 $bucket/torch-${version}%2Bcu113-cp39-cp39-linux_x86_64.whl torch-${version}-cp39-cp39-linux_x86_64.whl" 14 ) 15 16 + hashfile="binary-hashes-$version.nix" 17 echo " \"$version\" = {" >> $hashfile 18 19 for url_and_key in "${url_and_key_list[@]}"; do
+52
pkgs/development/python-modules/torchaudio/bin.nix
···
··· 1 + { lib 2 + , stdenv 3 + , buildPythonPackage 4 + , fetchurl 5 + , python 6 + , pytorch-bin 7 + , pythonOlder 8 + , pythonAtLeast 9 + }: 10 + 11 + buildPythonPackage rec { 12 + pname = "torchaudio"; 13 + version = "0.10.0"; 14 + format = "wheel"; 15 + 16 + src = 17 + let pyVerNoDot = lib.replaceStrings [ "." ] [ "" ] python.pythonVersion; 18 + unsupported = throw "Unsupported system"; 19 + srcs = (import ./binary-hashes.nix version)."${stdenv.system}-${pyVerNoDot}" or unsupported; 20 + in fetchurl srcs; 21 + 22 + disabled = ! (pythonAtLeast "3.7" && pythonOlder "3.10"); 23 + 24 + propagatedBuildInputs = [ 25 + pytorch-bin 26 + ]; 27 + 28 + # The wheel-binary is not stripped to avoid the error of `ImportError: libtorch_cuda_cpp.so: ELF load command address/offset not properly aligned.`. 29 + dontStrip = true; 30 + 31 + pythonImportsCheck = [ "torchaudio" ]; 32 + 33 + postFixup = '' 34 + # Note: after patchelf'ing, libcudart can still not be found. However, this should 35 + # not be an issue, because PyTorch is loaded before torchvision and brings 36 + # in the necessary symbols. 37 + patchelf --set-rpath "${lib.makeLibraryPath [ stdenv.cc.cc.lib ]}:${pytorch-bin}/${python.sitePackages}/torch/lib:" \ 38 + "$out/${python.sitePackages}/torchaudio/_torchaudio.so" 39 + ''; 40 + 41 + meta = with lib; { 42 + description = "PyTorch audio library"; 43 + homepage = "https://pytorch.org/"; 44 + changelog = "https://github.com/pytorch/audio/releases/tag/v${version}"; 45 + # Includes CUDA and Intel MKL, but redistributions of the binary are not limited. 46 + # https://docs.nvidia.com/cuda/eula/index.html 47 + # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html 48 + license = licenses.bsd3; 49 + platforms = platforms.linux; 50 + maintainers = with maintainers; [ junjihashimoto ]; 51 + }; 52 + }
+26
pkgs/development/python-modules/torchaudio/binary-hashes.nix
···
··· 1 + # Warning: Need to update at the same time as pytorch-bin 2 + # 3 + # Precompiled wheels can be found at: 4 + # https://download.pytorch.org/whl/torch_stable.html 5 + 6 + # To add a new version, run "prefetch.sh 'new-version'" to paste the generated file as follows. 7 + 8 + version : builtins.getAttr version { 9 + "0.10.0" = { 10 + x86_64-linux-37 = { 11 + name = "torchaudio-0.10.0-cp37-cp37m-linux_x86_64.whl"; 12 + url = "https://download.pytorch.org/whl/cu113/torchaudio-0.10.0%2Bcu113-cp37-cp37m-linux_x86_64.whl"; 13 + hash = "sha256-FspXTTODdkO0nPUJcJm8+vLIvckUa8gRfBPBT9LcKPw="; 14 + }; 15 + x86_64-linux-38 = { 16 + name = "torchaudio-0.10.0-cp38-cp38-linux_x86_64.whl"; 17 + url = "https://download.pytorch.org/whl/cu113/torchaudio-0.10.0%2Bcu113-cp38-cp38-linux_x86_64.whl"; 18 + hash = "sha256-Mf7QdXBSIIWRfT7ACthEwFA1V2ieid8legbMnRQnzqI="; 19 + }; 20 + x86_64-linux-39 = { 21 + name = "torchaudio-0.10.0-cp39-cp39-linux_x86_64.whl"; 22 + url = "https://download.pytorch.org/whl/cu113/torchaudio-0.10.0%2Bcu113-cp39-cp39-linux_x86_64.whl"; 23 + hash = "sha256-LMSGNdmku1iHRy1jCRTTOYcQlRL+Oc9jjZC1nx++skA="; 24 + }; 25 + }; 26 + }
+37
pkgs/development/python-modules/torchaudio/prefetch.sh
···
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix-prefetch-scripts

# Prefetch the torchaudio wheels for a given upstream version and emit a
# Nix attrset fragment suitable for pasting into binary-hashes.nix.
#
# Usage: ./prefetch.sh <version>   (e.g. ./prefetch.sh 0.10.0)
# Output: binary-hashes-<version>.nix in the current directory.

set -euo pipefail

version=$1

bucket="https://download.pytorch.org/whl/cu113"

# Each entry: "<nix-key> <download-url> <store-name>"
url_and_key_list=(
  "x86_64-linux-37 $bucket/torchaudio-${version}%2Bcu113-cp37-cp37m-linux_x86_64.whl torchaudio-${version}-cp37-cp37m-linux_x86_64.whl"
  "x86_64-linux-38 $bucket/torchaudio-${version}%2Bcu113-cp38-cp38-linux_x86_64.whl torchaudio-${version}-cp38-cp38-linux_x86_64.whl"
  "x86_64-linux-39 $bucket/torchaudio-${version}%2Bcu113-cp39-cp39-linux_x86_64.whl torchaudio-${version}-cp39-cp39-linux_x86_64.whl"
)

hashfile="binary-hashes-$version.nix"
# Truncate any output left over from a previous run: the script only ever
# appends (>>), so without this a rerun would emit a duplicate attrset and
# produce invalid Nix.
rm -f -- "$hashfile"
# NOTE(review): the diff rendering collapsed whitespace, so the exact
# indentation inside these emitted strings may differ from upstream — verify
# against the committed binary-hashes.nix before relying on the formatting.
echo " \"$version\" = {" >> "$hashfile"

for url_and_key in "${url_and_key_list[@]}"; do
  key=$(echo "$url_and_key" | cut -d' ' -f1)
  url=$(echo "$url_and_key" | cut -d' ' -f2)
  name=$(echo "$url_and_key" | cut -d' ' -f3)

  echo "prefetching ${url}..."
  # No --unpack here: the wheel is hashed as a flat file (fetchurl in bin.nix).
  # $( … ) replaces the legacy backticks for consistency with the sibling
  # libtorch/pytorch prefetch scripts.
  hash=$(nix hash to-sri --type sha256 "$(nix-prefetch-url "$url" --name "$name")")

  echo " $key = {" >> "$hashfile"
  echo " name = \"$name\";" >> "$hashfile"
  echo " url = \"$url\";" >> "$hashfile"
  echo " hash = \"$hash\";" >> "$hashfile"
  echo " };" >> "$hashfile"

  echo
done

echo " };" >> "$hashfile"
echo "done."
+7 -1
pkgs/development/python-modules/torchvision/bin.nix
··· 15 pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion; 16 srcs = import ./binary-hashes.nix version; 17 unsupported = throw "Unsupported system"; 18 - version = "0.10.1"; 19 in buildPythonPackage { 20 inherit version; 21 ··· 36 pytorch-bin 37 ]; 38 39 pythonImportsCheck = [ "torchvision" ]; 40 41 postFixup = let ··· 52 description = "PyTorch vision library"; 53 homepage = "https://pytorch.org/"; 54 changelog = "https://github.com/pytorch/vision/releases/tag/v${version}"; 55 license = licenses.bsd3; 56 platforms = platforms.linux; 57 maintainers = with maintainers; [ junjihashimoto ];
··· 15 pyVerNoDot = builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion; 16 srcs = import ./binary-hashes.nix version; 17 unsupported = throw "Unsupported system"; 18 + version = "0.11.1"; 19 in buildPythonPackage { 20 inherit version; 21 ··· 36 pytorch-bin 37 ]; 38 39 + # The wheel-binary is not stripped to avoid the error of `ImportError: libtorch_cuda_cpp.so: ELF load command address/offset not properly aligned.`. 40 + dontStrip = true; 41 + 42 pythonImportsCheck = [ "torchvision" ]; 43 44 postFixup = let ··· 55 description = "PyTorch vision library"; 56 homepage = "https://pytorch.org/"; 57 changelog = "https://github.com/pytorch/vision/releases/tag/v${version}"; 58 + # Includes CUDA and Intel MKL, but redistributions of the binary are not limited. 59 + # https://docs.nvidia.com/cuda/eula/index.html 60 + # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html 61 license = licenses.bsd3; 62 platforms = platforms.linux; 63 maintainers = with maintainers; [ junjihashimoto ];
+10 -10
pkgs/development/python-modules/torchvision/binary-hashes.nix
··· 6 # To add a new version, run "prefetch.sh 'new-version'" to paste the generated file as follows. 7 8 version : builtins.getAttr version { 9 - "0.10.1" = { 10 x86_64-linux-37 = { 11 - name = "torchvision-0.10.1-cp37-cp37m-linux_x86_64.whl"; 12 - url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.1%2Bcu111-cp37-cp37m-linux_x86_64.whl"; 13 - hash = "sha256-1MdsCrOLGkYpfbtv011/b6QG+yKaE+O0jUKeUVj2BJY="; 14 }; 15 x86_64-linux-38 = { 16 - name = "torchvision-0.10.1-cp38-cp38-linux_x86_64.whl"; 17 - url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.1%2Bcu111-cp38-cp38-linux_x86_64.whl"; 18 - hash = "sha256-LtmsnNSa9g3tCdjW1jhu7AZlGgfyYIVh5/2R+WwcxSo="; 19 }; 20 x86_64-linux-39 = { 21 - name = "torchvision-0.10.1-cp39-cp39-linux_x86_64.whl"; 22 - url = "https://download.pytorch.org/whl/cu111/torchvision-0.10.1%2Bcu111-cp39-cp39-linux_x86_64.whl"; 23 - hash = "sha256-ZOC/angyiLeOhe+7dAs0W6XlQRKK00T/iI+aBgFNpA0="; 24 }; 25 }; 26 }
··· 6 # To add a new version, run "prefetch.sh 'new-version'" to paste the generated file as follows. 7 8 version : builtins.getAttr version { 9 + "0.11.1" = { 10 x86_64-linux-37 = { 11 + name = "torchvision-0.11.1-cp37-cp37m-linux_x86_64.whl"; 12 + url = "https://download.pytorch.org/whl/cu113/torchvision-0.11.1%2Bcu113-cp37-cp37m-linux_x86_64.whl"; 13 + hash = "sha256-2xKWqWNKqmOMyVJnPfbtF+B9PQ7z4S66J1T3P8EvM0I="; 14 }; 15 x86_64-linux-38 = { 16 + name = "torchvision-0.11.1-cp38-cp38-linux_x86_64.whl"; 17 + url = "https://download.pytorch.org/whl/cu113/torchvision-0.11.1%2Bcu113-cp38-cp38-linux_x86_64.whl"; 18 + hash = "sha256-bFxvJaNEomytXXANHng+oU8YSLGkuO/TSzkoDskkaIE="; 19 }; 20 x86_64-linux-39 = { 21 + name = "torchvision-0.11.1-cp39-cp39-linux_x86_64.whl"; 22 + url = "https://download.pytorch.org/whl/cu113/torchvision-0.11.1%2Bcu113-cp39-cp39-linux_x86_64.whl"; 23 + hash = "sha256-ysN3LmSKR+FVKYGnCGQJqa8lVApVT5rPMO+NHmmazAc="; 24 }; 25 }; 26 }
+7 -6
pkgs/development/python-modules/torchvision/prefetch.sh
··· 1 #!/usr/bin/env nix-shell 2 #!nix-shell -i bash -p nix-prefetch-scripts 3 4 version=$1 5 6 - bucket="https://download.pytorch.org/whl/cu111" 7 8 url_and_key_list=( 9 - "x86_64-linux-37 $bucket/torchvision-${version}%2Bcu111-cp37-cp37m-linux_x86_64.whl torchvision-${version}-cp37-cp37m-linux_x86_64.whl" 10 - "x86_64-linux-38 $bucket/torchvision-${version}%2Bcu111-cp38-cp38-linux_x86_64.whl torchvision-${version}-cp38-cp38-linux_x86_64.whl" 11 - "x86_64-linux-39 $bucket/torchvision-${version}%2Bcu111-cp39-cp39-linux_x86_64.whl torchvision-${version}-cp39-cp39-linux_x86_64.whl" 12 ) 13 14 - hashfile=binary-hashes-"$version".nix 15 - rm -f $hashfile 16 echo " \"$version\" = {" >> $hashfile 17 18 for url_and_key in "${url_and_key_list[@]}"; do
··· 1 #!/usr/bin/env nix-shell 2 #!nix-shell -i bash -p nix-prefetch-scripts 3 4 + set -eou pipefail 5 + 6 version=$1 7 8 + bucket="https://download.pytorch.org/whl/cu113" 9 10 url_and_key_list=( 11 + "x86_64-linux-37 $bucket/torchvision-${version}%2Bcu113-cp37-cp37m-linux_x86_64.whl torchvision-${version}-cp37-cp37m-linux_x86_64.whl" 12 + "x86_64-linux-38 $bucket/torchvision-${version}%2Bcu113-cp38-cp38-linux_x86_64.whl torchvision-${version}-cp38-cp38-linux_x86_64.whl" 13 + "x86_64-linux-39 $bucket/torchvision-${version}%2Bcu113-cp39-cp39-linux_x86_64.whl torchvision-${version}-cp39-cp39-linux_x86_64.whl" 14 ) 15 16 + hashfile="binary-hashes-$version.nix" 17 echo " \"$version\" = {" >> $hashfile 18 19 for url_and_key in "${url_and_key_list[@]}"; do
+2
pkgs/top-level/python-packages.nix
··· 9344 9345 toposort = callPackage ../development/python-modules/toposort { }; 9346 9347 torchgpipe = callPackage ../development/python-modules/torchgpipe { }; 9348 9349 torchvision = callPackage ../development/python-modules/torchvision { };
··· 9344 9345 toposort = callPackage ../development/python-modules/toposort { }; 9346 9347 + torchaudio-bin = callPackage ../development/python-modules/torchaudio/bin.nix { }; 9348 + 9349 torchgpipe = callPackage ../development/python-modules/torchgpipe { }; 9350 9351 torchvision = callPackage ../development/python-modules/torchvision { };