lol

mxnet: init at 0.11.0

+45
+43
pkgs/applications/science/math/mxnet/default.nix
{ stdenv, lib, fetchgit, cmake
, opencv, gtest, openblas, liblapack
, cudaSupport ? false, cudatoolkit
, cudnnSupport ? false, cudnn
}:

# cuDNN is a CUDA add-on library: enabling it without CUDA is meaningless.
assert cudnnSupport -> cudaSupport;

stdenv.mkDerivation rec {
  name = "mxnet-${version}";
  version = "0.11.0";

  # fetchgit rather than a release tarball because the build needs the
  # repository's git submodules (fetchgit clones them by default).
  src = fetchgit {
    url = "https://github.com/apache/incubator-mxnet";
    rev = "refs/tags/${version}";
    sha256 = "1qgns0c70a1gfyil96h17ms736nwdkp9kv496gvs9pkzqzvr6cpz";
  };

  nativeBuildInputs = [ cmake ];

  buildInputs = [ opencv gtest openblas liblapack ]
    ++ lib.optional cudaSupport cudatoolkit
    ++ lib.optional cudnnSupport cudnn;

  # Explicitly switch CUDA/cuDNN off when the corresponding support flag
  # is disabled, so the build does not pick them up from the environment.
  cmakeFlags = lib.optional (!cudaSupport) "-DUSE_CUDA=OFF"
    ++ lib.optional (!cudnnSupport) "-DUSE_CUDNN=OFF";

  # Manual install: copy the built shared library and the source headers.
  # NOTE(review): presumably upstream's CMake lacks a usable install
  # target at this version — confirm before simplifying.
  # runHook calls keep the standard pre/post-install hooks working.
  installPhase = ''
    runHook preInstall

    install -Dm755 libmxnet.so $out/lib/libmxnet.so
    cp -r ../include $out

    runHook postInstall
  '';

  enableParallelBuilding = true;

  # Use the `lib` argument (already taken above) instead of `stdenv.lib`
  # for consistency with the rest of the expression.
  meta = with lib; {
    description = "Lightweight, Portable, Flexible Distributed/Mobile Deep Learning with Dynamic, Mutation-aware Dataflow Dep Scheduler";
    homepage = "https://mxnet.incubator.apache.org/";
    maintainers = with maintainers; [ abbradar ];
    license = licenses.asl20;
    platforms = platforms.linux;
  };
}
+2
pkgs/top-level/all-packages.nix
··· 18472 18472 sbcl = null; 18473 18473 }; 18474 18474 18475 + mxnet = callPackage ../applications/science/math/mxnet { }; 18476 + 18475 18477 wxmaxima = callPackage ../applications/science/math/wxmaxima { wxGTK = wxGTK30; }; 18476 18478 18477 18479 pari = callPackage ../applications/science/math/pari { tex = texlive.combined.scheme-basic; };