{ stdenv
, lib
, fetchFromGitHub
, fetchFromGitLab
, fetchpatch
, fetchurl
, Foundation
, abseil-cpp
, cmake
, libpng
, nlohmann_json
, nsync
, pkg-config
, python3Packages
, re2
, zlib
, microsoft-gsl
, iconv
, gtest
, protobuf_21
, pythonSupport ? true
}:

let
  howard-hinnant-date = fetchFromGitHub {
    owner = "HowardHinnant";
    repo = "date";
    rev = "v2.4.1";
    sha256 = "sha256-BYL7wxsYRI45l8C3VwxYIIocn5TzJnBtU0UZ9pHwwZw=";
  };

  eigen = fetchFromGitLab {
    owner = "libeigen";
    repo = "eigen";
    rev = "d10b27fe37736d2944630ecd7557cefa95cf87c9";
    sha256 = "sha256-Lmco0s9gIm9sIw7lCr5Iewye3RmrHEE4HLfyzRkQCm0=";
  };

  mp11 = fetchFromGitHub {
    owner = "boostorg";
    repo = "mp11";
    rev = "boost-1.79.0";
    sha256 = "sha256-ZxgPDLvpISrjpEHKpLGBowRKGfSwTf6TBfJD18yw+LM=";
  };

  safeint = fetchFromGitHub {
    owner = "dcleblanc";
    repo = "safeint";
    rev = "ff15c6ada150a5018c5ef2172401cb4529eac9c0";
    sha256 = "sha256-PK1ce4C0uCR4TzLFg+elZdSk5DdPCRhhwT3LvEwWnPU=";
  };

  pytorch_cpuinfo = fetchFromGitHub {
    owner = "pytorch";
    repo = "cpuinfo";
    # There are no tags in the repository
    rev = "5916273f79a21551890fd3d56fc5375a78d1598d";
    sha256 = "sha256-nXBnloVTuB+AVX59VDU/Wc+Dsx94o92YQuHp3jowx2A=";
  };

  flatbuffers = fetchFromGitHub {
    owner = "google";
    repo = "flatbuffers";
    rev = "v1.12.0";
    sha256 = "sha256-L1B5Y/c897Jg9fGwT2J3+vaXsZ+lfXnskp8Gto1p/Tg=";
  };

  gtest' = gtest.overrideAttrs (oldAttrs: rec {
    version = "1.13.0";
    src = fetchFromGitHub {
      owner = "google";
      repo = "googletest";
      rev = "v${version}";
      hash = "sha256-LVLEn+e7c8013pwiLzJiiIObyrlbBHYaioO/SWbItPQ=";
    };
  });
in
stdenv.mkDerivation rec {
  pname = "onnxruntime";
  version = "1.15.1";

  src = fetchFromGitHub {
    owner = "microsoft";
    repo = "onnxruntime";
    rev = "v${version}";
    sha256 = "sha256-SnHo2sVACc++fog7Tg6f2LK/Sv/EskFzN7RZS7D113s=";
    fetchSubmodules = true;
  };

  nativeBuildInputs = [
    cmake
    pkg-config
    python3Packages.python
    protobuf_21
  ] ++ lib.optionals pythonSupport (with python3Packages; [
    setuptools
    wheel
    pip
    pythonOutputDistHook
  ]);

  buildInputs = [
    libpng
    zlib
    nlohmann_json
    nsync
    re2
    microsoft-gsl
  ] ++ lib.optionals pythonSupport [
    python3Packages.numpy
    python3Packages.pybind11
    python3Packages.packaging
  ] ++ lib.optionals stdenv.isDarwin [
    Foundation
    iconv
  ];

  nativeCheckInputs = lib.optionals pythonSupport (with python3Packages; [
    gtest'
    pytest
    sympy
    onnx
  ]);

  # TODO: build server, and move .so's to lib output
  # Python's wheel is stored in a separate dist output
  outputs = [ "out" "dev" ] ++ lib.optionals pythonSupport [ "dist" ];

  enableParallelBuilding = true;

  cmakeDir = "../cmake";

  cmakeFlags = [
    "-DABSL_ENABLE_INSTALL=ON"
    "-DFETCHCONTENT_FULLY_DISCONNECTED=ON"
    "-DFETCHCONTENT_QUIET=OFF"
    "-DFETCHCONTENT_SOURCE_DIR_ABSEIL_CPP=${abseil-cpp.src}"
    "-DFETCHCONTENT_SOURCE_DIR_DATE=${howard-hinnant-date}"
    "-DFETCHCONTENT_SOURCE_DIR_EIGEN=${eigen}"
    "-DFETCHCONTENT_SOURCE_DIR_FLATBUFFERS=${flatbuffers}"
    "-DFETCHCONTENT_SOURCE_DIR_GOOGLE_NSYNC=${nsync.src}"
    "-DFETCHCONTENT_SOURCE_DIR_MP11=${mp11}"
    "-DFETCHCONTENT_SOURCE_DIR_ONNX=${python3Packages.onnx.src}"
    "-DFETCHCONTENT_SOURCE_DIR_PYTORCH_CPUINFO=${pytorch_cpuinfo}"
    "-DFETCHCONTENT_SOURCE_DIR_RE2=${re2.src}"
    "-DFETCHCONTENT_SOURCE_DIR_SAFEINT=${safeint}"
    "-DFETCHCONTENT_TRY_FIND_PACKAGE_MODE=ALWAYS"
    "-Donnxruntime_BUILD_SHARED_LIB=ON"
    "-Donnxruntime_BUILD_UNIT_TESTS=ON"
    "-Donnxruntime_ENABLE_LTO=ON"
    "-Donnxruntime_USE_FULL_PROTOBUF=OFF"
  ] ++ lib.optionals pythonSupport [
    "-Donnxruntime_ENABLE_PYTHON=ON"
  ];

  env = lib.optionalAttrs stdenv.cc.isClang {
    NIX_CFLAGS_COMPILE = toString [
      "-Wno-error=deprecated-declarations"
      "-Wno-error=unused-but-set-variable"
    ];
  };

  doCheck = true;

  postPatch = ''
    substituteInPlace cmake/libonnxruntime.pc.cmake.in \
      --replace '$'{prefix}/@CMAKE_INSTALL_ @CMAKE_INSTALL_
  '' + lib.optionalString (stdenv.hostPlatform.system == "aarch64-linux") ''
    # https://github.com/NixOS/nixpkgs/pull/226734#issuecomment-1663028691
    rm -v onnxruntime/test/optimizer/nhwc_transformer_test.cc
  '';

  postBuild = lib.optionalString pythonSupport ''
    python ../setup.py bdist_wheel
  '';

  postInstall = ''
    # perform parts of `tools/ci_build/github/linux/copy_strip_binary.sh`
    install -m644 -Dt $out/include \
      ../include/onnxruntime/core/framework/provider_options.h \
      ../include/onnxruntime/core/providers/cpu/cpu_provider_factory.h \
      ../include/onnxruntime/core/session/onnxruntime_*.h
  '';

  passthru = {
    protobuf = protobuf_21;
    tests = lib.optionalAttrs pythonSupport {
      python = python3Packages.onnxruntime;
    };
  };

  meta = with lib; {
    description = "Cross-platform, high performance scoring engine for ML models";
    longDescription = ''
      ONNX Runtime is a performance-focused complete scoring engine
      for Open Neural Network Exchange (ONNX) models, with an open
      extensible architecture to continually address the latest developments
      in AI and Deep Learning. ONNX Runtime stays up to date with the ONNX
      standard with complete implementation of all ONNX operators, and
      supports all ONNX releases (1.2+) with both future and backwards
      compatibility.
    '';
    homepage = "https://github.com/microsoft/onnxruntime";
    changelog = "https://github.com/microsoft/onnxruntime/releases/tag/v${version}";
    # https://github.com/microsoft/onnxruntime/blob/master/BUILD.md#architectures
    platforms = platforms.unix;
    license = licenses.mit;
    maintainers = with maintainers; [ jonringer puffnfresh ck3d cbourjau ];
  };
}