# nixpkgs 24.11-pre
{
  lib,
  buildPythonPackage,
  hatch-fancy-pypi-readme,
  hatch-vcs,
  hatchling,
  pytestCheckHook,
  pythonOlder,
  pythonRelaxDepsHook,
  accelerate,
  bentoml,
  bitsandbytes,
  build,
  click,
  ctranslate2,
  datasets,
  docker,
  einops,
  ghapi,
  huggingface-hub,
  hypothesis,
  ipython,
  jupyter,
  jupytext,
  nbformat,
  notebook,
  openai,
  openllm-client,
  openllm-core,
  optimum,
  peft,
  pytest-mock,
  pytest-randomly,
  pytest-rerunfailures,
  pytest-xdist,
  safetensors,
  scipy,
  sentencepiece,
  soundfile,
  syrupy,
  tabulate,
  tiktoken,
  transformers,
  openai-triton,
  xformers,
}:

buildPythonPackage rec {
  pname = "openllm";
  # Source and version are shared with the openllm-core derivation;
  # this package builds the openllm-python subdirectory of that tree.
  inherit (openllm-core) src version;
  pyproject = true;

  disabled = pythonOlder "3.8";

  sourceRoot = "${src.name}/openllm-python";

  nativeBuildInputs = [ pythonRelaxDepsHook ];

  pythonRemoveDeps = [
    # remove cuda-python as it has an unfree license
    "cuda-python"
  ];

  build-system = [
    hatch-fancy-pypi-readme
    hatch-vcs
    hatchling
  ];

  dependencies =
    [
      accelerate
      bentoml
      bitsandbytes
      build
      click
      einops
      ghapi
      openllm-client
      openllm-core
      optimum
      safetensors
      scipy
      sentencepiece
      transformers
    ]
    ++ bentoml.optional-dependencies.io
    ++ tabulate.optional-dependencies.widechars
    ++ transformers.optional-dependencies.tokenizers
    ++ transformers.optional-dependencies.torch;

  # Extras mirror upstream's pyproject optional dependency groups.
  # Commented-out entries are packages not (yet) available in nixpkgs.
  optional-dependencies = {
    agents = [
      # diffusers
      soundfile
      transformers
    ] ++ transformers.optional-dependencies.agents;
    awq = [
      # autoawq
    ];
    baichuan = [
      # cpm-kernels
    ];
    chatglm = [
      # cpm-kernels
    ];
    ctranslate = [ ctranslate2 ];
    falcon = [ xformers ];
    fine-tune = [
      datasets
      huggingface-hub
      peft
      # trl
    ];
    ggml = [
      # ctransformers
    ];
    gptq = [
      # auto-gptq
    ]; # ++ autogptq.optional-dependencies.triton;
    grpc = [ bentoml ] ++ bentoml.optional-dependencies.grpc;
    mpt = [ openai-triton ];
    openai = [
      openai
      tiktoken
    ] ++ openai.optional-dependencies.datalib;
    playground = [
      ipython
      jupyter
      jupytext
      nbformat
      notebook
    ];
    starcoder = [ bitsandbytes ];
    vllm = [
      # vllm
    ];
    # "full" is the union of every extra above.
    full =
      with optional-dependencies;
      (
        agents
        ++ awq
        ++ baichuan
        ++ chatglm
        ++ ctranslate
        ++ falcon
        ++ fine-tune
        ++ ggml
        ++ gptq
        ++ mpt
        # disambiguate between derivation input and passthru field
        ++ optional-dependencies.openai
        ++ playground
        ++ starcoder
        ++ vllm
      );
    all = optional-dependencies.full;
  };

  nativeCheckInputs = [
    docker
    hypothesis
    pytest-mock
    pytest-randomly
    pytest-rerunfailures
    pytest-xdist
    pytestCheckHook
    syrupy
  ];

  preCheck = ''
    export HOME=$TMPDIR
    # skip GPUs test on CI
    export GITHUB_ACTIONS=1
    # disable hypothesis' deadline
    export CI=1
  '';

  disabledTestPaths = [
    # require network access
    "tests/models"
  ];

  disabledTests = [
    # incompatible with recent TypedDict
    # https://github.com/bentoml/OpenLLM/blob/f3fd32d596253ae34c68e2e9655f19f40e05f666/openllm-python/tests/configuration_test.py#L18-L21
    "test_missing_default"
  ];

  pythonImportsCheck = [ "openllm" ];

  meta = with lib; {
    description = "Operating LLMs in production";
    homepage = "https://github.com/bentoml/OpenLLM/tree/main/openllm-python";
    changelog = "https://github.com/bentoml/OpenLLM/blob/${src.rev}/CHANGELOG.md";
    license = licenses.asl20;
    maintainers = with maintainers; [
      happysalada
      natsukium
    ];
  };
}