# Source: nixpkgs mirror (for testing), github.com/NixOS/nixpkgs
# Branch: python-updates — file view reported 233 lines, 4.0 kB (raw)
# Nix expression for the Hugging Face `transformers` Python library,
# built with `buildPythonPackage` from a pinned GitHub release tag.
{
  lib,
  buildPythonPackage,
  fetchFromGitHub,

  # build-system
  setuptools,

  # dependencies
  filelock,
  huggingface-hub,
  numpy,
  protobuf,
  packaging,
  pyyaml,
  regex,
  safetensors,
  tokenizers,
  tqdm,
  typer-slim,

  # optional-dependencies
  # sklearn
  scikit-learn,
  # torch
  torch,
  accelerate,
  # hf_xet
  hf-xet,
  # retrieval
  faiss,
  datasets,
  # tokenizers
  # ftfy
  ftfy,
  # modelcreation
  cookiecutter,
  # sagemaker
  sagemaker,
  # optuna
  optuna,
  # ray
  ray,
  # hub-kernels
  kernels,
  # serving
  openai,
  pydantic,
  uvicorn,
  fastapi,
  starlette,
  rich,
  # audio
  librosa,
  phonemizer,
  # speech
  torchaudio,
  # vision
  pillow,
  # timm
  timm,
  # torch-vision
  torchvision,
  # video
  av,
  # num2words
  num2words,
  # sentencepiece
  sentencepiece,
  # tiktoken
  tiktoken,
  blobfile,
  # mistral-common
  mistral-common,
  # chat_template
  jinja2,
  jmespath,
  # quality
  ruff,
  gitpython,
  urllib3,
  libcst,
  pandas,
  # torchhub
  importlib-metadata,
}:

buildPythonPackage (finalAttrs: {
  pname = "transformers";
  version = "5.0.0";
  pyproject = true;

  # Pinned to the v5.0.0 release tag; the hash fixes the exact source tree.
  src = fetchFromGitHub {
    owner = "huggingface";
    repo = "transformers";
    tag = "v${finalAttrs.version}";
    hash = "sha256-ART1ARd+hfC0GQNDa225SWF0zTFUKE4eDxFYbWFaTl8=";
  };

  build-system = [ setuptools ];

  # Hard runtime dependencies; everything else is exposed as an extra below.
  dependencies = [
    filelock
    huggingface-hub
    numpy
    packaging
    pyyaml
    regex
    safetensors
    tokenizers
    tqdm
    typer-slim
  ];

  # `lib.fix` gives the attrset a `self` reference so extras can reuse each
  # other (e.g. `speech` extends `audio`, `deepspeed` extends `accelerate`)
  # without making the whole set `rec`. Inside this non-recursive attrset,
  # a RHS name like `tokenizers` still refers to the package argument from
  # the outer scope, not to the extra of the same name.
  optional-dependencies = lib.fix (self: {
    # Japanese-tokenization extra: all of its packages are currently
    # unpackaged in nixpkgs, hence the empty list of commented names.
    ja = [
      # fugashi
      # ipadic
      # unidic_lite
      # unidic
      # sudachipy
      # sudachidict_core
      # rhoknp
    ];
    sklearn = [ scikit-learn ];
    torch = [
      torch
      accelerate
    ];
    accelerate = [ accelerate ];
    hf_xet = [ hf-xet ];
    retrieval = [
      faiss
      datasets
    ];
    tokenizers = [ tokenizers ];
    ftfy = [ ftfy ];
    modelcreation = [ cookiecutter ];
    sagemaker = [ sagemaker ];
    # deepspeed itself is not packaged; the extra still pulls in accelerate.
    deepspeed = [
      # deepspeed
    ]
    ++ self.accelerate;
    optuna = [ optuna ];
    ray = [ ray ] ++ ray.optional-dependencies.tune;
    hub-kernels = [ kernels ];
    integrations = self.hub-kernels ++ self.optuna ++ self.ray;
    serving = [
      openai
      pydantic
      uvicorn
      fastapi
      starlette
      rich
    ]
    ++ self.torch;
    audio = [
      librosa
      # pyctcdecode
      phonemizer
      # kenlm
    ];
    speech = [ torchaudio ] ++ self.audio;
    torch-speech = [ torchaudio ] ++ self.audio;
    vision = [ pillow ];
    timm = [ timm ];
    torch-vision = [ torchvision ] ++ self.vision;
    natten = [
      # natten
    ];
    codecarbon = [
      # codecarbon
    ];
    video = [ av ];
    num2words = [ num2words ];
    sentencepiece = [
      sentencepiece
      protobuf
    ];
    tiktoken = [
      tiktoken
      blobfile
    ];
    mistral-common = [ mistral-common ] ++ mistral-common.optional-dependencies.image;
    chat_template = [
      jinja2
      jmespath
    ];
    quality = [
      datasets
      ruff
      gitpython
      urllib3
      libcst
      rich
      pandas
    ];
    torchhub = [
      filelock
      huggingface-hub
      importlib-metadata
      numpy
      packaging
      protobuf
      regex
      sentencepiece
      torch
      tokenizers
      tqdm
    ];
    benchmark = [
      # optimum-benchmark
    ];
  });

  # Many tests require internet access.
  doCheck = false;

  # Sanity check: the built package must be importable.
  pythonImportsCheck = [ "transformers" ];

  meta = {
    homepage = "https://github.com/huggingface/transformers";
    description = "Natural Language Processing for TensorFlow 2.0 and PyTorch";
    mainProgram = "transformers-cli";
    # `finalAttrs.src.tag` reuses the tag computed in `src` above.
    changelog = "https://github.com/huggingface/transformers/releases/tag/${finalAttrs.src.tag}";
    license = lib.licenses.asl20;
    platforms = lib.platforms.unix;
    maintainers = with lib.maintainers; [
      pashashocky
      happysalada
    ];
  };
})