{
  lib,
  aiohttp,
  apscheduler,
  azure-identity,
  azure-keyvault-secrets,
  backoff,
  buildPythonPackage,
  click,
  fastapi,
  fastapi-sso,
  fetchFromGitHub,
  google-cloud-kms,
  gunicorn,
  importlib-metadata,
  jinja2,
  openai,
  orjson,
  poetry-core,
  prisma,
  pyjwt,
  python-dotenv,
  python-multipart,
  pythonOlder,
  pyyaml,
  requests,
  resend,
  rq,
  streamlit,
  tiktoken,
  tokenizers,
  uvicorn,
}:

buildPythonPackage rec {
  pname = "litellm";
  version = "1.37.16";
  pyproject = true;

  disabled = pythonOlder "3.8";

  src = fetchFromGitHub {
    owner = "BerriAI";
    repo = "litellm";
    rev = "refs/tags/v${version}";
    hash = "sha256-WOkblyzncIn1F67qlh8rTosCal6j4zlXsHHrWbwhJOo=";
  };

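  # the repository ships a prebuilt dist/ directory; drop it so the package is
  # built from the source tree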
  postPatch = ''
    rm -rf dist
  '';

  build-system = [ poetry-core ];

  dependencies = [
    aiohttp
    click
    importlib-metadata
    jinja2
    openai
    requests
    python-dotenv
    tiktoken
    tokenizers
  ];

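  # optional dependency sets mirroring upstream's extras; consumers can pull
  # them in via litellm.optional-dependencies.proxy / .extra_proxy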
  passthru.optional-dependencies = {
    proxy = [
      apscheduler
      backoff
      fastapi
      fastapi-sso
      gunicorn
      orjson
      pyjwt
      python-multipart
      pyyaml
      rq
      uvicorn
    ];
    extra_proxy = [
      azure-identity
      azure-keyvault-secrets
      google-cloud-kms
      prisma
      resend
      streamlit
    ];
  };

  # the import check fails because it tries to make a network request to openai
  # pythonImportsCheck = [ "litellm" ];

  # no tests
  doCheck = false;

  meta = with lib; {
    description = "Use any LLM as a drop-in replacement for gpt-3.5-turbo. Use Azure, OpenAI, Cohere, Anthropic, Ollama, VLLM, Sagemaker, HuggingFace, Replicate (100+ LLMs)";
    mainProgram = "litellm";
    homepage = "https://github.com/BerriAI/litellm";
    changelog = "https://github.com/BerriAI/litellm/releases/tag/v${version}";
    license = licenses.mit;
    maintainers = with maintainers; [ happysalada ];
  };
}