nixpkgs mirror (for testing): github.com/NixOS/nixpkgs
Language: Nix
Branch: python-updates — 167 lines, 3.9 kB (view raw)
# Nix derivation for llama-index-core, the core package of the LlamaIndex
# data framework for LLM applications. Built from the run-llama/llama_index
# monorepo (the `llama-index-core` subdirectory).
{
  lib,
  aiohttp,
  aiosqlite,
  banks,
  buildPythonPackage,
  dataclasses-json,
  deprecated,
  dirtyjson,
  fetchFromGitHub,
  filetype,
  fsspec,
  hatchling,
  jsonpath-ng,
  llama-index-workflows,
  llamaindex-py-client,
  nest-asyncio,
  networkx,
  nltk-data,
  nltk,
  numpy,
  openai,
  pandas,
  pillow,
  pytest-asyncio,
  pytest-mock,
  pytestCheckHook,
  pyvis,
  pyyaml,
  requests,
  spacy,
  sqlalchemy,
  tenacity,
  tiktoken,
  tree-sitter,
  typing-inspect,
}:

buildPythonPackage rec {
  pname = "llama-index-core";
  version = "0.14.12";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "run-llama";
    repo = "llama_index";
    tag = "v${version}";
    hash = "sha256-grF9IToAMc3x5/40+u3lHU9RyjROWu1e3M6N1owq0f4=";
  };

  # The package lives in a subdirectory of the monorepo.
  sourceRoot = "${src.name}/${pname}";

  # When `llama-index` is imported, it uses `nltk` to look for the following files and tries to
  # download them if they aren't present.
  # https://github.com/run-llama/llama_index/blob/6efa53cebd5c8ccf363582c932fffde44d61332e/llama-index-core/llama_index/core/utils.py#L59-L67
  # Setting `NLTK_DATA` to a writable path can also solve this problem, but it needs to be done in
  # every package that depends on `llama-index-core` for `pythonImportsCheck` not to fail, so this
  # solution seems more elegant.
  postPatch = ''
    mkdir -p llama_index/core/_static/nltk_cache/corpora/stopwords/
    cp -r ${nltk-data.stopwords}/corpora/stopwords/* llama_index/core/_static/nltk_cache/corpora/stopwords/

    mkdir -p llama_index/core/_static/nltk_cache/tokenizers/punkt/
    cp -r ${nltk-data.punkt}/tokenizers/punkt/* llama_index/core/_static/nltk_cache/tokenizers/punkt/
  '';

  # Upstream pins these tighter than what nixpkgs ships; the relaxed
  # versions are known to work.
  pythonRelaxDeps = [
    "setuptools"
    "tenacity"
  ];

  build-system = [ hatchling ];

  dependencies = [
    aiohttp
    aiosqlite
    banks
    dataclasses-json
    deprecated
    dirtyjson
    filetype
    fsspec
    jsonpath-ng
    llama-index-workflows
    llamaindex-py-client
    nest-asyncio
    networkx
    nltk
    numpy
    openai
    pandas
    pillow
    pyvis
    pyyaml
    requests
    spacy
    sqlalchemy
    tenacity
    tiktoken
    typing-inspect
  ];

  nativeCheckInputs = [
    tree-sitter
    pytest-asyncio
    pytest-mock
    pytestCheckHook
  ];

  pythonImportsCheck = [ "llama_index" ];

  disabledTestPaths = [
    # Tests require network access
    "tests/agent/"
    "tests/callbacks/"
    "tests/chat_engine/"
    "tests/evaluation/"
    "tests/indices/"
    "tests/ingestion/"
    "tests/memory/"
    "tests/node_parser/"
    "tests/objects/"
    "tests/playground/"
    "tests/postprocessor/"
    "tests/query_engine/"
    "tests/question_gen/"
    "tests/response_synthesizers/"
    "tests/retrievers/"
    "tests/selectors/"
    "tests/test_utils.py"
    "tests/text_splitter/"
    "tests/token_predictor/"
    "tests/tools/"
    "tests/schema/"
    "tests/multi_modal_llms/"
  ];

  disabledTests = [
    # Tests require network access
    "test_context_extraction_basic"
    "test_context_extraction_custom_prompt"
    "test_context_extraction_oversized_document"
    "test_document_block_from_b64"
    "test_document_block_from_bytes"
    "test_document_block_from_path"
    "test_document_block_from_url"
    "test_from_namespaced_persist_dir"
    "test_from_persist_dir"
    "test_mimetype_raw_data"
    "test_multiple_documents_context"
    "test_predict_and_call_via_react_agent"
    "test_resource"
    # asyncio.exceptions.InvalidStateError: invalid state
    "test_workflow_context_to_dict_mid_run"
    "test_SimpleDirectoryReader"
    # RuntimeError
    "test_str"
  ];

  meta = {
    description = "Data framework for your LLM applications";
    homepage = "https://github.com/run-llama/llama_index/";
    changelog = "https://github.com/run-llama/llama_index/blob/${src.tag}/CHANGELOG.md";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
}