{
  lib,
  buildPythonPackage,
  fetchPypi,
  hatchling,
  llama-index-core,
  llama-index-llms-openai,
}:

buildPythonPackage rec {
  pname = "llama-index-multi-modal-llms-openai";
  version = "0.5.1";
  pyproject = true;

  src = fetchPypi {
    pname = "llama_index_multi_modal_llms_openai";
    inherit version;
    hash = "sha256-3zr/AMNgI8X4xJ+XKjJfcYI+0PTdnNR5lV12r8FGV18=";
  };

  build-system = [ hatchling ];

  dependencies = [
    llama-index-core
    llama-index-llms-openai
  ];

  # Tests are only available in the mono repo
  doCheck = false;

  pythonImportsCheck = [ "llama_index.multi_modal_llms.openai" ];

  meta = with lib; {
    description = "LlamaIndex Multi-Modal-Llms Integration for OpenAI";
    homepage = "https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-openai";
    license = licenses.mit;
    maintainers = with maintainers; [ fab ];
  };
}
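
# Usage sketch (assumption: this file sits in a nixpkgs checkout and is already
# wired into pkgs/top-level/python-packages.nix under the attribute
# `llama-index-multi-modal-llms-openai`); it could then be built from the repo
# root with, for example:
#   nix-build -A python3Packages.llama-index-multi-modal-llms-openai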