{
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  anyio,
  backoff,
  httpx,
  idna,
  langchain,
  llama-index,
  openai,
  opentelemetry-api,
  opentelemetry-sdk,
  opentelemetry-exporter-otlp,
  packaging,
  poetry-core,
  pydantic,
  requests,
  wrapt,
}:

buildPythonPackage rec {
  pname = "langfuse";
  version = "3.5.0";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "langfuse";
    repo = "langfuse-python";
    tag = "v${version}";
    hash = "sha256-1qRdx2j5M6eIIzsHeNgGagqhvCb7WBNAmdVeie6bHI4=";
  };

  build-system = [ poetry-core ];

  pythonRelaxDeps = [ "packaging" ];

  dependencies = [
    anyio
    backoff
    httpx
    idna
    opentelemetry-api
    opentelemetry-sdk
    opentelemetry-exporter-otlp
    packaging
    pydantic
    requests
    wrapt
  ];

  optional-dependencies = {
    langchain = [ langchain ];
    llama-index = [ llama-index ];
    openai = [ openai ];
  };

  pythonImportsCheck = [ "langfuse" ];

  # tests require network access and openai api key
  doCheck = false;

  meta = {
    description = "Instrument your LLM app with decorators or low-level SDK and get detailed tracing/observability";
    homepage = "https://github.com/langfuse/langfuse-python";
    changelog = "https://github.com/langfuse/langfuse-python/releases/tag/${src.tag}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ natsukium ];
  };
}
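
A minimal sketch of consuming this package, assuming the expression is exposed in nixpkgs under the attribute path python3Packages.langfuse (the attribute name and the use of <nixpkgs> here are assumptions, not confirmed by the file itself):

with import <nixpkgs> { };

# Environment with langfuse plus the openai extra declared in
# optional-dependencies above; adjust the attribute path if this
# expression is wired in elsewhere.
python3.withPackages (ps: [
  ps.langfuse
  ps.openai
])

Saved as env.nix, this could be built with `nix-build env.nix` or entered with `nix-shell env.nix`, which yields a Python interpreter that passes the same pythonImportsCheck ("import langfuse") performed during the package build.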