# pkgs/development/python-modules/llama-index-core/default.nix (from nixpkgs)
# NOTE(review): the gitweb page header ("evcc: 0.131.4 -> 0.131.5", blob hash
# be4eef72fa7ecba8c08252f8e22b695685b3ac2d) was scrape residue, kept here as a comment.
# Function arguments: everything here is injected by the nixpkgs Python
# package set (callPackage-style); no explicit imports are needed.
{
  lib,
  aiohttp,
  buildPythonPackage,
  dataclasses-json,
  deprecated,
  dirtyjson,
  fetchFromGitHub,
  fsspec,
  jsonpath-ng,
  llamaindex-py-client,
  nest-asyncio,
  networkx,
  nltk,
  nltk-data,
  numpy,
  openai,
  pandas,
  pillow,
  poetry-core,
  pytest-asyncio,
  pytest-mock,
  pytestCheckHook,
  pythonOlder,
  pyvis,
  pyyaml,
  requests,
  spacy,
  sqlalchemy,
  tenacity,
  tiktoken,
  tree-sitter,
  typing-inspect,
}:
buildPythonPackage rec {
  pname = "llama-index-core";
  version = "0.11.20";
  # Build via the PEP 517 pyproject flow (build-system declared below).
  pyproject = true;

  disabled = pythonOlder "3.8";
43   src = fetchFromGitHub {
44     owner = "run-llama";
45     repo = "llama_index";
46     rev = "refs/tags/v${version}";
47     hash = "sha256-r4xedtxoYv6CcxtDrgwau9LY3kOBg3jXlQm1g59L7x4=";
48   };
50   sourceRoot = "${src.name}/${pname}";
52   # When `llama-index` is imported, it uses `nltk` to look for the following files and tries to
53   # download them if they aren't present.
54   # https://github.com/run-llama/llama_index/blob/6efa53cebd5c8ccf363582c932fffde44d61332e/llama-index-core/llama_index/core/utils.py#L59-L67
55   # Setting `NLTK_DATA` to a writable path can also solve this problem, but it needs to be done in
56   # every package that depends on `llama-index-core` for `pythonImportsCheck` not to fail, so this
57   # solution seems more elegant.
58   postPatch = ''
59     mkdir -p llama_index/core/_static/nltk_cache/corpora/stopwords/
60     cp -r ${nltk-data.stopwords}/corpora/stopwords/* llama_index/core/_static/nltk_cache/corpora/stopwords/
62     mkdir -p llama_index/core/_static/nltk_cache/tokenizers/punkt/
63     cp -r ${nltk-data.punkt}/tokenizers/punkt/* llama_index/core/_static/nltk_cache/tokenizers/punkt/
64   '';
66   pythonRelaxDeps = [ "tenacity" ];
68   build-system = [ poetry-core ];
70   dependencies = [
71     aiohttp
72     dataclasses-json
73     deprecated
74     dirtyjson
75     fsspec
76     jsonpath-ng
77     llamaindex-py-client
78     nest-asyncio
79     networkx
80     nltk
81     numpy
82     openai
83     pandas
84     pillow
85     pyvis
86     pyyaml
87     requests
88     spacy
89     sqlalchemy
90     tenacity
91     tiktoken
92     typing-inspect
93   ];
95   nativeCheckInputs = [
96     tree-sitter
97     pytest-asyncio
98     pytest-mock
99     pytestCheckHook
100   ];
102   pythonImportsCheck = [ "llama_index" ];
104   disabledTestPaths = [
105     # Tests require network access
106     "tests/agent/"
107     "tests/callbacks/"
108     "tests/chat_engine/"
109     "tests/evaluation/"
110     "tests/indices/"
111     "tests/ingestion/"
112     "tests/memory/"
113     "tests/node_parser/"
114     "tests/objects/"
115     "tests/playground/"
116     "tests/postprocessor/"
117     "tests/query_engine/"
118     "tests/question_gen/"
119     "tests/response_synthesizers/"
120     "tests/retrievers/"
121     "tests/selectors/"
122     "tests/test_utils.py"
123     "tests/text_splitter/"
124     "tests/token_predictor/"
125     "tests/tools/"
126   ];
128   disabledTests = [
129     # Tests require network access
130     "test_from_namespaced_persist_dir"
131     "test_from_persist_dir"
132     # asyncio.exceptions.InvalidStateError: invalid state
133     "test_workflow_context_to_dict_mid_run"
134   ];
136   meta = with lib; {
137     description = "Data framework for your LLM applications";
138     homepage = "https://github.com/run-llama/llama_index/";
139     changelog = "https://github.com/run-llama/llama_index/blob/${version}/CHANGELOG.md";
140     license = licenses.mit;
141     maintainers = with maintainers; [ fab ];
142   };