# evcc: 0.131.4 -> 0.131.5  (unrelated commit subject captured by the scrape)
# [NixPkgs.git] / pkgs / development / python-modules / llama-index-embeddings-gemini / default.nix
# blob 7bd5e2d400217f66c97549a7aeebc4210a4caeeb
# Nix derivation for the llama-index-embeddings-gemini Python package.
# Reconstructed from a line-numbered web scrape: stray per-line numbers removed,
# missing outer braces restored.
{
  lib,
  buildPythonPackage,
  fetchPypi,
  google-generativeai,
  llama-index-core,
  poetry-core,
  pythonOlder,
}:

buildPythonPackage rec {
  pname = "llama-index-embeddings-gemini";
  version = "0.2.1";
  # PEP 517 build (pyproject.toml) rather than legacy setup.py.
  pyproject = true;

  # Upstream requires Python >= 3.9.
  disabled = pythonOlder "3.9";

  src = fetchPypi {
    # The PyPI sdist name uses underscores, unlike the Nix pname.
    pname = "llama_index_embeddings_gemini";
    inherit version;
    hash = "sha256-TA3dm1+NZ0Km9Li9cswzm5fusVYXTRqA9W3U4OgT92I=";
  };

  # Upstream pins google-generativeai; accept whatever version nixpkgs carries.
  pythonRelaxDeps = [ "google-generativeai" ];

  build-system = [ poetry-core ];

  dependencies = [
    google-generativeai
    llama-index-core
  ];

  # Tests are only available in the mono repo
  doCheck = false;

  pythonImportsCheck = [ "llama_index.embeddings.gemini" ];

  meta = with lib; {
    # NOTE(review): original said "Llms", a copy-paste from the sibling llms
    # package; this package provides the embeddings integration.
    description = "LlamaIndex Embeddings Integration for Gemini";
    homepage = "https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/embeddings/llama-index-embeddings-gemini";
    license = licenses.mit;
    maintainers = with maintainers; [ fab ];
  };
}