3 , hatch-fancy-pypi-readme
35 , pytest-rerunfailures
51 buildPythonPackage rec {
52 inherit (openllm-core) src version;
56 disabled = pythonOlder "3.8";
58 sourceRoot = "source/openllm-python";
61 hatch-fancy-pypi-readme
68 # remove cuda-python as it has an unfree license
72 propagatedBuildInputs = [
84 ] ++ bentoml.optional-dependencies.io
85 ++ tabulate.optional-dependencies.widechars
86 ++ transformers.optional-dependencies.tokenizers
87 ++ transformers.optional-dependencies.torch;
89 passthru.optional-dependencies = {
94 ] ++ transformers.optional-dependencies.agents;
127 ]; # ++ autogptq.optional-dependencies.triton;
130 ] ++ openllm-client.optional-dependencies.grpc;
143 ] ++ openai.optional-dependencies.embeddings;
165 full = with passthru.optional-dependencies; (
166 agents ++ baichuan ++ chatglm ++ falcon ++ fine-tune ++ flan-t5 ++ ggml ++ gptq ++ llama ++ mpt ++ openai ++ opt ++ playground ++ starcoder ++ vllm
168 all = passthru.optional-dependencies.full;
171 nativeCheckInputs = [
184 # skip GPU tests on CI
185 export GITHUB_ACTIONS=1
186 # disable hypothesis' deadline
191 # these tests require access to huggingface.co
198 pythonImportsCheck = [ "openllm" ];
201 description = "Operating LLMs in production";
202 homepage = "https://github.com/bentoml/OpenLLM/tree/main/openllm-python";
203 changelog = "https://github.com/bentoml/OpenLLM/blob/${src.rev}/CHANGELOG.md";
204 license = licenses.asl20;
205 maintainers = with maintainers; [ happysalada natsukium ];