3 , hatch-fancy-pypi-readme
33 , pytest-rerunfailures
47 buildPythonPackage rec {
48 inherit (openllm-core) src version;
52 disabled = pythonOlder "3.8";
54 sourceRoot = "${src.name}/openllm-python";
61 # remove cuda-python as it has an unfree license
66 hatch-fancy-pypi-readme
86 ] ++ bentoml.optional-dependencies.io
87 ++ tabulate.optional-dependencies.widechars
88 ++ transformers.optional-dependencies.tokenizers
89 ++ transformers.optional-dependencies.torch;
91 optional-dependencies = {
96 ] ++ transformers.optional-dependencies.agents;
123 ]; # ++ autogptq.optional-dependencies.triton;
126 ] ++ bentoml.optional-dependencies.grpc;
133 ] ++ openai.optional-dependencies.datalib;
147 full = with optional-dependencies; (
148 agents ++ awq ++ baichuan ++ chatglm ++ ctranslate ++ falcon ++ fine-tune ++ ggml ++ gptq ++ mpt
149 # disambiguate between derivation input and passthru field
150 ++ optional-dependencies.openai
151 ++ playground ++ starcoder ++ vllm
153 all = optional-dependencies.full;
156 nativeCheckInputs = [
169 # skip GPU tests on CI
170 export GITHUB_ACTIONS=1
171 # disable hypothesis' deadline
175 disabledTestPaths = [
176 # require network access
181 # incompatible with recent TypedDict
182 # https://github.com/bentoml/OpenLLM/blob/f3fd32d596253ae34c68e2e9655f19f40e05f666/openllm-python/tests/configuration_test.py#L18-L21
183 "test_missing_default"
186 pythonImportsCheck = [ "openllm" ];
189 description = "Operating LLMs in production";
190 homepage = "https://github.com/bentoml/OpenLLM/tree/main/openllm-python";
191 changelog = "https://github.com/bentoml/OpenLLM/blob/${src.rev}/CHANGELOG.md";
192 license = licenses.asl20;
193 maintainers = with maintainers; [ happysalada natsukium ];