psutil
sentencepiece # Required for LLaMA tokenizer.
numpy < 2.0.0
requests
tqdm
py-cpuinfo
transformers >= 4.43.2 # Required for Chameleon and Llama 3.1 hotfix.
tokenizers >= 0.19.1 # Required for Llama 3.
protobuf # Required by LlamaTokenizer.
fastapi
aiohttp
openai >= 1.40.0 # Ensure a modern openai package (so the types module is present).
uvicorn[standard]
pydantic >= 2.8 # Required for OpenAI server.
pillow # Required for image processing.
prometheus_client >= 0.18.0
prometheus-fastapi-instrumentator >= 7.0.0
tiktoken >= 0.6.0 # Required for DBRX tokenizer
lm-format-enforcer == 0.10.6
outlines >= 0.0.43, < 0.1 # Requires torch >= 2.1.0
typing_extensions >= 4.10
filelock >= 3.10.4 # filelock supports the `mode` argument starting from 3.10.4.
partial-json-parser # Used for parsing partial JSON outputs.
pyzmq
msgspec
gguf == 0.9.1
importlib_metadata
mistral_common >= 1.3.4
pyyaml
six >= 1.16.0; python_version > '3.11' # Transitive dependency of pandas that must be the latest version on Python 3.12.