2023-05-13 08:45:17 +08:00
|
|
|
|
pymupdf
|
|
|
|
|
|
paddlepaddle==2.4.2
|
2023-05-21 22:27:02 +08:00
|
|
|
|
paddleocr~=2.6.1.3
|
2023-05-20 01:58:08 +08:00
|
|
|
|
langchain==0.0.174
|
2023-05-17 13:44:39 +08:00
|
|
|
|
transformers==4.29.1
|
2023-04-14 01:06:13 +08:00
|
|
|
|
unstructured[local-inference]
|
|
|
|
|
|
layoutparser[layoutmodels,tesseract]
|
2023-05-21 22:27:02 +08:00
|
|
|
|
nltk~=3.8.1
|
2023-04-14 01:06:13 +08:00
|
|
|
|
sentence-transformers
|
|
|
|
|
|
beautifulsoup4
|
|
|
|
|
|
icetk
|
|
|
|
|
|
cpm_kernels
|
|
|
|
|
|
faiss-cpu
|
2023-05-06 21:38:15 +08:00
|
|
|
|
gradio==3.28.3
|
2023-05-21 22:27:02 +08:00
|
|
|
|
fastapi~=0.95.0
|
|
|
|
|
|
uvicorn~=0.21.1
|
|
|
|
|
|
pypinyin~=0.48.0
|
2023-05-15 19:13:12 +08:00
|
|
|
|
click~=8.1.3
|
2023-05-14 00:36:42 +08:00
|
|
|
|
tabulate
|
2023-05-26 18:50:20 +08:00
|
|
|
|
feedparser
|
2023-05-21 22:27:02 +08:00
|
|
|
|
azure-core
|
2023-05-31 22:31:54 +08:00
|
|
|
|
openai
|
2023-05-26 22:32:12 +08:00
|
|
|
|
#accelerate~=0.18.0
|
|
|
|
|
|
#peft~=0.3.0
|
|
|
|
|
|
#bitsandbytes; platform_system != "Windows"
|
2023-05-21 22:27:02 +08:00
|
|
|
|
|
2023-07-11 19:36:50 +08:00
|
|
|
|
# 要调用llama-cpp模型,如vicuna-13b量化模型需要安装llama-cpp-python库
|
|
|
|
|
|
# but!!! 实测pip install 不好使,需要手动从https://github.com/abetlen/llama-cpp-python/releases/下载
|
|
|
|
|
|
# 而且注意不同时期的ggml格式并不!兼!容!!!因此需要安装的llama-cpp-python版本也不一致,需要手动测试才能确定
|
|
|
|
|
|
# 实测ggml-vicuna-13b-1.1在llama-cpp-python 0.1.63上可正常兼容
|
|
|
|
|
|
# 不过!!!本项目模型加载的方式控制的比较严格,与llama-cpp-python的兼容性较差,很多参数设定不能使用,
|
|
|
|
|
|
# 建议如非必要还是不要使用llama-cpp
|
2023-05-21 22:27:02 +08:00
|
|
|
|
torch~=2.0.0
|
|
|
|
|
|
pydantic~=1.10.7
|
|
|
|
|
|
starlette~=0.26.1
|
|
|
|
|
|
numpy~=1.23.5
|
|
|
|
|
|
tqdm~=4.65.0
|
|
|
|
|
|
requests~=2.28.2
|
2023-06-11 21:25:02 +08:00
|
|
|
|
tenacity~=8.2.2
|
|
|
|
|
|
charset_normalizer==2.1.0
|