pymupdf
paddlepaddle==2.4.2
paddleocr
langchain==0.0.174
transformers==4.29.1
unstructured[local-inference]
layoutparser[layoutmodels,tesseract]
nltk
sentence-transformers
beautifulsoup4
icetk
cpm_kernels
faiss-cpu
accelerate
gradio==3.28.3
fastapi
uvicorn
peft
pypinyin
click~=8.1.3
tabulate
bitsandbytes; platform_system != "Windows"
llama-cpp-python==0.1.34; platform_system != "Windows"
https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.34/llama_cpp_python-0.1.34-cp310-cp310-win_amd64.whl; platform_system == "Windows"