diff --git a/cli.py b/cli.py
index 6cce3ca..a4f9f5b 100644
--- a/cli.py
+++ b/cli.py
@@ -1,6 +1,7 @@
 import click
 
 from api import api_start as api_start
+from cli_demo import main as cli_start
 from configs.model_config import llm_model_dict, embedding_model_dict
 
 
@@ -42,17 +43,45 @@ def start():
 @click.option('-i', '--ip', default='0.0.0.0', show_default=True, type=str, help='api_server listen address.')
 @click.option('-p', '--port', default=7861, show_default=True, type=int, help='api_server listen port.')
 def start_api(ip, port):
+    # Before calling api_start, loadCheckPoint must run first and be given the checkpoint-loading arguments.
+    # In theory this could be wrapped with the click package, but that would be cumbersome and a larger change,
+    # so the parser module is kept and models.loader.args.DEFAULT_ARGS is used as the default arguments.
+    # Modify models.loader.args.DEFAULT_ARGS if different settings are needed.
+    from models import shared
+    from models.loader import LoaderCheckPoint
+    from models.loader.args import DEFAULT_ARGS
+    shared.loaderCheckPoint = LoaderCheckPoint(DEFAULT_ARGS)
     api_start(host=ip, port=port)
 
 
+# When cli_demo is called through cli.py, the model must be initialized in cli.py, otherwise it fails with:
+# langchain-ChatGLM: error: unrecognized arguments: start cli
+# To make this work, the statements
+# args = None
+# args = parser.parse_args()
+# args_dict = vars(args)
+# shared.loaderCheckPoint = LoaderCheckPoint(args_dict)
+# were moved out of cli_demo.main(),
+# and the initialization is done here in cli.py instead.
 @start.command(name="cli", context_settings=dict(help_option_names=['-h', '--help']))
 def start_cli():
-    import cli_demo
-    cli_demo.main()
+    print("Calling cli_demo through cli.py ...")
+    from models import shared
+    from models.loader import LoaderCheckPoint
+    from models.loader.args import DEFAULT_ARGS
+    shared.loaderCheckPoint = LoaderCheckPoint(DEFAULT_ARGS)
+    cli_start()
+
 
 
+# As with the cli command, starting webui through cli.py would require moving the argparse initialization into cli.py,
+# but in webui.py the model is initialized inside init_model and there is no main function that can easily be split out,
+# so webui cannot be started via `python cli.py start webui` without a larger refactor of webui.
+# Starting webui through this command is therefore not recommended; comment out the statements below instead.
 @start.command(name="webui", context_settings=dict(help_option_names=['-h', '--help']))
-def start_webui():
+@click.option('-i', '--info', default="start client", show_default=True, type=str)
+def start_webui(info):
+    print(info)
     import webui
 
diff --git a/cli_demo.py b/cli_demo.py
index 485f4fc..938ebb3 100644
--- a/cli_demo.py
+++ b/cli_demo.py
@@ -12,10 +12,7 @@ REPLY_WITH_SOURCE = True
 
 
 def main():
-    args = None
-    args = parser.parse_args()
-    args_dict = vars(args)
-    shared.loaderCheckPoint = LoaderCheckPoint(args_dict)
+
     llm_model_ins = shared.loaderLLM()
     llm_model_ins.history_len = LLM_HISTORY_LEN
 
@@ -53,4 +50,17 @@ def main():
 
 
 if __name__ == "__main__":
+    # When cli_demo is called through cli.py, the model must be initialized in cli.py, otherwise it fails with:
+    # langchain-ChatGLM: error: unrecognized arguments: start cli
+    # To make this work, the statements
+    # args = None
+    # args = parser.parse_args()
+    # args_dict = vars(args)
+    # shared.loaderCheckPoint = LoaderCheckPoint(args_dict)
+    # were moved out of the main function
+    # and are now run here (or in cli.py) before main().
+    args = None
+    args = parser.parse_args()
+    args_dict = vars(args)
+    shared.loaderCheckPoint = LoaderCheckPoint(args_dict)
     main()
diff --git a/models/loader/args.py b/models/loader/args.py
index 8e05d20..b15ad5e 100644
--- a/models/loader/args.py
+++ b/models/loader/args.py
@@ -31,7 +31,7 @@ def dir_path(string):
     return s
 
 
-parser = argparse.ArgumentParser(prog='langchina-ChatGLM',
+parser = argparse.ArgumentParser(prog='langchain-ChatGLM',
                                  description='About langchain-ChatGLM, local knowledge based ChatGLM with langchain | '
                                              '基于本地知识库的 ChatGLM 问答')
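
Note (not part of the patch): below is a minimal sketch of the initialization pattern this diff relies on, using only names that appear above (shared, LoaderCheckPoint, DEFAULT_ARGS, shared.loaderLLM); the file name sketch.py and the variable llm are illustrative assumptions, not code from the repository.

    # sketch.py - hypothetical standalone illustration of the pattern used in the patch
    from models import shared
    from models.loader import LoaderCheckPoint
    from models.loader.args import DEFAULT_ARGS

    # Build the checkpoint loader from the default argument dict instead of argparse,
    # so click subcommands such as "start cli" are never passed to parser.parse_args().
    shared.loaderCheckPoint = LoaderCheckPoint(DEFAULT_ARGS)
    llm = shared.loaderLLM()  # the same call cli_demo.main() performs afterwards

With the patch applied, the intended entry points are `python cli.py start api -i 0.0.0.0 -p 7861` and `python cli.py start cli`; `python cli.py start webui` remains unsupported for the reasons given in the comments above.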