更新readme中的embedding_model 配置管理部分

This commit is contained in:
shanshi 2023-11-29 13:45:03 +08:00
parent 6b20ca9ef7
commit aea1e2b134
2 changed files with 24 additions and 17 deletions

View File

@ -139,6 +139,7 @@ git lfs clone https://huggingface.co/THUDM/chatglm2-6b
# install Embedding-model
git lfs clone https://huggingface.co/shibing624/text2vec-base-chinese
cp ~/shibing624/text2vec-base-chinese ~/codefuse-chatbot/embedding_models/
```
@ -155,18 +156,21 @@ os.environ["OPENAI_API_KEY"] = "sk-xxx"
# 可自行替换自己需要的api_base_url
os.environ["API_BASE_URL"] = "https://api.openai.com/v1"
# vi model_config#95 你需要选择的语言模型
# vi model_config#105 你需要选择的语言模型
LLM_MODEL = "gpt-3.5-turbo"
# vi model_config#33 你需要选择的向量模型
# vi model_config#43 你需要选择的向量模型
EMBEDDING_MODEL = "text2vec-base"
# vi model_config#19 修改成你的本地路径,如果能直接连接huggingface,则无需修改
"text2vec-base": "/home/user/xx/text2vec-base-chinese",
# vi model_config#25 修改成你的本地路径,如果能直接连接huggingface,则无需修改
"text2vec-base": "shibing624/text2vec-base-chinese",
# 是否启动本地的notebook,用于代码解释;默认启动docker的notebook
# vi server_config#35,True:启动docker的notebook,False:启动local的notebook
"do_remote": False, / "do_remote": True,
# vi server_config#8~14, 推荐采用容器启动服务
DOCKER_SERVICE = True
# 是否采用容器沙箱
SANDBOX_DO_REMOTE = True
# 是否采用api服务来进行对话
NO_REMOTE_API = True
```
5、启动服务
@ -182,9 +186,9 @@ python dev_opsgpt/service/llm_api.py
```
```bash
# 配置好server_config.py后可一键启动
cd examples
# python ../dev_opsgpt/service/llm_api.py 若需使用本地大语言模型,可执行该命令
bash start_webui.sh
python start.py
```
## 🤗 致谢

View File

@ -144,18 +144,21 @@ os.environ["OPENAI_API_KEY"] = "sk-xxx"
# You can replace the api_base_url yourself
os.environ["API_BASE_URL"] = "https://api.openai.com/v1"
# vi model_config#95 You need to choose the language model
# vi model_config#105 You need to choose the language model
LLM_MODEL = "gpt-3.5-turbo"
# vi model_config#33 You need to choose the vector model
# vi model_config#43 You need to choose the vector model
EMBEDDING_MODEL = "text2vec-base"
# vi model_config#19 Modify this to your local path; if you can connect to huggingface directly, no modification is needed
"text2vec-base": "/home/user/xx/text2vec-base-chinese",
# vi model_config#25 Modify this to your local path; if you can connect to huggingface directly, no modification is needed
"text2vec-base": "shibing624/text2vec-base-chinese",
# Whether to start the local notebook for code interpretation, start the docker notebook by default
# vi server_config#35, True to start the docker notebook, False to start the local notebook
"do_remote": False, / "do_remote": True,
# vi server_config#8~14, it is recommended to start the service using containers.
DOCKER_SERVICE = True
# Whether to use a container sandbox (choose based on your requirements)
SANDBOX_DO_REMOTE = True
# Whether to use the API service to run the chatbot
NO_REMOTE_API = True
```
5. Start the Service
@ -171,8 +174,8 @@ python dev_opsgpt/service/llm_api.py
```
```bash
# After configuring server_config.py, you can start with just one click.
cd examples
# python ../dev_opsgpt/service/llm_api.py If you need to use the local large language model, you can execute this command
bash start_webui.sh
```