Update the embedding_model configuration section of the README
parent 6b20ca9ef7
commit aea1e2b134

README.md | 22
@@ -139,6 +139,7 @@ git lfs clone https://huggingface.co/THUDM/chatglm2-6b
 
 # install Embedding-model
 git lfs clone https://huggingface.co/shibing624/text2vec-base-chinese
+cp ~/shibing624/text2vec-base-chinese ~/codefuse-chatbot/embedding_models/
 ```
 
 
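A quick way to confirm that the clone and copy above worked is to load the embedding model from its local directory before wiring it into model_config. The sketch below is illustrative only: it assumes the transformers and torch packages are installed and that the cp above produced ~/codefuse-chatbot/embedding_models/text2vec-base-chinese; adjust the path if your layout differs.

```python
import os

import torch
from transformers import AutoModel, AutoTokenizer

# Assumed location of the local copy made by the cp command above.
model_dir = os.path.expanduser("~/codefuse-chatbot/embedding_models/text2vec-base-chinese")

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModel.from_pretrained(model_dir)

# Encode one sentence and mean-pool the token embeddings into a sentence vector
# (simple mean pooling; the attention mask is ignored for brevity).
inputs = tokenizer(["如何配置向量模型"], padding=True, truncation=True, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
embedding = outputs.last_hidden_state.mean(dim=1)

print(embedding.shape)  # expected: torch.Size([1, 768]) for this BERT-base-sized model
```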
@@ -155,18 +156,21 @@ os.environ["OPENAI_API_KEY"] = "sk-xxx"
 # Replace api_base_url with the one you need
 os.environ["API_BASE_URL"] = "https://api.openai.com/v1"
 
-# vi model_config#95 the language model you need to choose
+# vi model_config#105 the language model you need to choose
 LLM_MODEL = "gpt-3.5-turbo"
 
-# vi model_config#33 the vector model you need to choose
+# vi model_config#43 the vector model you need to choose
 EMBEDDING_MODEL = "text2vec-base"
 
-# vi model_config#19 change this to your local path; no change is needed if huggingface is directly reachable
-"text2vec-base": "/home/user/xx/text2vec-base-chinese",
+# vi model_config#25 change this to your local path; no change is needed if huggingface is directly reachable
+"text2vec-base": "shibing624/text2vec-base-chinese",
 
-# Whether to start a local notebook for code interpretation; the docker notebook is started by default
-# vi server_config#35, True starts the docker notebook, False starts the local notebook
-"do_remote": False, / "do_remote": True,
+# vi server_config#8~14, starting the services in containers is recommended
+DOCKER_SERVICE = True
+# Whether to use the container sandbox
+SANDBOX_DO_REMOTE = True
+# Whether to go through the API service
+NO_REMOTE_API = True
 ```
 
 5. Start the Service
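Taken together, the settings touched by this hunk amount to choosing a chat model, an embedding model, and how the sandbox and API services run. The sketch below shows roughly how the pieces fit; only the names that appear in the diff (OPENAI_API_KEY, API_BASE_URL, LLM_MODEL, EMBEDDING_MODEL, the "text2vec-base" entry, DOCKER_SERVICE, SANDBOX_DO_REMOTE, NO_REMOTE_API) come from the source, and the surrounding structure, including the embedding_model_dict name, is an assumption for illustration rather than the project's exact layout.

```python
import os

# --- model_config.py (illustrative excerpt) ---
os.environ["OPENAI_API_KEY"] = "sk-xxx"                    # your OpenAI-compatible key
os.environ["API_BASE_URL"] = "https://api.openai.com/v1"   # swap for another base URL if needed

LLM_MODEL = "gpt-3.5-turbo"        # the chat model the service will call
EMBEDDING_MODEL = "text2vec-base"  # key into the embedding model mapping below

# Keep the Hugging Face id if the hub is reachable, or point the entry
# at the local copy made in the install step above.
embedding_model_dict = {
    "text2vec-base": "shibing624/text2vec-base-chinese",
    # "text2vec-base": "/home/user/xx/text2vec-base-chinese",  # offline alternative
}

# --- server_config.py (illustrative excerpt) ---
DOCKER_SERVICE = True      # run the services in containers (recommended)
SANDBOX_DO_REMOTE = True   # execute generated code inside the container sandbox
NO_REMOTE_API = True       # whether requests go through the API service (as described in the diff)
```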
@@ -182,9 +186,9 @@ python dev_opsgpt/service/llm_api.py
 ```
 
 ```bash
+# After configuring server_config.py, you can start everything with a single command.
 cd examples
-# python ../dev_opsgpt/service/llm_api.py  run this command if you need to use a local large language model
-bash start_webui.sh
+python start.py
 ```
 
 ## 🤗 Acknowledgements
README_en.md | 19
@@ -144,18 +144,21 @@ os.environ["OPENAI_API_KEY"] = "sk-xxx"
 # You can replace the api_base_url yourself
 os.environ["API_BASE_URL"] = "https://api.openai.com/v1"
 
-# vi model_config#95 You need to choose the language model
+# vi model_config#105 You need to choose the language model
 LLM_MODEL = "gpt-3.5-turbo"
 
-# vi model_config#33 You need to choose the vector model
+# vi model_config#43 You need to choose the vector model
 EMBEDDING_MODEL = "text2vec-base"
 
-# vi model_config#19 Modify this to your local path; no change is needed if huggingface is directly reachable
-"text2vec-base": "/home/user/xx/text2vec-base-chinese",
+# vi model_config#25 Modify this to your local path; no change is needed if huggingface is directly reachable
+"text2vec-base": "shibing624/text2vec-base-chinese",
 
-# Whether to start the local notebook for code interpretation; the docker notebook is started by default
-# vi server_config#35, True starts the docker notebook, False starts the local notebook
-"do_remote": False, / "do_remote": True,
+# vi server_config#8~14, starting the services in containers is recommended
+DOCKER_SERVICE = True
+# Whether to use the container sandbox
+SANDBOX_DO_REMOTE = True
+# Whether to go through the API service
+NO_REMOTE_API = True
 ```
 
 5. Start the Service
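The only real difference between the two values shown for "text2vec-base" is offline versus online loading: a local directory works without any hub access, while the shibing624/text2vec-base-chinese id needs connectivity to huggingface at first use. A small, hypothetical helper (not part of the repository) that prefers a local copy when one exists could look like this:

```python
import os


def resolve_embedding_model(local_dir: str,
                            hub_id: str = "shibing624/text2vec-base-chinese") -> str:
    """Return the local model directory if it exists, otherwise the Hugging Face id."""
    local_dir = os.path.expanduser(local_dir)
    return local_dir if os.path.isdir(local_dir) else hub_id


# Example: falls back to the hub id on machines without the local copy.
print(resolve_embedding_model("~/codefuse-chatbot/embedding_models/text2vec-base-chinese"))
```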
@@ -171,8 +174,8 @@ python dev_opsgpt/service/llm_api.py
 ```
 
 ```bash
+# After configuring server_config.py, you can start everything with a single command.
 cd examples
-# python ../dev_opsgpt/service/llm_api.py  run this command if you need to use a local large language model
 bash start_webui.sh
 ```
 