import os

import streamlit as st
from streamlit_chatbox import *
from typing import List, Dict
from datetime import datetime
from random import randint

from .utils import *
from loguru import logger

from muagent.utils import *
from muagent.tools import TOOL_SETS
from muagent.chat.search_chat import SEARCH_ENGINES
from muagent.connector import PHASE_LIST, PHASE_CONFIGS
from muagent.service.service_factory import get_cb_details_by_cb_name

from configs.model_config import (
    EMBEDDING_DEVICE, EMBEDDING_MODEL, embedding_model_dict,
    EMBEDDING_ENGINE, KB_ROOT_PATH, llm_model_dict,
)
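
# Module-level state shared across Streamlit reruns: the ChatBox instance that
# renders the conversation, plus global caches for generated code and for the
# figures / detailed answers produced by agent runs.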
chat_box = ChatBox(
    assistant_avatar="../sources/imgs/devops-chatbot2.png"
)

cur_dir = os.path.dirname(os.path.abspath(__file__))

GLOBAL_EXE_CODE_TEXT = ""
GLOBAL_MESSAGE = {"figures": {}, "final_contents": {}}

import yaml

# Load the webui YAML configuration
webui_yaml_filename = "webui_zh.yaml" if True else "webui_en.yaml"
with open(os.path.join(cur_dir, f"yamls/{webui_yaml_filename}"), 'r', encoding="utf-8") as f:
    try:
        webui_configs = yaml.safe_load(f)
    except yaml.YAMLError as exc:
        print(exc)

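
# get_messages_history walks the ChatBox history backwards and returns the most
# recent turns (up to `history_len` user messages) as {"role", "content"} dicts,
# optionally swapping in the detailed agent output cached in GLOBAL_MESSAGE.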
def get_messages_history(history_len: int, isDetailed=False) -> List[Dict]:
    def filter(msg):
        '''
        For the current plain-text dialogue, only return the content of the
        first markdown/text element of each message.
        '''
        content = [x._content for x in msg["elements"] if x._output_method in ["markdown", "text"]]
        content = content[0] if content else ""
        if isDetailed:
            for k, v in GLOBAL_MESSAGE["final_contents"].items():
                if k == content:
                    content = v[-1]
                    break

        for k, v in GLOBAL_MESSAGE["figures"].items():
            content = content.replace(v, k)

        return {
            "role": msg["role"],
            "content": content,
        }

    history = chat_box.filter_history(100000, filter)  # workaround before upgrading streamlit-chatbox.
    user_count = 0
    i = 1
    for i in range(1, len(history) + 1):
        if history[-i]["role"] == "user":
            user_count += 1
            if user_count >= history_len:
                break
    return history[-i:]

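
# upload2sandbox pushes a file selected in the sidebar to the sandbox service;
# when no file is chosen it simply records a failed upload result.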
def upload2sandbox(upload_file, api: ApiRequest):
    if upload_file is None:
        res = {"msg": False}
    else:
        res = api.web_sd_upload(upload_file)

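
# dialogue_page renders the whole chat view: a sidebar for choosing the dialogue
# mode and its retrieval/tool settings, the chat history, the chat input loop
# that dispatches each prompt to the selected backend, and export/clear controls.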
def dialogue_page(api: ApiRequest):
    global GLOBAL_EXE_CODE_TEXT
    chat_box.init_session()

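    # The sidebar collects all per-mode settings (knowledge base, codebase,
    # search engine, agent phase) before the chat input is processed below.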
    with st.sidebar:
        # TODO: bind the dialogue model to the session
        def on_mode_change():
            mode = st.session_state.dialogue_mode
            text = webui_configs["dialogue"]["text_mode_swtich"] + f"{mode}"
            if mode == webui_configs["dialogue"]["mode"][1]:
                cur_kb = st.session_state.get("selected_kb")
                if cur_kb:
                    text = text + webui_configs["dialogue"]["text_knowledgeBase_swtich"] + f'`{cur_kb}`'
            st.toast(text)

        dialogue_mode = st.selectbox(webui_configs["dialogue"]["mode_instruction"],
                                     webui_configs["dialogue"]["mode"],
                                     # ["LLM 对话",
                                     #  "知识库问答",
                                     #  "代码知识库问答",
                                     #  "搜索引擎问答",
                                     #  "Agent问答"
                                     #  ],
                                     on_change=on_mode_change,
                                     key="dialogue_mode",
                                     )
        history_len = st.number_input(webui_configs["dialogue"]["history_length"], 0, 10, 3)

        def on_kb_change():
            st.toast(f"{webui_configs['dialogue']['text_loaded_kbase']}: {st.session_state.selected_kb}")

        def on_cb_change():
            st.toast(f"{webui_configs['dialogue']['text_loaded_cbase']}: {st.session_state.selected_cb}")
            cb_details = get_cb_details_by_cb_name(st.session_state.selected_cb)
            st.session_state['do_interpret'] = cb_details['do_interpret']

        if "interpreter_file_key" not in st.session_state:
            st.session_state["interpreter_file_key"] = 0

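        # Defaults for the mode-specific settings; each dialogue mode below
        # overrides the ones it needs via its own expander in the sidebar.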
        not_agent_qa = True
        interpreter_file = ""
        is_detailed = False
        if dialogue_mode == webui_configs["dialogue"]["mode"][1]:
            with st.expander(webui_configs["dialogue"]["kbase_expander_name"], True):
                kb_list = api.list_knowledge_bases(no_remote_api=True)
                selected_kb = st.selectbox(
                    webui_configs["dialogue"]["kbase_selectbox_name"],
                    kb_list,
                    on_change=on_kb_change,
                    key="selected_kb",
                )
                kb_top_k = st.number_input(
                    webui_configs["dialogue"]["kbase_ninput_topk_name"], 1, 20, 3)
                score_threshold = st.number_input(
                    webui_configs["dialogue"]["kbase_ninput_score_threshold_name"],
                    0.0, float(SCORE_THRESHOLD), float(SCORE_THRESHOLD),
                    float(SCORE_THRESHOLD // 100))

        elif dialogue_mode == webui_configs["dialogue"]["mode"][2]:
            with st.expander(webui_configs["dialogue"]["cbase_expander_name"], True):
                cb_list = api.list_cb(no_remote_api=True)
                logger.debug('codebase_list={}'.format(cb_list))
                selected_cb = st.selectbox(
                    webui_configs["dialogue"]["cbase_selectbox_name"],
                    cb_list,
                    on_change=on_cb_change,
                    key="selected_cb",
                )

                # change do_interpret
                st.toast(f"{webui_configs['dialogue']['text_loaded_cbase']}: {st.session_state.selected_cb}")
                cb_details = get_cb_details_by_cb_name(st.session_state.selected_cb)
                st.session_state['do_interpret'] = cb_details['do_interpret']

                cb_code_limit = st.number_input(
                    webui_configs["dialogue"]["cbase_ninput_topk_name"], 1, 20, 1)

                search_type_list = webui_configs["dialogue"]["cbase_search_type_v1"] if st.session_state['do_interpret'] == 'YES' \
                    else webui_configs["dialogue"]["cbase_search_type_v2"]

                cb_search_type = st.selectbox(
                    webui_configs["dialogue"]["cbase_selectbox_type_name"],
                    search_type_list,
                    key='cb_search_type'
                )
        elif dialogue_mode == webui_configs["dialogue"]["mode"][3]:
            with st.expander(webui_configs["dialogue"]["expander_search_name"], True):
                search_engine = st.selectbox(
                    webui_configs["dialogue"]["selectbox_search_name"],
                    SEARCH_ENGINES.keys(), 0)
                se_top_k = st.number_input(
                    webui_configs["dialogue"]["ninput_search_topk_name"], 1, 20, 3)
        elif dialogue_mode == webui_configs["dialogue"]["mode"][4]:
            not_agent_qa = False
            with st.expander(webui_configs["dialogue"]["phase_expander_name"], True):
                choose_phase = st.selectbox(
                    webui_configs["dialogue"]["phase_selectbox_name"], PHASE_LIST, 0)

            is_detailed = st.toggle(webui_configs["dialogue"]["phase_toggle_detailed_name"], False)
            tool_using_on = st.toggle(
                webui_configs["dialogue"]["phase_toggle_doToolUsing"],
                PHASE_CONFIGS[choose_phase].get("do_using_tool", False))
            tool_selects = []
            if tool_using_on:
                with st.expander("工具军火库", True):
                    tool_selects = st.multiselect(
                        webui_configs["dialogue"]["phase_multiselect_tools"],
                        TOOL_SETS, ["WeatherInfo"])

            search_on = st.toggle(webui_configs["dialogue"]["phase_toggle_doSearch"],
                                  PHASE_CONFIGS[choose_phase].get("do_search", False))
            search_engine, top_k = None, 3
            if search_on:
                with st.expander(webui_configs["dialogue"]["expander_search_name"], True):
                    search_engine = st.selectbox(
                        webui_configs["dialogue"]["selectbox_search_name"],
                        SEARCH_ENGINES.keys(), 0)
                    se_top_k = st.number_input(
                        webui_configs["dialogue"]["ninput_search_topk_name"], 1, 20, 3)

            doc_retrieval_on = st.toggle(
                webui_configs["dialogue"]["phase_toggle_doDocRetrieval"],
                PHASE_CONFIGS[choose_phase].get("do_doc_retrieval", False)
            )
            selected_kb, top_k, score_threshold = None, 3, 1.0
            if doc_retrieval_on:
                with st.expander(webui_configs["dialogue"]["kbase_expander_name"], True):
                    kb_list = api.list_knowledge_bases(no_remote_api=True)
                    selected_kb = st.selectbox(
                        webui_configs["dialogue"]["kbase_selectbox_name"],
                        kb_list,
                        on_change=on_kb_change,
                        key="selected_kb",
                    )
                    top_k = st.number_input(
                        webui_configs["dialogue"]["kbase_ninput_topk_name"], 1, 20, 3)
                    score_threshold = st.number_input(
                        webui_configs["dialogue"]["kbase_ninput_score_threshold_name"],
                        0.0, float(SCORE_THRESHOLD), float(SCORE_THRESHOLD),
                        float(SCORE_THRESHOLD // 100))

            code_retrieval_on = st.toggle(
                webui_configs["dialogue"]["phase_toggle_doCodeRetrieval"],
                PHASE_CONFIGS[choose_phase].get("do_code_retrieval", False))
            selected_cb, top_k = None, 1
            cb_search_type = "tag"
            if code_retrieval_on:
                with st.expander(webui_configs["dialogue"]["cbase_expander_name"], True):
                    cb_list = api.list_cb(no_remote_api=True)
                    logger.debug('codebase_list={}'.format(cb_list))
                    selected_cb = st.selectbox(
                        webui_configs["dialogue"]["cbase_selectbox_name"],
                        cb_list,
                        on_change=on_cb_change,
                        key="selected_cb",
                    )
                    # change do_interpret
                    st.toast(f"{webui_configs['dialogue']['text_loaded_cbase']}: {st.session_state.selected_cb}")
                    cb_details = get_cb_details_by_cb_name(st.session_state.selected_cb)
                    st.session_state['do_interpret'] = cb_details['do_interpret']

                    top_k = st.number_input(
                        webui_configs["dialogue"]["cbase_ninput_topk_name"], 1, 20, 1)

                    search_type_list = webui_configs["dialogue"]["cbase_search_type_v1"] if st.session_state['do_interpret'] == 'YES' \
                        else webui_configs["dialogue"]["cbase_search_type_v2"]

                    cb_search_type = st.selectbox(
                        webui_configs["dialogue"]["cbase_selectbox_type_name"],
                        search_type_list,
                        key='cb_search_type'
                    )

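        # Sandbox file management: upload a file for the interpreter, or browse,
        # download and delete files already stored in the sandbox service.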
        with st.expander(webui_configs["sandbox"]["expander_name"], False):
            interpreter_file = st.file_uploader(
                webui_configs["sandbox"]["file_upload_name"],
                [i for ls in LOADER2EXT_DICT.values() for i in ls] + ["jpg", "png"],
                accept_multiple_files=False,
                key=st.session_state.interpreter_file_key,
            )

            files = api.web_sd_list_files()
            files = files["data"]
            download_file = st.selectbox(webui_configs["sandbox"]["selectbox_name"], files,
                                         key="download_file",)

            cols = st.columns(3)
            file_url, file_name = api.web_sd_download(download_file)
            if cols[0].button(webui_configs["sandbox"]["button_upload_name"],):
                upload2sandbox(interpreter_file, api)
                st.session_state["interpreter_file_key"] += 1
                interpreter_file = ""
                st.experimental_rerun()

            cols[1].download_button(webui_configs["sandbox"]["button_download_name"],
                                    file_url, file_name)
            if cols[2].button(webui_configs["sandbox"]["button_delete_name"],):
                api.web_sd_delete(download_file)

        # code_interpreter_on = st.toggle(
        #     webui_configs["sandbox"]["toggle_doCodeInterpreter"]) and not_agent_qa
        # code_exec_on = st.toggle(webui_configs["sandbox"]["toggle_doAutoCodeExec"]) and not_agent_qa

    # Display chat messages from history on app rerun
    chat_box.output_messages()

    chat_input_placeholder = webui_configs["chat"]["chat_placeholder"]
    code_text = "" or GLOBAL_EXE_CODE_TEXT
    codebox_res = None

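    # Main chat loop: each submitted prompt is routed to the backend that matches
    # the selected dialogue mode (plain LLM, knowledge base, codebase, search
    # engine, or agent phase), and the streamed answer is rendered in place.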
    if prompt := st.chat_input(chat_input_placeholder, key="prompt"):
        upload2sandbox(interpreter_file, api)
        logger.debug(f"prompt: {prompt}")

        history = get_messages_history(history_len, is_detailed)
        chat_box.user_say(prompt)
        if dialogue_mode == webui_configs["dialogue"]["mode"][0]:
            chat_box.ai_say(webui_configs["chat"]["chatbox_saying"])
            text = ""
            r = api.chat_chat(
                prompt, history, no_remote_api=True,
                embed_model=EMBEDDING_MODEL, embed_model_path=embedding_model_dict[EMBEDDING_MODEL],
                model_device=EMBEDDING_DEVICE, embed_engine=EMBEDDING_ENGINE,
                api_key=llm_model_dict[LLM_MODEL]["api_key"],
                api_base_url=llm_model_dict[LLM_MODEL]["api_base_url"],
                llm_model=LLM_MODEL)
            for t in r:
                if error_msg := check_error_msg(t):  # check whether an error occurred
                    st.error(error_msg)
                    break
                text += t["answer"]
                # text = replace_lt_gt(text)
                chat_box.update_msg(text)
            # logger.debug(f"text: {text}")
            # text = replace_lt_gt(text)
            chat_box.update_msg(text, streaming=False)  # update the final string and remove the cursor
            # check whether the answer contains code, and offer editing/execution features
            # code_text = api.codebox.decode_code_from_text(text)
            # GLOBAL_EXE_CODE_TEXT = code_text
            # if code_text and code_exec_on:
            #     codebox_res = api.codebox_chat("```"+code_text+"```", do_code_exe=True)
        elif dialogue_mode == webui_configs["dialogue"]["mode"][4]:
            display_infos = [webui_configs["chat"]["chatbox_saying"]]
            if search_on:
                display_infos.append(Markdown("...", in_expander=True,
                                              title=webui_configs["chat"]["chatbox_search_result"]))
            if doc_retrieval_on:
                display_infos.append(Markdown("...", in_expander=True,
                                              title=webui_configs["chat"]["chatbox_doc_result"]))
            if code_retrieval_on:
                display_infos.append(Markdown("...", in_expander=True,
                                              title=webui_configs["chat"]["chatbox_code_result"]))

            chat_box.ai_say(display_infos)

            if 'history_node_list' in st.session_state:
                history_node_list: List[str] = st.session_state['history_node_list']
            else:
                history_node_list: List[str] = []

            input_kargs = {"query": prompt,
                           "phase_name": choose_phase,
                           "history": history,
                           "doc_engine_name": selected_kb,
                           "search_engine_name": search_engine,
                           "code_engine_name": selected_cb,
                           "cb_search_type": cb_search_type,
                           "top_k": top_k,
                           "score_threshold": score_threshold,
                           "do_search": search_on,
                           "do_doc_retrieval": doc_retrieval_on,
                           "do_code_retrieval": code_retrieval_on,
                           "do_tool_retrieval": False,
                           "custom_phase_configs": {},
                           "custom_chain_configs": {},
                           "custom_role_configs": {},
                           "choose_tools": tool_selects,
                           "history_node_list": history_node_list,
                           "isDetailed": is_detailed,
                           "upload_file": interpreter_file,
                           "embed_model": EMBEDDING_MODEL,
                           "model_device": EMBEDDING_DEVICE,
                           "embed_model_path": embedding_model_dict[EMBEDDING_MODEL],
                           "embed_engine": EMBEDDING_ENGINE,
                           "kb_root_path": KB_ROOT_PATH,
                           "model_name": LLM_MODEL,
                           "api_key": llm_model_dict[LLM_MODEL]["api_key"],
                           "api_base_url": llm_model_dict[LLM_MODEL]["api_base_url"],
                           }
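            # The agent backend yields incremental results; the answer is replayed
            # in 10-character slices so the chat box shows a streaming effect.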
            text = ""
            d = {"docs": []}
            for idx_count, d in enumerate(api.agent_achat(**input_kargs)):
                if error_msg := check_error_msg(d):  # check whether an error occurred
                    st.error(error_msg)
                logger.debug(f"d: {d['answer']}")
                text = d["answer"]
                for text_length in range(0, len(text) + 1, 10):
                    chat_box.update_msg(text[:text_length + 10], element_index=0, streaming=True)

            GLOBAL_MESSAGE.setdefault("final_contents", {}).setdefault(d.get("answer", ""), []).append(d.get("final_content", ""))

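            # Figures returned by the agent are inlined as base64 <img> tags and
            # cached in GLOBAL_MESSAGE so get_messages_history can map them back.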
            for k, v in d["figures"].items():
                if k in text:
                    img_html = "\n<img src='data:image/png;base64,{}' class='img-fluid'>\n".format(v)
                    text = text.replace(k, img_html).replace(".png", "")
                    GLOBAL_MESSAGE.setdefault("figures", {}).setdefault(k, v)

            chat_box.update_msg(text, element_index=0, streaming=False, state="complete")  # update the final string and remove the cursor
            if search_on:
                chat_box.update_msg(f"{webui_configs['chat']['chatbox_search_result']}:\n\n" + "\n\n".join(d["search_docs"]),
                                    element_index=search_on, streaming=False, state="complete")
            if doc_retrieval_on:
                chat_box.update_msg(f"{webui_configs['chat']['chatbox_doc_result']}:\n\n" + "\n\n".join(d["db_docs"]),
                                    element_index=search_on + doc_retrieval_on, streaming=False, state="complete")
            if code_retrieval_on:
                chat_box.update_msg(f"{webui_configs['chat']['chatbox_code_result']}:\n\n" + "\n\n".join(d["code_docs"]),
                                    element_index=search_on + doc_retrieval_on + code_retrieval_on, streaming=False, state="complete")

            history_node_list.extend([node[0] for node in d.get("related_nodes", [])])
            history_node_list = list(set(history_node_list))
            st.session_state['history_node_list'] = history_node_list
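        # The remaining modes follow the same pattern: show a placeholder message,
        # stream the backend's answer into element 0, then fill the expander with
        # the retrieved documents, code snippets, or search results.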
        elif dialogue_mode == webui_configs["dialogue"]["mode"][1]:
            history = get_messages_history(history_len)
            chat_box.ai_say([
                f"{webui_configs['chat']['chatbox_doc_querying']} `{selected_kb}` ...",
                Markdown("...", in_expander=True, title=webui_configs['chat']['chatbox_doc_result']),
            ])
            text = ""
            d = {"docs": []}
            for idx_count, d in enumerate(
                api.knowledge_base_chat(
                    prompt, selected_kb, kb_top_k, score_threshold, history,
                    embed_model=EMBEDDING_MODEL, embed_model_path=embedding_model_dict[EMBEDDING_MODEL],
                    model_device=EMBEDDING_DEVICE, embed_engine=EMBEDDING_ENGINE, llm_model=LLM_MODEL,
                    api_key=llm_model_dict[LLM_MODEL]["api_key"],
                    api_base_url=llm_model_dict[LLM_MODEL]["api_base_url"],
                )
            ):
                if error_msg := check_error_msg(d):  # check whether an error occurred
                    st.error(error_msg)
                text += d["answer"]
                if idx_count % 10 == 0:
                    chat_box.update_msg(text, element_index=0)
                # chat_box.update_msg("知识库匹配结果: \n\n".join(d["docs"]), element_index=1, streaming=False, state="complete")
            chat_box.update_msg(text, element_index=0, streaming=False)  # update the final string and remove the cursor
            chat_box.update_msg(f"{webui_configs['chat']['chatbox_doc_result']}: \n\n".join(d["docs"]), element_index=1, streaming=False, state="complete")

        elif dialogue_mode == webui_configs["dialogue"]["mode"][2]:
            logger.info('prompt={}'.format(prompt))
            logger.info('history={}'.format(history))
            if 'history_node_list' in st.session_state:
                api.codeChat.history_node_list = st.session_state['history_node_list']

            chat_box.ai_say([
                f"{webui_configs['chat']['chatbox_code_querying']} `{selected_cb}` ...",
                Markdown("...", in_expander=True, title=webui_configs['chat']['chatbox_code_result']),
            ])
            text = ""
            d = {"codes": []}

            for idx_count, d in enumerate(api.code_base_chat(query=prompt, code_base_name=selected_cb,
                                                             code_limit=cb_code_limit, history=history,
                                                             cb_search_type=cb_search_type,
                                                             no_remote_api=True, embed_model=EMBEDDING_MODEL,
                                                             embed_model_path=embedding_model_dict[EMBEDDING_MODEL],
                                                             embed_engine=EMBEDDING_ENGINE, llm_model=LLM_MODEL,
                                                             api_key=llm_model_dict[LLM_MODEL]["api_key"],
                                                             api_base_url=llm_model_dict[LLM_MODEL]["api_base_url"],
                                                             )):
                if error_msg := check_error_msg(d):
                    st.error(error_msg)
                text += d["answer"]
                if idx_count % 10 == 0:
                    # text = replace_lt_gt(text)
                    chat_box.update_msg(text, element_index=0)

            # postprocess
            logger.debug(f"d={d}")
            text = replace_lt_gt(text)
            chat_box.update_msg(text, element_index=0, streaming=False)  # update the final string and remove the cursor
            logger.debug('text={}'.format(text))
            chat_box.update_msg("\n".join(d["codes"]), element_index=1, streaming=False, state="complete")

            # session state update
            # st.session_state['history_node_list'] = api.codeChat.history_node_list

        elif dialogue_mode == webui_configs["dialogue"]["mode"][3]:
            chat_box.ai_say([
                webui_configs['chat']['chatbox_searching'],
                Markdown("...", in_expander=True, title=webui_configs['chat']['chatbox_search_result']),
            ])
            text = ""
            d = {"docs": []}
            for idx_count, d in enumerate(
                api.search_engine_chat(
                    prompt, search_engine, se_top_k, history, embed_model=EMBEDDING_MODEL,
                    embed_model_path=embedding_model_dict[EMBEDDING_MODEL],
                    model_device=EMBEDDING_DEVICE, embed_engine=EMBEDDING_ENGINE, llm_model=LLM_MODEL,
                    api_key=llm_model_dict[LLM_MODEL]["api_key"],
                    api_base_url=llm_model_dict[LLM_MODEL]["api_base_url"],)
            ):
                if error_msg := check_error_msg(d):  # check whether an error occurred
                    st.error(error_msg)
                text += d["answer"]
                if idx_count % 10 == 0:
                    chat_box.update_msg(text, element_index=0)
                # chat_box.update_msg("搜索匹配结果: \n\n".join(d["docs"]), element_index=1, streaming=False)
            chat_box.update_msg(text, element_index=0, streaming=False)  # update the final string and remove the cursor
            chat_box.update_msg(f"{webui_configs['chat']['chatbox_search_result']}: \n\n".join(d["docs"]), element_index=1, streaming=False, state="complete")

        # clear the uploaded file
        st.session_state["interpreter_file_key"] += 1
        st.experimental_rerun()

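    # Sidebar footer: clear the conversation or export it as a Markdown file.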
    now = datetime.now()
    with st.sidebar:

        cols = st.columns(2)
        export_btn = cols[0]
        if cols[1].button(
            webui_configs['export']['button_clear_conversation_name'],
            use_container_width=True,
        ):
            chat_box.reset_history()
            GLOBAL_EXE_CODE_TEXT = ""
            if 'history_node_list' in st.session_state:
                st.session_state['history_node_list'] = []
            st.experimental_rerun()

    export_btn.download_button(
        webui_configs['export']['download_button_export_name'],
        "".join(chat_box.export2md()),
        file_name=f"{now:%Y-%m-%d %H.%M}_conversations.md",
        mime="text/markdown",
        use_container_width=True,
    )