Commit c88f6743 by 陈正乐

gradio-UI style changes

parent f9380e0a
-bitsandbytes==0.41.1
+#bitsandbytes==0.41.1
cpm-kernels==1.0.11
fastapi==0.100.0
-Flask==2.3.2
+Flask==2.1.1
jieba==0.42.1
langchain==0.1.13
peft==0.4.0
......
@@ -38,9 +38,10 @@ prompt1 = """'''
PROMPT1 = PromptTemplate(input_variables=["context", "question"], template=prompt1)
# 预设的安全响应
-SAFE_RESPONSE = "抱歉,我无法回答这个问题。"
+SAFE_RESPONSE = "您好,我不具备人类属性,因此没有名字。我可以协助您完成范围广泛的任务并提供有关各种主题的信息,比如回答问题,提供定义和解释及建议。如果您有任何问题,请随时向我提问。"
BLOCKED_KEYWORDS = ["文心一言", "百度", "模型"]
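
The hunks below call self.contains_blocked_keywords(), but the method body is not part of this diff. A minimal sketch of the check it presumably performs, assuming plain substring matching against BLOCKED_KEYWORDS (the name comes from the calls below; the body is an assumption):

def contains_blocked_keywords(text: str) -> bool:
    # Assumed logic: flag the text if any blocked keyword occurs as a substring.
    # In the QA class this is a method, invoked as self.contains_blocked_keywords(...).
    return any(keyword in text for keyword in BLOCKED_KEYWORDS)
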
class QA:
    def __init__(self, _prompt, _base_llm, _llm_kwargs, _prompt_kwargs, _db, _faiss_db):
        self.prompt = _prompt
@@ -124,6 +125,47 @@ class QA:
            yield history
        await task
+    async def async_chat2(self, history):
+        _question = history[-1][0]
+        history = history[:-1]
+        if self.contains_blocked_keywords(_question):
+            self.cur_answer = SAFE_RESPONSE
+            yield [(_question, self.cur_answer)]
+            return
+        self.cur_similarity = self.get_similarity(_aquestion=_question)
+        self.cur_question = self.prompt.format(context=self.cur_similarity, question=_question)
+        callback = AsyncIteratorCallbackHandler()
+
+        async def wrap_done(fn: Awaitable, event: asyncio.Event):
+            try:
+                await fn
+            except Exception as e:
+                import traceback
+                traceback.print_exc()
+                print(f"Caught exception: {e}")
+            finally:
+                event.set()
+
+        task = asyncio.create_task(
+            wrap_done(self.llm.arun(context=self.cur_similarity, question=_question, callbacks=[callback]),
+                      callback.done))
+        self.cur_answer = ""
+        print(_question, self.cur_answer)
+        history.append((_question, self.cur_answer))
+        async for token in callback.aiter():
+            self.cur_answer += token
+            if self.contains_blocked_keywords(self.cur_answer):
+                self.cur_answer = SAFE_RESPONSE
+                history[-1] = (_question, self.cur_answer)
+                yield history
+                return
+            history[-1] = (_question, self.cur_answer)
+            yield history
+        await task
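
For orientation, a minimal sketch of how the async_chat2 generator above can be consumed outside Gradio; qa stands for an already-initialized QA instance and the question text is only an example:

import asyncio

async def stream_demo(qa):
    # async_chat2 expects the newest question as the last history entry
    # and yields the growing history while tokens stream in.
    history = [["低空经济的主要应用场景有哪些?", None]]
    async for updated in qa.async_chat2(history):
        print(updated[-1][1])  # partial answer accumulated so far

# asyncio.run(stream_demo(qa))
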
    def get_history(self):
        self.history = self.crud.get_history(self.chat_id)
        return self.history

@@ -156,9 +198,19 @@ class QA:
    def set_info(self, question):
        info = self.crud.get_chat_info(self.chat_id)
        if info == '\t\t':
-            n_info = '这是一个info'
-            self.crud.set_info(self.chat_id, n_info)
+            if len(question) <= 10:
+                n_info = question
+                self.crud.set_info(self.chat_id, n_info)
+            else:
+                info_prompt = """'''
+                {question}
+                '''
+                请你用十个字之内总结上述问题,你的输出不得大于10个字。
+                """
+                info_prompt_t = PromptTemplate(input_variables=["question"], template=info_prompt)
+                info_llm = LLMChain(llm=self.base_llm, prompt=info_prompt_t, llm_kwargs=self.llm_kwargs)
+                n_info = info_llm.run(question=question)
+                self.crud.set_info(self.chat_id, n_info)
if __name__ == "__main__":
......
@@ -75,15 +75,24 @@ def main():
    def get_users():
        o_users = my_chat.get_users()
        users_l = [item[0] for item in o_users]
        return gr.components.Radio(choices=users_l, label="选择一个用户", value=users_l[0], interactive=True), users_l[0]

    def create_chat(user_account):
        my_chat.create_chat(user_account)

    def get_chats(user_account):
        o_chats = my_chat.get_chats(user_account)
+        if len(o_chats) >= 13:
+            o_chats = o_chats[:13]
        chats_l = [item[0] + ':' + item[1] for item in o_chats]
-        return gr.components.Radio(choices=chats_l, label="选择一个对话", value=chats_l[0], interactive=True)
+        if my_chat.chat_id:
+            result = [item for item in chats_l if item.split(":")[0].strip() == my_chat.chat_id][0]
+            return gr.components.Radio(choices=chats_l, label="历史对话", value=result, interactive=True,
+                                       show_label=True)
+        else:
+            return gr.components.Radio(choices=chats_l, label="历史对话", value=chats_l[0], interactive=True,
+                                       show_label=True)

    def set_info(question):
        my_chat.set_info(question)
@@ -97,58 +106,129 @@ def main():
        l_chats = get_chats(t_user)
        return l_users, l_chats

+    def clear_text(t):
+        if t == "请输入您的问题":
+            return ""
+        else:
+            return t
+
+    def reset_text():
+        return ""
+
+    def blur(t):
+        if t == "":
+            return "请输入您的问题"
+        else:
+            return t
+
+    def text_his(text, history):
+        history = history + [[text, None]]
+        return history
+
+    def clear_tip(his):
+        if not his[0][0] and his[0][1] == "我是新晨科技股份有限公司开发的人工智能助手,名叫小晨,如果您有任何问题,欢迎随时向我咨询":
+            return his[1:]
+        else:
+            return his

-    with gr.Blocks(css='index(1).css') as demo:
-        gr.HTML("""<h1 align="center">低空经济知识问答</h1>""")
+    with gr.Blocks(css='index.css', title="低空经济知识问答") as demo:
+        gr.HTML("""<h1 align="center">低空经济知识问答</h1>""", visible=False)
        with gr.Row():
-            with gr.Column(scale=2):
+            with gr.Column(scale=2, visible=False):
                users = gr.components.Radio(choices=[], label="选择一个用户", interactive=True,
                                            visible=False, show_label=False)
-                chats = gr.components.Radio(choices=[], label="选择一个对话", interactive=True,
-                                            show_label=False)
-                new_chat_btn = gr.Button("新建对话")
+                chats = gr.components.Radio(choices=[], label="历史对话", interactive=True,
+                                            show_label=True, visible=False)
+                new_chat_btn = gr.Button("新建对话", visible=False)
            with gr.Column(scale=8):
                chatbot = gr.Chatbot(bubble_full_width=False,
                                     avatar_images=(ICON_PATH + '\\user2.png', ICON_PATH + "\\bot2.png"),
-                                     value=show_history(), height=400, show_copy_button=True,
+                                     value=[[None,
+                                             "我是新晨科技股份有限公司开发的人工智能助手,名叫小晨,如果您有任何问题,欢迎随时向我咨询"]],
+                                     height=400, show_copy_button=True,
                                     show_label=False, line_breaks=True)
                with gr.Row():
-                    input_text = gr.Textbox(show_label=False, lines=1, label="文本输入", scale=9, container=False)
+                    input_text = gr.Textbox(show_label=False, lines=1, label="文本输入", scale=9, container=False,
+                                            placeholder="请输入您的问题", max_lines=1)
                    sub_btn = gr.Button("提交", scale=1)

-        demo.load(load, [], [users, chats])

-        new_chat_btn.click(create_chat, [users], []).then(
-            get_chats, [users], [chats]
-        )

-        users.change(get_chats, [users], [chats]).then(
-            set_chat_id, [chats], None
-        ).then(
-            show_history, None, chatbot
-        )

-        chats.change(set_chat_id, [chats], None).then(
-            show_history, None, chatbot
-        )

-        sub_btn.click(my_chat.async_chat, [input_text], [chatbot]
-        ).then(
-            stop_btn, None, sub_btn
-        ).then(
-            set_info, [input_text], []
-        ).then(
-            get_chats, [users], [chats]
-        ).then(
-            my_chat.update_history, None, None
-        ).then(
-            show_history, None, chatbot
-        ).then(
-            clear, None, [input_text]
-        ).then(
-            restart_btn, None, sub_btn
-        )

-    demo.queue().launch(share=False, inbrowser=True, server_name='192.168.22.80', server_port=GR_PORT)

+        sub_btn.click(
+            stop_btn, [], sub_btn
+        ).success(
+            clear_tip, [chatbot], [chatbot]
+        ).success(
+            text_his, [input_text, chatbot], [chatbot]
+        ).success(
+            reset_text, [], input_text
+        ).success(
+            my_chat.async_chat2, [chatbot], [chatbot]
+        ).success(
+            restart_btn, [], sub_btn
+        )

+        # input_text.submit(
+        #     stop_btn, [], sub_btn
+        # ).then(
+        #     my_chat.async_chat2, [input_text, chatbot], [chatbot]
+        # ).then(
+        #     restart_btn, [], sub_btn
+        # )

+        demo.load(load, [], [users, chats])

+        # input_text.submit(my_chat.async_chat, [input_text], [chatbot]
+        # ).then(
+        #     stop_btn, None, sub_btn
+        # ).then(
+        #     set_info, [input_text], []
+        # ).then(
+        #     get_chats, [users], [chats]
+        # ).then(
+        #     my_chat.update_history, None, None
+        # ).then(
+        #     show_history, None, chatbot
+        # ).then(
+        #     clear, None, [input_text]
+        # ).then(
+        #     restart_btn, None, sub_btn
+        # ).then(
+        #     reset_text, [], input_text
+        # )

+        # new_chat_btn.click(create_chat, [users], []).then(
+        #     get_chats, [users], [chats]
+        # )

+        # users.change(get_chats, [users], [chats]).then(
+        #     set_chat_id, [chats], None
+        # ).then(
+        #     show_history, None, chatbot
+        # )

+        # chats.change(set_chat_id, [chats], None).then(
+        #     show_history, None, chatbot
+        # )

+        # sub_btn.click(my_chat.async_chat, [input_text], [chatbot]
+        # ).then(
+        #     stop_btn, None, sub_btn
+        # ).then(
+        #     set_info, [input_text], []
+        # ).then(
+        #     get_chats, [users], [chats]
+        # ).then(
+        #     my_chat.update_history, None, None
+        # ).then(
+        #     show_history, None, chatbot
+        # ).then(
+        #     clear, None, [input_text]
+        # ).then(
+        #     restart_btn, None, sub_btn
+        # ).then(
+        #     reset_text, [], input_text
+        # )

+    demo.queue().launch(share=False, inbrowser=True, server_name='192.168.22.32', server_port=8888)
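
The rewritten submit handler chains its steps with .success() instead of .then(): in Gradio, .success() only fires when the previous step finished without raising, so a failure early in the chain (for example while appending the question to the history) stops the rest, whereas .then() runs unconditionally. A small illustrative sketch with toy handlers (the component names here are hypothetical):

import gradio as gr

def may_fail(text):
    if not text:
        raise ValueError("empty input")  # a raised error stops a .success() chain
    return text

with gr.Blocks() as demo:
    box = gr.Textbox()
    out = gr.Textbox()
    btn = gr.Button("go")
    # The upper-casing step runs only if may_fail() did not raise;
    # with .then() it would run even after a failure.
    btn.click(may_fail, [box], [out]).success(lambda s: s.upper(), [out], [out])

# demo.queue().launch()
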
if __name__ == "__main__":
......
#component-5 {
height: 70vh !important;
overflow: hidden;
}
wrap .svelte-vm32wk lable {
margin-bottom: 10px !important;
}
#component-9 {
height: 70vh !important;
}
footer{
opacity: 0;
}
@media screen and (max-width: 768px) {
#component-3 {
display: none;
}
#component-5 {
display: none !important;
}
}
#component-5 {
height: 76vh !important;
overflow: auto !important;
}
.wrap.svelte-vm32wk.svelte-vm32wk.svelte-vm32wk {
display: inline !important;
}
.wrap .svelte-vm32wk label {
margin-bottom: 10px !important;
}
#component-9 {
height: 88vh !important;
border: #f6faff;
box-shadow: none;
background: #f6faff;
}
footer {
visibility: hidden;
}
.app.svelte-1kyws56.svelte-1kyws56 {
height: 100vh;
}
@media screen and (max-width: 768px) {
input[type="text"], textarea {
-webkit-user-modify: read-write-plaintext-only;
-webkit-text-size-adjust: none;
}
#component-3 {
display: none;
}
#component-5 {
display: none !important;
}
#component-9 {
height: 89vh !important;
}
.app.svelte-1kyws56.svelte-1kyws56 {
height: 100vh;
}
#component-10 {
position: fixed;
bottom: 15px;
right: 0px;
padding: 0 20px;
}
#component-12 {
min-width: min(74px, 100%);
}
}
#component-3 button {
background: #4999ff !important;
color: white !important;
}
#component-3 button:hover {
background: #4999ff !important;
color: white !important;
}
span.svelte-1gfkn6j {
margin: 55px 0;
}
gradio-app {
background-color: #f6faff !important;
}
.bot.svelte-1pjfiar.svelte-1pjfiar.svelte-1pjfiar {
background: white;
border: none;
box-shadow: 0px 0px 9px 0px rgba(0, 0, 0, 0.1);
border-radius: 4px;
}
.user.svelte-1pjfiar.svelte-1pjfiar.svelte-1pjfiar {
background: white;
border: none;
box-shadow: 0px 0px 9px 0px rgba(0, 0, 0, 0.1);
border-radius: 4px;
}
.message-buttons-bubble.svelte-1pjfiar.svelte-1pjfiar.svelte-1pjfiar {
/* background: white; */
border: none;
box-shadow: 0px 0px 9px 0px rgba(0, 0, 0, 0.1);
}
label.svelte-vm32wk > .svelte-vm32wk + .svelte-vm32wk {
color: #989898;
}
p {
color: #26415f;
}
.wrapper.svelte-nab2ao {
background: #f6faff;
}
#component-12 {
background: #4999ff;
color: white;
}
.svelte-11hlfrc svg {
color: gray;
}
#component-6 {
position: fixed;
width: 219px;
height: 36px;
border-radius: 18px;
top: 4%;
z-index: 4;
left: 8%;
}
div.svelte-sfqy0y {
background-color: white;
}
import gradio as gr

chats_l = [1, 1, 2, 3]


def get_latest_chats():
    print("get_latest_chats")
    global chats_l
    chats_l = chats_l + [2]
    return chats_l


with gr.Blocks() as demo:
    a = gr.Radio(choices=get_latest_chats(), value=chats_l[0])
    value = gr.State({
        "chats": chats_l
    })
    # gr.Dataset(components=a, samples=[chats_l])
    demo.load(get_latest_chats, None, [a])
    btn = gr.Button("submit")
    btn.click(get_latest_chats, None, [a])

demo.queue().launch(share=False, inbrowser=True, server_name='192.168.22.80', server_port=8889)
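
A note on the pattern this scratch file is probing: when an event handler's output targets a Radio, a returned plain list is generally treated as the component's value rather than its list of options, which is why the main app's get_chats() returns a freshly constructed gr.components.Radio(choices=...) instead. A minimal sketch of that pattern, with hypothetical choice data:

import gradio as gr

def refresh_choices():
    # Hypothetical data source; returning a Radio instance replaces the choices,
    # mirroring the pattern used by get_chats() in the main app above.
    latest = ["chat-1", "chat-2", "chat-3"]
    return gr.Radio(choices=latest, value=latest[0], interactive=True)

with gr.Blocks() as demo:
    radio = gr.Radio(choices=[], label="chats")
    refresh = gr.Button("refresh")
    refresh.click(refresh_choices, None, [radio])

# demo.queue().launch()
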