# coding: utf-8
import os
import openai
import gradio as gr
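# NOTE (assumed environment): this script targets the pre-1.0 `openai` SDK (openai.ChatCompletion)
# and Gradio 3.x (`.style()`, `gr.update` on hidden components); later releases of either library
# changed these APIs.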
MAX_TURNS = 20
MAX_BOXES = MAX_TURNS * 2
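# MAX_BOXES: one Markdown box for each user turn plus one for each assistant turn.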
openai.api_key = os.environ.get("OPENAI_API_KEY", "")  # read the key from the environment instead of hard-coding a secret
# Plays both the dialogue-management (DM) and NLG roles
# System prompt (in Chinese): the assistant acts as a polite, professional auto-loan agent of
# Yixin Group calling customer 黄西虢 about a vehicle-collateral installment application; it must
# qualify the vehicle step by step and aim to add the customer on WeChat.
system_0 = (
    "你来充当易鑫集团的一个礼貌的、专业的汽车贷款专员,来给客户打电话确认贷款情况。"
    "客户是黄西虢先生,刚刚扫码申请了车辆抵押分期业务,想申请贷款。"
    "对话时要注意以下几点:"
    "1. 不要主动结束对话,要努力保持和客户的通话,一直到加上客户的微信后,再礼貌地和客户告别。"
    "2. 不要问客户要身份信息。"
    "3. 不要一开始就要联系方式,需要等时机成熟时再要。"
    "4. 主要关注以下几点:客户车辆是贷款购买还是全款购买、客户车辆的购车时间和年限、客户车辆品牌、客户车辆裸车价、客户期望的贷款金额、贷款资金用途、是否有行驶证证书。"
    "5. 你要一步一步地询问客户问题,一次只问一个问题。"
    "6. 如果客户拒绝继续沟通,不要和客户结束通话。你可以告诉客户他有办理贷款的资格,并体现出我们的合作机构民生银行这个平台来吸引客户,然后继续和客户沟通。"
    "7. 只要客户拥有全款购买的车辆,或按揭贷款购买且已还清贷款的车辆,那么客户就有贷款的资格,贷款额度为车辆购买价格的80%,贷款的利息很低。"
    "8. 在办理贷款时,需要把客户的行驶证证书抵押给银行,你需要询问客户是否可以接受这一条件。"
    "9. 对话的目的是加上客户的微信,并促成客户的贷款业务。"
    "10. 贷款额度是裸车价的80%。"
    "11. 客户必须有行驶证才能贷款。"
)
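# The conversation history is a list of OpenAI chat-format messages; an illustrative example:
#   [{"role": "system", "content": system_0},
#    {"role": "user", "content": "你好"},
#    {"role": "assistant", "content": "您好,请问方便聊一下您的贷款申请吗?"}]
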
def api_inference(input, history):
    """Send the running history plus the new user turn to the chat API and append both turns."""
    print('api_inference...')
    user_input = {"role": "user", "content": input}
    try:
        completion = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            temperature=0.0,
            messages=history + [user_input],
        )
    except Exception as e:
        # Report the API error and return the history unchanged instead of crashing
        # on the undefined `completion` below.
        print(e)
        return [history]
    # add the user turn to the history
    history.append(user_input)
    # assistant response
    role = completion.choices[0].message["role"]
    response = completion.choices[0].message["content"]
    assert role == "assistant"
    # add the assistant turn to the history
    history.append({"role": role, "content": response})
    print('api_inference done')
    # predict() iterates over this single-element list, so keep the wrapping.
    return [history]

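# Minimal usage sketch outside Gradio (assumes OPENAI_API_KEY is set; the user text is made up):
#   history = [{"role": "system", "content": system_0}]
#   history = api_inference("你好,我想咨询车辆抵押贷款", history)[0]
#   print(history[-1]["content"])
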
def predict(input, prompt, history=None):
    """Run one dialogue turn and yield the updated state plus one update per chat box."""
    print('predict...')
    if not history:
        # No state yet: start the conversation with the (possibly edited) system prompt.
        history = [{"role": "system", "content": prompt}]
    else:
        # Keep the system message in sync with the editable prompt textbox.
        history[0]["content"] = prompt
    for history in api_inference(input, history):
        updates = []
        for item in history[1:]:  # skip the system message
            role_txt = item["role"]
            if role_txt == 'user':
                role_txt = '客户'
            elif role_txt == 'assistant':
                role_txt = '坐席'
            updates.append(gr.update(visible=True, value=role_txt + ': ' + item['content']))
        if len(updates) < MAX_BOXES:
            # Hide the Markdown boxes that are not used yet.
            updates = updates + [gr.update(visible=False)] * (MAX_BOXES - len(updates))
        yield [history] + updates

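# UI layout: an editable prompt box, MAX_BOXES hidden Markdown boxes revealed turn by turn,
# a user-input textbox with a Generate button, and display-only sampling sliders.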
with gr.Blocks(title='外呼场景测试Demo') as demo:
    prompt = gr.Textbox(label='prompt: (change according to your ideas)', show_label=True, value=system_0,
                        lines=9, interactive=True).style(container=False)
    # Conversation state; index 0 holds the system message and is rewritten from the prompt box each turn.
    state = gr.State([{"role": "system", "content": prompt.value}])
    text_boxes = []
    for i in range(MAX_BOXES):
        if i % 2 == 0:
            text_boxes.append(gr.Markdown(visible=False, label="提问:"))
        else:
            text_boxes.append(gr.Markdown(visible=False, label="回复:"))
    with gr.Row():
        with gr.Column(scale=4):
            txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter", lines=9).style(
                container=False)
            button = gr.Button("Generate")
        with gr.Column(scale=1):
            # Display-only sliders: their values are not wired into the API call.
            max_length = gr.Slider(0, 4096, value=2048, step=1.0, label="Maximum length", interactive=False)
            top_p = gr.Slider(0, 1, value=0.7, step=0.01, label="Top p", interactive=False)
            temperature = gr.Slider(0, 1, value=0.0, step=0.01, label="Temperature", interactive=False)
    button.click(predict, [txt, prompt, state], [state] + text_boxes)

demo.queue().launch(share=False, inbrowser=True, server_name="0.0.0.0", server_port=8006)
# gr.close_all()
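# To try the demo locally: set OPENAI_API_KEY, run `python chat_web_demo_hf.py`, and open
# http://localhost:8006 (inbrowser=True opens the page automatically when a local browser is available).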