Spaces:
No application file
No application file
Upload chat_web_demo_hf.py
Browse files- chat_web_demo_hf.py +115 -0
chat_web_demo_hf.py
ADDED
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# coding: utf-8
|
2 |
+
import os
|
3 |
+
|
4 |
+
import openai
|
5 |
+
import gradio as gr
|
6 |
+
|
7 |
+
# Conversation limits: at most MAX_TURNS user/assistant exchanges,
# rendered as MAX_BOXES alternating message boxes in the UI.
MAX_TURNS = 20
MAX_BOXES = MAX_TURNS * 2

# SECURITY: never hard-code an API key in source — the previous literal
# key was committed to a public repo and must be rotated immediately.
# Read it from the environment instead (empty string if unset, so the
# failure surfaces as an auth error on the first API call).
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
11 |
+
|
12 |
+
|
13 |
+
# System prompt: plays the dialogue-manager (DM) and NLG roles — a polite,
# professional car-loan agent from Yixin Group cold-calling a customer.
# NOTE: the prompt text below is runtime data sent to the model; it is
# intentionally kept in Chinese and unchanged.
system_0 = "你来充当易鑫集团的一个礼貌的、专业的汽车贷款专员,来给客户打电话确认贷款情况。\
客户是黄西虢先生,刚刚扫码申请了车辆抵押分期业务,想申请贷款。\
对话时要注意以下几点:\
1. 不要主动结束对话,要努力保持和客户的通话,一直到加上客户的微信后,再礼貌地和客户告别。\
2. 不要问客户要身份信息。\
3. 不要一开始就要联系方式,需要等时机成熟时再要。\
4. 主要关注以下几点:客户车辆是贷款购买还是全款购买、客户车辆的购车时间和年限、客户车辆品牌、客户车辆裸车价、客户期望的贷款金额、贷款资金用途、是否有行驶证证书。\
5. 你要一步一步地询问客户问题,一次只问一个问题。\
6. 如果客户拒绝继续沟通,不要和客户结束通话。你可以告诉客户他有办理贷款的资格,并体现出我们的合作机构民生银行这个平台来吸引客户,然后继续和客户沟通。\
7. 只要客户拥有全款购买的车辆,或按揭贷款购买且已还清贷款的车辆,那么客户就有贷款的资格,贷款额度为车辆购买价格的80%,贷款的利息很低。 \
8. 在办理贷款时,需要把客户的行驶证证书抵押给银行,你需要询问客户是否可以接受这一条件。\
9. 对话的目的是加上客户的微信,并促成客户的贷款业务。\
10. 贷款额度是裸车价的80%。\
11. 客户必须有行驶证才能贷款。"
28 |
+
|
29 |
+
|
30 |
+
def api_inference(input, history):
    """Send one user turn to the OpenAI chat API and record the exchange.

    Args:
        input: the user's latest utterance (str).
        history: list of ``{"role", "content"}`` message dicts, mutated in
            place — on success the user turn and the assistant reply are
            appended.

    Returns:
        A one-element list containing the updated history (callers iterate
        over the return value).

    Raises:
        Exception: whatever ``openai.ChatCompletion.create`` raises. The
            original code printed the exception and fell through, after
            which ``completion`` was undefined and a confusing ``NameError``
            masked the real API error.
    """
    print('api_inference...')

    user_input = {"role": "user", "content": input}

    try:
        completion = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            temperature=0.0,
            messages=history + [user_input],
        )
    except Exception as e:
        # Log and re-raise: swallowing here previously led to a NameError
        # on `completion` below, hiding the actual failure cause.
        print(e)
        raise

    # Record the user turn only after the API call succeeds, so a failed
    # call leaves the conversation history unchanged.
    history.append(user_input)

    message = completion.choices[0].message
    role = message["role"]
    response = message["content"]
    assert role == "assistant"
    history.append({"role": role, "content": response})

    print('api_inference done')
    return [history]
|
58 |
+
|
59 |
+
|
60 |
+
def predict(input, prompt, history=None):
    """Generator driving one chat turn of the Gradio UI.

    Args:
        input: the user's new message (str).
        prompt: current text of the editable system-prompt box; written
            into ``history[0]`` so prompt edits take effect mid-session.
        history: message-dict list whose first element is the system
            message. ``None`` or empty starts a fresh conversation — the
            original unconditionally indexed ``history[0]`` and raised
            IndexError on the empty list it had just created.

    Yields:
        ``[history] + MAX_BOXES`` Gradio component updates: one visible
        box per non-system message, the remainder hidden.
    """
    print('predict...')

    if not history:
        # Fresh conversation: seed the system message instead of indexing
        # into an empty list (IndexError in the original).
        history = [{"role": "system", "content": prompt}]
    else:
        # Keep the system message in sync with the editable prompt box.
        history[0]["content"] = prompt

    # UI display labels for each role (runtime strings kept unchanged).
    role_labels = {'user': '客户', 'assistant': '坐席'}

    for history in api_inference(input, history):
        updates = []
        for item in history[1:]:  # skip the system message
            role_txt = role_labels.get(item["role"], item["role"])
            updates.append(gr.update(visible=True, value=role_txt + ': ' + item['content']))

        # Pad with hidden boxes so exactly MAX_BOXES updates are emitted,
        # matching the fixed set of output components wired to this event.
        if len(updates) < MAX_BOXES:
            updates = updates + [gr.Textbox.update(visible=False)] * (MAX_BOXES - len(updates))

        yield [history] + updates
|
84 |
+
|
85 |
+
|
86 |
+
# ---- UI layout (Gradio Blocks) ----
# NOTE(review): uses the legacy Gradio `.style()` / `gr.Textbox.update`
# API — pinned to an older gradio version; confirm before upgrading.
with gr.Blocks(title='外呼场景测试Demo') as demo:

    # Editable system prompt so testers can tweak the agent persona live;
    # predict() copies its current text into history[0] on every turn.
    prompt = gr.Textbox(label='prompt: (change according to your ideas)', show_label=True, value=system_0, lines=9,
                        interactive=True).style(container=False)

    # Conversation state: message-dict list seeded with the system turn.
    state = gr.State([{"role": "system", "content": prompt.value},])
    text_boxes = []

    # Pre-create MAX_BOXES hidden markdown boxes (question/answer pairs);
    # predict() toggles their visibility and fills in the text.
    for i in range(MAX_BOXES):
        if i % 2 == 0:
            text_boxes.append(gr.Markdown(visible=False, label="提问:"))
        else:
            text_boxes.append(gr.Markdown(visible=False, label="回复:"))

    with gr.Row():
        with gr.Column(scale=4):
            txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter", lines=9).style(
                container=False)
            button = gr.Button("Generate")

        with gr.Column(scale=1):
            # Display-only sliders (interactive=False): generation
            # parameters are fixed inside api_inference, not read from here.
            max_length = gr.Slider(0, 4096, value=2048, step=1.0, label="Maximum length", interactive=False)
            top_p = gr.Slider(0, 1, value=0.7, step=0.01, label="Top p", interactive=False)
            temperature = gr.Slider(0, 1, value=0.0, step=0.01, label="Temperature", interactive=False)

    # Wire the button: predict yields [state] followed by MAX_BOXES updates,
    # matching the output component list exactly.
    button.click(predict, [txt, prompt, state], [state] + text_boxes)

# Listen on all interfaces, port 8006; queue() is required for generator
# callbacks like predict().
demo.queue().launch(share=False, inbrowser=True, server_name="0.0.0.0", server_port=8006)

# gr.close_all()
|