# Copyright (c) XVERSE Inc. All Rights Reserved.
#
# -*- encoding: utf-8 -*-
import gradio as gr
import openai
import os
TITLE="XChat"
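# All configuration is read from environment variables (as used throughout this file):
#   API_KEY          - XVERSE API key
#   SYSTEM_PROMPT    - system prompt; may be an empty string to disable it
#   MODEL            - model name passed to the chat completions endpoint
#   MAX_TOKENS, TOP_P, TEMPERATURE, PRESENCE_PENALTY - sampling parameters (int/float)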
client = openai.Client(
    base_url="https://api.xverse.cn/v1",
    api_key=os.environ["API_KEY"]
)
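# predict() turns the flat [user, assistant, user, assistant, ...] history kept in
# gr.State into an OpenAI-style message list, calls the chat completions API, and
# streams the reply back by yielding updated (chatbot tuples, history) pairs.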
def predict(msg, history=[]):
    messages = []
    tuples = []
    if len(os.environ["SYSTEM_PROMPT"]) > 0:
        messages.append({"role": "system", "content": os.environ["SYSTEM_PROMPT"]})
    # history is a flat list: even indices hold user turns, odd indices hold assistant replies
    for i in range(0, len(history), 2):
        messages.append({"role": "user", "content": history[i]})
        messages.append({"role": "assistant", "content": history[i+1]})
        tuples.append((history[i], history[i+1]))
    messages.append({"role": "user", "content": msg})
    response = client.chat.completions.create(
        model=os.environ["MODEL"],
        messages=messages,
        max_tokens=int(os.environ["MAX_TOKENS"]),
        top_p=float(os.environ["TOP_P"]),
        temperature=float(os.environ["TEMPERATURE"]),
        presence_penalty=float(os.environ["PRESENCE_PENALTY"]),
        stream=True
    )
    ### Non-streaming output
    #txt = response.choices[0].message.content
    #tuples.append((msg, txt))
    #history.append(msg)
    #history.append(txt)
    #return tuples, history
    ### Streaming output
    snippet = ""
    appended = False
    for chunk in response:
        if chunk.choices[0].delta.content is None:
            # Skip chunks without content (e.g. a role-only first chunk)
            continue
        snippet += chunk.choices[0].delta.content
        if not appended:
            # First content chunk: append the new (user, assistant) turn
            tuples.append((msg, snippet))
            history.append(msg)
            history.append(snippet)
            appended = True
        else:
            # Later chunks: update the last turn in place
            tuples[-1] = (msg, snippet)
            history[-1] = snippet
        yield tuples, history
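# Example prompts (in Chinese): "Who are you?", "What can you do?",
# "Write a short essay about protecting the environment"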
examples = [["你是谁?", None], ["你会干什么?", None], ["写一篇爱护环境的小作文", None]]
def reset():
    # Clear both the chatbot display and the stored history
    return None, []
def clear_textbox():
    return gr.update(value="")
css = """
h1 {
text-align: center;
display: block;
}
"""
with gr.Blocks(css=css) as chat_demo:
    gr.Markdown("""# <center><font size=8>{}</center>""".format(TITLE))
    gr.Markdown("""<center><font size=4>\
    <a href="https://github.com/xverse-ai">GitHub</a>&nbsp; | &nbsp;\
    <a href="https://chat.xverse.cn">Web</a>&nbsp; | &nbsp;\
    <a href="https://help.xverse.cn/docs/api-reference">API</a>\
    </center>"""
    )
    chatbot = gr.Chatbot(elem_id="chatbot", height=800, bubble_full_width=False, likeable=False)
    state = gr.State([])
    with gr.Row():
        txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter", container=False)
    with gr.Row():
        submit_btn = gr.Button(value="Submit")
        reset_btn = gr.Button(value="Reset")
    # Pressing Enter in the textbox and clicking Submit both run predict(), then clear the textbox
    txt.submit(fn=predict, inputs=[txt, state], outputs=[chatbot, state])
    txt.submit(fn=clear_textbox, inputs=None, outputs=[txt])
    submit_btn.click(fn=predict, inputs=[txt, state], outputs=[chatbot, state])
    submit_btn.click(fn=clear_textbox, inputs=None, outputs=[txt])
    reset_btn.click(fn=reset, inputs=None, outputs=[chatbot, state])
    gr.Examples(examples=examples, inputs=[txt])
if __name__ == "__main__":
    chat_demo.queue()
    chat_demo.launch(share=True)
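# Example invocation (placeholder values, not project defaults):
#   API_KEY=<key> SYSTEM_PROMPT="" MODEL=<model> MAX_TOKENS=<int> \
#   TOP_P=<float> TEMPERATURE=<float> PRESENCE_PENALTY=<float> python app.py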