Spaces: Running on Zero
macadeliccc committed
Commit 3fbf6d6 · Parent(s): 87956ea
test
app.py CHANGED

@@ -62,7 +62,7 @@ async def chat_with_ochat(message):
 # Create a Gradio Blocks interface with session state
 with gr.Blocks(theme=gr.themes.Soft()) as app:
     gr.Markdown("## vLLM OpenChat-3.5 Interface")
-    gr.Markdown("###
+    gr.Markdown("### the vLLM server cannot handle concurrent users in spaces. If you get an error, run it on docker.")
     gr.Markdown("This will run better on your own machine: ```docker run -it -p 7860:7860 --platform=linux/amd64 --gpus all \
     registry.hf.space/macadeliccc-openchat-3-5-chatbot:latest python app.py```")

@@ -73,11 +73,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:

     history = State([])  # Session state for chat history

-
+    def user(message, history):
         return "", history + [[message, None]]


-
+    def bot(history):
         if history and history[-1] and history[-1][0]:
             user_message = history[-1][0]
             bot_response = chat_with_ochat(user_message)
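The new `user` and `bot` handlers follow Gradio's usual two-step chat pattern, but the event wiring that connects them to the chat widgets falls outside these hunks. As a minimal sketch only, assuming a `gr.Chatbot`/`gr.Textbox` pair and using a stand-in reply in place of `chat_with_ochat`, the pieces typically fit together like this:

```python
import gradio as gr

# Minimal sketch of the assumed wiring; widget names and the echo reply
# are illustrative, not taken from this commit.
with gr.Blocks(theme=gr.themes.Soft()) as app:
    gr.Markdown("## vLLM OpenChat-3.5 Interface")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type a message and press Enter")

    def user(message, history):
        # Record the user's turn; the bot's slot starts empty.
        return "", history + [[message, None]]

    def bot(history):
        # Fill the bot's slot for the latest turn (stand-in for chat_with_ochat).
        if history and history[-1] and history[-1][0]:
            history[-1][1] = "echo: " + history[-1][0]
        return history

    # Two-step chain: append the user turn, then generate the reply.
    msg.submit(user, [msg, chatbot], [msg, chatbot]).then(bot, chatbot, chatbot)

app.launch()
```

Returning `""` as the first output clears the textbox, and the `.then` chain makes the user's message render before the reply is computed.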