eduardo-alvarez committed
Commit e5f8f36 · 2 Parent(s): 53b6e2c c27a9a2

Merge branch 'main' of hf.co:spaces/Intel/powered_by_intel_leaderboard

Files changed (2):
  1. app.py  +27 -27
  2. requirements.txt  +0 -2
app.py CHANGED
@@ -81,33 +81,33 @@ with demo:
             break
         response += char
         yield [(f"🤖 Response from LLM: {chat_model_selection}", response)]  # Correct format for Gradio Chatbot
-
-with gr.Blocks() as chat_interface:
-    chatbot = gr.Chatbot()
-    msg = gr.Textbox()
-    submit = gr.Button("Submit")
-    clear = gr.Button("Clear")
-
-    def user(user_message, history):
-        return "", history + [[user_message, None]]
-
-    def clear_chat(*args):
-        return []  # Returning an empty list to signify clearing the chat, adjust as per Gradio's capabilities
-
-    submit.click(
-        fn=get_response,
-        inputs=[msg, chatbot],
-        outputs=chatbot
-    )
-
-    clear.click(
-        fn=clear_chat,
-        inputs=None,
-        outputs=chatbot
-    )
-
-chat_interface.queue()
-chat_interface.launch()
+#
+#with gr.Blocks() as chat_interface:
+#    chatbot = gr.Chatbot()
+#    msg = gr.Textbox()
+#    submit = gr.Button("Submit")
+#    clear = gr.Button("Clear")
+#
+#    def user(user_message, history):
+#        return "", history + [[user_message, None]]
+#
+#    def clear_chat(*args):
+#        return []  # Returning an empty list to signify clearing the chat, adjust as per Gradio's capabilities
+#
+#    submit.click(
+#        fn=get_response,
+#        inputs=[msg, chatbot],
+#        outputs=chatbot
+#    )
+#
+#    clear.click(
+#        fn=clear_chat,
+#        inputs=None,
+#        outputs=chatbot
+#    )
+#
+#chat_interface.queue()
+#chat_interface.launch()
 
     with gr.Tabs(elem_classes="tab-buttons") as tabs:
         with gr.TabItem("🏆 LLM Leadeboard", elem_id="llm-benchmark-table", id=0):
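
Note: the block being disabled above follows the standard Gradio Blocks chat wiring (a Chatbot, a Textbox, and Submit/Clear buttons connected through .click handlers). A minimal standalone sketch of that pattern, with a placeholder echo function standing in for the Space's get_response generator, could look roughly like this:

import gradio as gr

# Placeholder for the Space's get_response generator (assumption: it maps a user
# message plus the current history to an updated list of (user, bot) tuples).
def get_response(message, history):
    history = history or []
    return history + [(message, f"🤖 Echo: {message}")]

with gr.Blocks() as chat_interface:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    submit = gr.Button("Submit")
    clear = gr.Button("Clear")

    # Submit sends the textbox contents and current history to the response
    # function; Clear resets the Chatbot to an empty history.
    submit.click(fn=get_response, inputs=[msg, chatbot], outputs=chatbot)
    clear.click(fn=lambda: [], inputs=None, outputs=chatbot)

chat_interface.queue()
chat_interface.launch()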
requirements.txt DELETED
@@ -1,2 +0,0 @@
-gradio==4.16.0
-pandas==2.0.3