Blood076 committed
Commit a3e0939 · verified · 1 Parent(s): 05d244c

Update app.py

Files changed (1)
  1. app.py +1 -4
app.py CHANGED
@@ -10,10 +10,8 @@ For more information on huggingface_hub Inference API support, please check the
 #client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
 client = InferenceClient("meta-llama/Llama-3.2-11B-Vision-Instruct")
 
-user = "user"
 
 def respond(
-    user,
     message,
     history: list[tuple[str, str]],
     system_message,
@@ -25,7 +23,7 @@ def respond(
 
     for val in history:
         if val[0]:
-            messages.append({"role": user, "content": val[0]})
+            messages.append({"role": "user", "content": val[0]})
         if val[1]:
            messages.append({"role": "Narrator", "content": val[1]})
 
@@ -51,7 +49,6 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="user", label="System message"),
         gr.Textbox(value="", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=2048, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.6, step=0.1, label="Temperature"),
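The commit removes the module-level `user` variable and the stray `user` parameter of `respond`, hardcodes the role string to `"user"` in the history loop, and drops the duplicate `gr.Textbox` from `additional_inputs`. The sketch below shows how the resulting app.py plausibly fits together after this change. Everything outside the diff context is an assumption: the system-message prepend, the streaming `client.chat_completion` call, the top-p slider, and the `demo.launch()` guard are filled in from the stock Gradio ChatInterface template that this file appears to be based on, not confirmed by the commit.

```python
import gradio as gr
from huggingface_hub import InferenceClient

#client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
client = InferenceClient("meta-llama/Llama-3.2-11B-Vision-Instruct")


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Assumption: the system message is prepended as in the stock template.
    messages = [{"role": "system", "content": system_message}]

    # Replay prior turns; the user role is now hardcoded instead of read
    # from the removed module-level `user` variable.
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            # Role kept as in the source; most chat-completion backends
            # expect "assistant" here.
            messages.append({"role": "Narrator", "content": val[1]})

    messages.append({"role": "user", "content": message})

    # Assumption: streaming completion as in the stock template.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=2048, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.6, step=0.1, label="Temperature"),
        # Assumption: a top-p slider follows, as in the stock template.
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)


if __name__ == "__main__":
    demo.launch()
```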