merterbak committed
Commit dab1246 · verified · 1 Parent(s): ead416d

Update app.py

Files changed (1)
  1. app.py +2 -21
app.py CHANGED
@@ -15,15 +15,6 @@ client = OpenAI(
 )
 
 def build_messages_from_history(history):
-    """
-    Convert the stored conversation (with user and assistant turns, including images) into a
-    messages array suitable for the model. History is a list of tuples:
-    [
-        ((user_text, user_image_url), assistant_text),
-        ...
-    ]
-    We return a list of messages starting with a system role, followed by alternating user/assistant.
-    """
     messages = [
         {
             "role": "system",
@@ -64,10 +55,6 @@ def build_messages_from_history(history):
     return messages
 
 def create_response(history, user_text, user_image_path):
-    """
-    Given the current history, the user's new message (text), and optional uploaded image path,
-    build a new set of messages including the latest user turn, then call the model and update history.
-    """
     user_text = user_text.strip()
     user_image_url = ""
 
@@ -86,7 +73,7 @@ def create_response(history, user_text, user_image_path):
         user_image_url = f"data:image/jpeg;base64,{base64_image}"
 
     temp_history = history.copy()
-    temp_history.append(((user_text, user_image_url), ""))  # assistant response is empty for now
+    temp_history.append(((user_text, user_image_url), ""))
 
     messages = [
         {
@@ -95,8 +82,6 @@ def create_response(history, user_text, user_image_path):
             "You should use all previous messages in the conversation as context. Provide clear, positive, and useful responses."
         }
     ]
-
-    # Add all previous turns except the one we just appended (since it has no assistant response yet)
     for ((old_user_text, old_user_image_url), old_assistant_text) in history:
         old_user_content = []
         if old_user_image_url:
@@ -136,7 +121,7 @@ def create_response(history, user_text, user_image_path):
     messages.append({"role": "user", "content": new_user_content})
 
     completion = client.chat.completions.create(
-        model="grok-vision-beta",
+        model="grok-2-vision-1212",
         messages=messages,
         stream=False,
         temperature=0.01,
@@ -151,10 +136,6 @@ def create_response(history, user_text, user_image_path):
     return history, converted
 
 def chat(user_message, image, history):
-    """
-    Handle a new message from the user. The state 'history' is a list of ((user_text, user_image_url), assistant_text) tuples.
-    Returns updated history and the entire conversation as displayed in the Chatbot.
-    """
     history, assistant_output = create_response(history, user_message, image)
 
     display_chat = []
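
For context, the docstrings removed here described the shape of the stored conversation: a list of ((user_text, user_image_url), assistant_text) tuples, converted into a system message followed by alternating user and assistant turns, with images carried as base64 data URLs. Below is a minimal sketch of that conversion, assuming the standard OpenAI-style image_url content parts that the unchanged loop in create_response appears to use; the helper name history_to_messages is illustrative and not part of app.py.

def history_to_messages(history, system_prompt):
    # history is a list of ((user_text, user_image_url), assistant_text) tuples,
    # matching the structure the removed docstrings described.
    messages = [{"role": "system", "content": system_prompt}]
    for (user_text, user_image_url), assistant_text in history:
        user_content = []
        if user_image_url:
            # Images travel as data URLs (e.g. "data:image/jpeg;base64,...")
            # inside an OpenAI-style image_url content part (assumption).
            user_content.append({"type": "image_url", "image_url": {"url": user_image_url}})
        if user_text:
            user_content.append({"type": "text", "text": user_text})
        messages.append({"role": "user", "content": user_content})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})
    return messages

Here user_image_url is the f"data:image/jpeg;base64,{base64_image}" string that create_response builds from the uploaded file, as shown in the diff context above.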
 
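The only functional change in this commit is the model id passed to client.chat.completions.create, from grok-vision-beta to grok-2-vision-1212. Below is a minimal sketch of such a call through the OpenAI-compatible client; the base_url, the API-key environment variable, and the describe_image helper are assumptions for illustration, since the client setup at line 15 is not shown in this diff.

import os
from openai import OpenAI

# Assumed client setup; the real wiring lives in the unchanged `client = OpenAI(` block at line 15.
client = OpenAI(
    api_key=os.environ["XAI_API_KEY"],  # assumption: key read from the environment
    base_url="https://api.x.ai/v1",     # assumption: xAI's OpenAI-compatible endpoint
)

def describe_image(image_data_url: str) -> str:
    # image_data_url is a "data:image/jpeg;base64,..." string, as built in create_response.
    completion = client.chat.completions.create(
        model="grok-2-vision-1212",  # was "grok-vision-beta" before this commit
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {
                "role": "user",
                "content": [
                    {"type": "image_url", "image_url": {"url": image_data_url}},
                    {"type": "text", "text": "Describe this image."},
                ],
            },
        ],
        stream=False,
        temperature=0.01,
    )
    return completion.choices[0].message.content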