hassanelmghari committed on
Commit
26df791
·
verified ·
1 Parent(s): 034341f

fixing bot_streaming function

Browse files
Files changed (1) hide show
  1. app.py +33 -30
app.py CHANGED
@@ -31,7 +31,7 @@ def encode_image(image_path, max_size=(800, 800), quality=85):
31
  img.save(buffered, format="JPEG", quality=quality)
32
  return base64.b64encode(buffered.getvalue()).decode('utf-8')
33
 
34
- def bot_streaming(message, history, together_api_key, max_new_tokens=250, temperature=0.7, max_history=5):
35
  if client is None:
36
  try:
37
  initialize_client(together_api_key)
@@ -39,46 +39,49 @@ def bot_streaming(message, history, together_api_key, max_new_tokens=250, temper
39
  yield f"Error initializing client: {str(e)}"
40
  return
41
 
42
- txt = message.get("text", "")
43
- messages = []
44
- images = []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
46
  try:
47
- for i, msg in enumerate(history[-max_history:]):
48
- if isinstance(msg[0], tuple):
49
- messages.append({"role": "user", "content": [{"type": "text", "text": history[i+1][0]}, {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{encode_image(msg[0][0])}"}}]})
50
- messages.append({"role": "assistant", "content": [{"type": "text", "text": history[i+1][1]}]})
51
- elif isinstance(history[i-1][0], tuple) and isinstance(msg[0], str):
52
- pass
53
- elif isinstance(history[i-1][0], str) and isinstance(msg[0], str):
54
- messages.append({"role": "user", "content": [{"type": "text", "text": msg[0]}]})
55
- messages.append({"role": "assistant", "content": [{"type": "text", "text": msg[1]}]})
56
-
57
- if "files" in message and len(message["files"]) == 1:
58
- if isinstance(message["files"][0], str): # examples
59
- image_path = message["files"][0]
60
- else: # regular input
61
- image_path = message["files"][0]["path"]
62
- messages.append({"role": "user", "content": [{"type": "text", "text": txt}, {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{encode_image(image_path)}"}}]})
63
- else:
64
- messages.append({"role": "user", "content": [{"type": "text", "text": txt}]})
65
-
66
  stream = client.chat.completions.create(
67
- model="meta-llama/Llama-Vision-Free",
68
  messages=messages,
69
  max_tokens=max_new_tokens,
70
  temperature=temperature,
71
  stream=True,
72
  )
73
 
74
- buffer = ""
75
  for chunk in stream:
76
  if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content is not None:
77
- buffer += chunk.choices[0].delta.content
78
- time.sleep(0.01)
79
- yield buffer
80
 
81
- if not buffer:
82
  yield "No response generated. Please try again."
83
 
84
  except Exception as e:
@@ -94,7 +97,7 @@ with gr.Blocks() as demo:
94
  with gr.Row():
95
  together_api_key = gr.Textbox(
96
  label="Together API Key",
97
- placeholder="Enter your TOGETHER_API_KEY here",
98
  type="password"
99
  )
100
 
 
31
  img.save(buffered, format="JPEG", quality=quality)
32
  return base64.b64encode(buffered.getvalue()).decode('utf-8')
33
 
34
+ def bot_streaming(message, history, together_api_key, max_new_tokens=250, temperature=0.7):
35
  if client is None:
36
  try:
37
  initialize_client(together_api_key)
 
39
  yield f"Error initializing client: {str(e)}"
40
  return
41
 
42
+ prompt = "You are a helpful AI assistant. Analyze the image provided (if any) and respond to the user's query or comment."
43
+
44
+ messages = [{"role": "system", "content": prompt}]
45
+
46
+ # Add history to messages
47
+ for user_msg, assistant_msg in history:
48
+ messages.append({"role": "user", "content": user_msg})
49
+ messages.append({"role": "assistant", "content": assistant_msg})
50
+
51
+ # Prepare the current message
52
+ current_message = {"role": "user", "content": []}
53
+
54
+ # Add text content
55
+ if message.get("text"):
56
+ current_message["content"].append({"type": "text", "text": message["text"]})
57
+
58
+ # Add image content if present
59
+ if message.get("files") and len(message["files"]) > 0:
60
+ image_path = message["files"][0]["path"] if isinstance(message["files"][0], dict) else message["files"][0]
61
+ image_base64 = encode_image(image_path)
62
+ current_message["content"].append({
63
+ "type": "image_url",
64
+ "image_url": {"url": f"data:image/png;base64,{image_base64}"}
65
+ })
66
+
67
+ messages.append(current_message)
68
 
69
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  stream = client.chat.completions.create(
71
+ model="meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
72
  messages=messages,
73
  max_tokens=max_new_tokens,
74
  temperature=temperature,
75
  stream=True,
76
  )
77
 
78
+ response = ""
79
  for chunk in stream:
80
  if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content is not None:
81
+ response += chunk.choices[0].delta.content
82
+ yield response
 
83
 
84
+ if not response:
85
  yield "No response generated. Please try again."
86
 
87
  except Exception as e:
 
97
  with gr.Row():
98
  together_api_key = gr.Textbox(
99
  label="Together API Key",
100
+ placeholder="Enter your Together API key here",
101
  type="password"
102
  )
103