Yousefsalem committed on
Commit
322a467
·
verified ·
1 Parent(s): 2dbe17d

Update src/chatbot.py

Browse files
Files changed (1) hide show
  1. src/chatbot.py +8 -5
src/chatbot.py CHANGED
@@ -11,12 +11,12 @@ def process_input(user_input, session_id='1'):
11
  user_input (str): The user's input message.
12
  session_id (str): The session ID for the chat (default is "1").
13
  Returns:
14
- str: The generated response from the chatbot.
15
  """
16
  memory = get_by_session_id(session_id)
17
 
18
  if user_input.lower() == 'exit':
19
- return "Exiting the chat session."
20
 
21
  llm = route_llm(user_input)
22
 
@@ -28,10 +28,13 @@ def process_input(user_input, session_id='1'):
28
  verbose=True
29
  )
30
 
31
- response = conversation_chain.run({"input": user_input})
32
- memory.save_context({'input': user_input}, response)
33
 
34
- return response
 
 
 
35
 
36
  # Gradio interface function to handle input
37
  def chatbot_interface(user_input, chat_history=None, session_id="1"):
 
11
  user_input (str): The user's input message.
12
  session_id (str): The session ID for the chat (default is "1").
13
  Returns:
14
+ Generator: A generator that streams the chatbot's response tokens.
15
  """
16
  memory = get_by_session_id(session_id)
17
 
18
  if user_input.lower() == 'exit':
19
+ yield "Exiting the chat session."
20
 
21
  llm = route_llm(user_input)
22
 
 
28
  verbose=True
29
  )
30
 
31
+ # Stream response tokens
32
+ response_generator = conversation_chain.stream({"input": user_input})
33
 
34
+ for token in response_generator:
35
+ yield token # Stream each token
36
+
37
+ memory.save_context({'input': user_input}, ''.join(response_generator))
38
 
39
  # Gradio interface function to handle input
40
  def chatbot_interface(user_input, chat_history=None, session_id="1"):