Added generic chatbot and descriptions to the custom chatbots
Files changed:
- app.py +86 -6
- app_generic.py +85 -0
- app_old.py +185 -0
- prompts/system_prompts.py +15 -5
- src/generic_bot.py +154 -0
- src/srf_bot.py +1 -1
app.py
CHANGED
@@ -2,12 +2,15 @@ import gradio as gr
 from langchain_core.messages import HumanMessage
 import src.passage_finder as pf
 import src.srf_bot as sb
+import src.generic_bot as gb
 import prompts.system_prompts as sp
 import os
 
-# Initialize PassageFinder and Chatbot
+
+# Initialize PassageFinder, SRFChatbot, and GenericChatbot
 passage_finder = pf.PassageFinder()
 chatbot = sb.SRFChatbot()
+generic_chatbot = gb.GenericChatbot()
 
 # Passage Finder functions
 def respond_passage_finder(message):
@@ -68,6 +71,21 @@ def respond_chatbot(query, history):
     history.append((query, f"<i>[{system_message_dropdown}]</i>\n" + response))
     return history
 
+# Generic Chatbot function
+def respond_genericchatbot(query, history):
+    formatted_query = [HumanMessage(content=query)]
+    result = generic_chatbot.graph.invoke({"messages": formatted_query}, generic_chatbot.config)
+    state = generic_chatbot.graph.get_state(config=generic_chatbot.config).values
+    documents = state.get("documents")
+    passages = ''
+    if documents and len(documents) > 0:
+        for d in documents:
+            passages += f'<b>{d.metadata["publication_name"]} - {d.metadata["chapter_name"]}</b>\n{d.page_content}\n\n'
+        history.append((f'Passages: {query}', passages))
+    response = result["messages"][-1].content
+    history.append((query, response))
+    return history
+
 # Define the CSS
 css = """
 body { background-color: #f0f0f0; }
@@ -144,15 +162,24 @@ with gr.Blocks(css=css) as demo:
                 with gr.Column(scale=1):
                     system_prompt_dropdown = gr.Dropdown(
                         choices=list(sp.system_prompt_templates.keys()),
-                        label="Select Chatbot Instructions",
+                        label="Select Chatbot",
                         value=list(sp.system_prompt_templates.keys())[0],
                     )
+
+                    # Chatbot description Textbox
+                    chatbot_description = gr.Textbox(
+                        value=sp.chatbot_descriptions[list(sp.system_prompt_templates.keys())[0]],
+                        label="Chatbot Description",
+                        lines=3,
+                        interactive=False
+                    )
                     system_prompt_display = gr.Textbox(
                         value=sp.system_prompt_templates[list(sp.system_prompt_templates.keys())[0]],
-                        label="Current Chatbot Instructions",
+                        label="Chatbot Instructions",
                         lines=5,
                         interactive=False
                     )
+
 
                     gr.Markdown("""
                     <div class="source-box">
@@ -166,9 +193,9 @@ with gr.Blocks(css=css) as demo:
                     """)
 
             system_prompt_dropdown.change(
-                fn=chatbot.reset_system_prompt,
+                fn=lambda x: (sp.chatbot_descriptions[x], sp.system_prompt_templates[x]),
                 inputs=[system_prompt_dropdown],
-                outputs=[system_prompt_display]
+                outputs=[chatbot_description, system_prompt_display]
             )
 
             submit_button_cb.click(
@@ -177,9 +204,62 @@ with gr.Blocks(css=css) as demo:
                 outputs=[chatbot_output]
             )
 
+            gr.Examples(
+                examples=[
+                    "importance of meditation",
+                    "How can I develop unconditional love?",
+                    "concept of karma",
+                    "What are some techniques for spiritual growth?",
+                ],
+                inputs=user_input_cb,
+            )
+
+        with gr.TabItem("Generic Chatbot"):
+            with gr.Row():
+                with gr.Column(scale=4):
+                    generic_chatbot_output = gr.Chatbot(height=600)
+                    user_input_gc = gr.Textbox(placeholder="Type your question here...", label="Your Question", value="Loaves and fishes")
+                    submit_button_gc = gr.Button("Submit")
+
+                with gr.Column(scale=1):
+                    gr.Markdown("""
+                    <div class="source-box">
+                    <strong>About this chatbot:</strong>
+                    <p>This chatbot has access to a vector database of some SRF publications that it can use if necessary, but it receives no other specific instructions. You can ask it anything. If you want quotes and references, you need to explicitly request them in your query.</p>
+                    </div>
+                    """)
+
+                with gr.Column(scale=1):
+                    gr.Markdown("""
+                    <div class="source-box">
+                    <strong>Available sources:</strong>
+                    <ul>
+                        <li>Journey to Self-Realization</li>
+                        <li>The Second Coming of Christ</li>
+                        <li>Autobiography of a Yogi</li>
+                    </ul>
+                    </div>
+                    """)
+
+            submit_button_gc.click(
+                fn=respond_genericchatbot,
+                inputs=[user_input_gc, generic_chatbot_output],
+                outputs=[generic_chatbot_output]
+            )
+
+            gr.Examples(
+                examples=[
+                    "Tell me about Paramahansa Yogananda's life",
+                    "What are the main teachings of Self-Realization Fellowship?",
+                    "Explain the concept of Kriya Yoga",
+                    "Can you provide quotes about the importance of meditation?",
+                ],
+                inputs=user_input_gc,
+            )
+
 # Access the secrets
 username = os.getenv("USERNAME")
 password = os.getenv("PASSWORD")
 
 # Launch the interface
-demo.launch(share=True, auth=(username, password), debug=True)
+demo.launch(share=True, auth=(username, password), debug=True)
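The new tab mirrors the existing Custom Chatbots wiring: respond_genericchatbot takes the query plus the running history list and returns the updated history, so it can also be exercised without the UI. A minimal sketch, not part of the commit, assuming the OpenAI key and Qdrant collection behind GenericChatbot are configured and app.py's definitions are importable:

```python
# Minimal sketch (not part of the commit): drive the new handler directly,
# the same way submit_button_gc.click() does in the Generic Chatbot tab.
history = []
history = respond_genericchatbot("Loaves and fishes", history)

# history now holds (user_turn, bot_turn) tuples; the first pair carries the
# retrieved passages when the bot called one of its search tools.
for user_turn, bot_turn in history:
    print(user_turn)
    print(bot_turn[:200], "\n")
```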
app_generic.py
ADDED
@@ -0,0 +1,85 @@
import gradio as gr
from langchain_core.messages import HumanMessage
import src.passage_finder as pf
import src.srf_bot as sb
import src.generic_bot as gb
import prompts.system_prompts as sp
import os

# Initialize chatbot
generic_chatbot = gb.GenericChatbot()

# Chatbot functions
def respond_genericchatbot(query, history):
    formatted_query = [HumanMessage(content=query)]
    # Invoke the graph with properly formatted input
    result = generic_chatbot.graph.invoke({"messages": formatted_query}, generic_chatbot.config)
    # Get the passages from the graph and append to history if documents exist
    state = generic_chatbot.graph.get_state(config=generic_chatbot.config).values
    documents = state.get("documents")
    passages = ''
    if documents and len(documents) > 0:
        for d in documents:
            passages += f'<b>{d.metadata["publication_name"]} - {d.metadata["chapter_name"]}</b>\n{d.page_content}\n\n'
        history.append((f'Passages: {query}', passages))
    # Extract the assistant's response and append to history
    response = result["messages"][-1].content
    history.append((query, response))
    return history

# Define the CSS
css = """
body { background-color: #f0f0f0; }
.gradio-container { background-color: #ffffff; }
.gr-button { background-color: #333333; color: white; font-size: 18px; padding: 10px; }
.gr-textbox textarea { font-size: 18px; color: black; }
.gr-dropdown { font-size: 18px; color: black; }
.source-box { background-color: white; padding: 10px; border-radius: 8px; margin-top: 20px; color: black; border: 1px solid #D0D0D0; }

/* Dark mode and responsive styles */
@media (prefers-color-scheme: dark) {
    .gradio-container { background-color: #1e1e1e; color: white; }
    h1, h2, p { color: white; }
    .gr-textbox textarea { background-color: #333333; color: white; }
    .gr-button { background-color: #555555; color: white; }
    .gr-dropdown { background-color: #333333; color: white; }
    .source-box { background-color: #333333; color: white; border: 1px solid #555555; }
}

@media (max-width: 600px) {
    .gr-row { flex-direction: column !important; }
    .gr-column { width: 100% !important; }
}
"""

with gr.Blocks(css=css) as demo:
    gr.Markdown("# Generic Chatbot")

    chatbot_output = gr.Chatbot(height=600)
    user_input = gr.Textbox(placeholder="Type your question here...", label="Your Question")
    submit_button = gr.Button("Submit")

    gr.Markdown("""
    <div class="source-box">
    <strong>Available sources:</strong>
    <ul>
        <li>Journey to Self-Realization</li>
        <li>The Second Coming of Christ</li>
        <li>Autobiography of a Yogi</li>
    </ul>
    </div>
    """)

    submit_button.click(
        fn=respond_genericchatbot,
        inputs=[user_input, chatbot_output],
        outputs=[chatbot_output]
    )

# Access the secrets
username = os.getenv("USERNAME")
password = os.getenv("PASSWORD")

# Launch the interface
demo.launch(share=True, auth=(username, password), debug=True)
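Like app.py, this standalone app reads its HTTP auth pair from environment variables at launch. A minimal local-run sketch, not part of the commit; the variable names mirror the file above and the values are placeholders:

```python
# Minimal sketch (not part of the commit): set the credentials app_generic.py
# expects before launching it locally. On the deployed Space these come from
# the configured secrets instead. Placeholder values only.
import os

os.environ.setdefault("USERNAME", "local-user")
os.environ.setdefault("PASSWORD", "local-pass")

username = os.getenv("USERNAME")
password = os.getenv("PASSWORD")
# demo.launch(share=True, auth=(username, password), debug=True)
```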
app_old.py
ADDED
@@ -0,0 +1,185 @@
import gradio as gr
from langchain_core.messages import HumanMessage
import src.passage_finder as pf
import src.srf_bot as sb
import prompts.system_prompts as sp
import os

# Initialize PassageFinder and Chatbot
passage_finder = pf.PassageFinder()
chatbot = sb.SRFChatbot()

# Passage Finder functions
def respond_passage_finder(message):
    config = passage_finder.get_configurable()
    results = passage_finder.graph.invoke({"messages": [HumanMessage(content=message)]}, config)

    documents = results.get('documents', [])

    output = []
    for doc in documents:
        quotes = doc.metadata.get('matched_quotes', [])
        publication = doc.metadata.get('publication_name', 'Unknown Publication')
        chapter = doc.metadata.get('chapter_name', 'Unknown Chapter')
        full_passage = doc.metadata.get('highlighted_content', '')

        quote_text = "\n".join([f"• \"{q.quote}\"" for q in quotes])
        output.append({
            "quotes": quote_text,
            "reference": f"{publication}: {chapter}",
            "full_passage": full_passage
        })

    return output

def process_input_passage_finder(message):
    results = respond_passage_finder(message)
    html_output = "<div class='response-container'>"
    for result in results:
        html_output += f"""
        <div class='result-item'>
            <h3 class='reference'>{result['reference']}</h3>
            <div class='quotes'>{result['quotes'].replace("• ", "<br>• ")}</div>
            <details>
                <summary>Show full passage</summary>
                <div class='full-passage'>{result['full_passage']}</div>
            </details>
        </div>
        """
    html_output += "</div>"
    return html_output

# Chatbot functions
def respond_chatbot(query, history):
    formatted_query = [HumanMessage(content=query)]
    # Invoke the graph with properly formatted input
    result = chatbot.graph.invoke({"messages": formatted_query}, chatbot.config)
    # Get the passages from the graph and append to history if documents exist
    state = chatbot.graph.get_state(config=chatbot.config).values
    documents = state.get("documents")
    passages = ''
    if documents and len(documents) > 0:
        for d in documents:
            passages += f'<b>{d.metadata["publication_name"]} - {d.metadata["chapter_name"]}</b>\n{d.page_content}\n\n'
        history.append((f'Passages: {query}', passages))
    # Extract the assistant's response and append to history
    response = result["messages"][-1].content
    system_message_dropdown = state.get("system_message_dropdown")
    history.append((query, f"<i>[{system_message_dropdown}]</i>\n" + response))
    return history

# Define the CSS
css = """
body { background-color: #f0f0f0; }
.gradio-container { background-color: #ffffff; }
.response-container { border: 1px solid #e0e0e0; border-radius: 8px; padding: 20px; background-color: #f9f9f9; }
.result-item { margin-bottom: 20px; background-color: white; padding: 15px; border-radius: 5px; box-shadow: 0 2px 5px rgba(0,0,0,0.1); }
.reference { color: #2c3e50; margin-bottom: 10px; }
.quotes { font-style: italic; margin-bottom: 10px; }
.full-passage { margin-top: 10px; padding: 10px; background-color: #f0f0f0; border-radius: 5px; }
details summary { cursor: pointer; color: #3498db; font-weight: bold; }
details summary:hover { text-decoration: underline; }

/* Chatbot specific styles */
.gr-button { background-color: #333333; color: white; font-size: 18px; padding: 10px; }
.gr-textbox textarea { font-size: 18px; color: black; }
.gr-dropdown { font-size: 18px; color: black; }
.source-box { background-color: white; padding: 10px; border-radius: 8px; margin-top: 20px; color: black; border: 1px solid #D0D0D0; }

/* Dark mode and responsive styles */
@media (prefers-color-scheme: dark) {
    .gradio-container { background-color: #1e1e1e; color: white; }
    h1, h2, p { color: white; }
    .gr-textbox textarea { background-color: #333333; color: white; }
    .gr-button { background-color: #555555; color: white; }
    .gr-dropdown { background-color: #333333; color: white; }
    .source-box { background-color: #333333; color: white; border: 1px solid #555555; }
}

@media (max-width: 600px) {
    .gr-row { flex-direction: column !important; }
    .gr-column { width: 100% !important; }
}
"""

with gr.Blocks(css=css) as demo:
    gr.Markdown("# SRF Teachings App")

    with gr.Tabs():
        with gr.TabItem("Passage Finder"):
            gr.Markdown("Ask questions about Self-Realization Fellowship teachings and receive responses with relevant quotes.")

            with gr.Row():
                input_text_pf = gr.Textbox(
                    placeholder="Ask about the meaning of life, spirituality, or any other topic...",
                    label="Your Question"
                )
                submit_btn_pf = gr.Button("Submit", variant="primary")

            output_area_pf = gr.HTML()

            gr.Markdown("### Sources")
            gr.Textbox(value="Journey to Self Realization, Second Coming of Christ, and Autobiography of a Yogi",
                       label="Available Sources", interactive=False)

            submit_btn_pf.click(process_input_passage_finder, inputs=input_text_pf, outputs=output_area_pf)

            gr.Examples(
                examples=[
                    "What is the meaning of life?",
                    "Importance of good posture",
                    "How can I find inner peace?",
                    "What does Paramahansa Yogananda say about meditation?",
                ],
                inputs=input_text_pf,
            )

        with gr.TabItem("Custom Chatbots"):
            with gr.Row():
                with gr.Column(scale=4):
                    chatbot_output = gr.Chatbot(height=600)
                    user_input_cb = gr.Textbox(placeholder="Type your question here...", label="Your Question", value="What is the meaning of life?")
                    submit_button_cb = gr.Button("Submit")

                with gr.Column(scale=1):
                    system_prompt_dropdown = gr.Dropdown(
                        choices=list(sp.system_prompt_templates.keys()),
                        label="Select Chatbot Instructions",
                        value=list(sp.system_prompt_templates.keys())[0],
                    )
                    system_prompt_display = gr.Textbox(
                        value=sp.system_prompt_templates[list(sp.system_prompt_templates.keys())[0]],
                        label="Current Chatbot Instructions",
                        lines=5,
                        interactive=False
                    )

                    gr.Markdown("""
                    <div class="source-box">
                    <strong>Available sources:</strong>
                    <ul>
                        <li>Journey to Self-Realization</li>
                        <li>The Second Coming of Christ</li>
                        <li>Autobiography of a Yogi</li>
                    </ul>
                    </div>
                    """)

            system_prompt_dropdown.change(
                fn=chatbot.reset_system_prompt,
                inputs=[system_prompt_dropdown],
                outputs=[system_prompt_display]
            )

            submit_button_cb.click(
                fn=respond_chatbot,
                inputs=[user_input_cb, chatbot_output],
                outputs=[chatbot_output]
            )

# Access the secrets
username = os.getenv("USERNAME")
password = os.getenv("PASSWORD")

# Launch the interface
demo.launch(share=True, auth=(username, password), debug=True)
prompts/system_prompts.py
CHANGED
@@ -3,7 +3,6 @@
 
 # System prompt options for the chatbot
 system_prompt_templates = {
-    "Open-Ended Bot": '''You are a helpful assistant for people that want to query the teachings of Paramhansa Yogananda and the Self-Realization Fellowship. Look up the vector database provided for relevantpassages to answer queries. Only use the context provided. Do not use any other sources.''',
 
     "Question-Answer Bot with Quotes": """You are a helpful assistant that can query the teachings of Paramhansa Yogananda and the Self-Realization Fellowship from a vector database.
     You will only answer questions based on the provided context.
@@ -29,10 +28,6 @@ system_prompt_templates = {
     - Provide up to three suggestions for followup quote searches
     - Do not paraphrase the quotes into an answer. Return the quotes directly.''',
 
-    "Passage Finder": '''You are a helpful assistant that finds passages from the teachings of Paramhansa Yogananda and
-    the Self-Realization Fellowship that are related to a given topic or question from a vector database. Output
-    the full passages provided in the context. Organize them by relevance to the user query. Bold quotes that
-    are particularly insightful or relevant to the user query.''',
 
     "Subtopic Finder and Deep Dive": '''You are a helpful assistant that generates subtopics for a given topic or question from the teachings of Paramhansa Yogananda and the Self-Realization Fellowship
     and allows users to do a deep dive into those subtopics. Retrieve at least 20 passages from the vector database for the given topic and then list out sub-topics that emerge from the retrieved passages.
@@ -48,6 +43,21 @@ system_prompt_templates = {
 
 }
 
+
+chatbot_descriptions = {
+
+    "Question-Answer Bot with Quotes": """Chatbot that answers questions with quotes from the SRF teachings.""",
+
+    "Quote Finder": '''Chatbot that finds quotes from the SRF teachings.''',
+
+    "Subtopic Finder and Deep Dive": '''Chatbot that generates subtopics for a given topic or question from the SRF teachings and allows users to do a deep dive into those subtopics.''',
+
+    "In Depth Topic Summary": '''Chatbot that summarizes topics from the SRF teachings in depth.''',
+
+}
+
+
+
 def get_systemprompt(template_name):
     """
     Retrieve a system prompt based on the template name.
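The new chatbot_descriptions dict is keyed by the same display names as system_prompt_templates, which is what the dropdown callback in app.py relies on when it returns a (description, instructions) pair. A minimal lookup sketch, not part of the commit; the "Quote Finder" and "In Depth Topic Summary" templates referenced here live outside the changed hunks:

```python
# Minimal sketch (not part of the commit): both dicts share the dropdown's keys,
# so one selection resolves to a description plus the full instruction text.
import prompts.system_prompts as sp

selected = list(sp.system_prompt_templates.keys())[0]
description = sp.chatbot_descriptions[selected]
instructions = sp.system_prompt_templates[selected]
print(selected, "->", description)
```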
src/generic_bot.py
ADDED
@@ -0,0 +1,154 @@
import sys
import os
import uuid
from dotenv import load_dotenv
from typing import Annotated, List, Tuple
from typing_extensions import TypedDict
from langchain.tools import tool, BaseTool
from langchain.schema import Document
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.checkpoint.memory import MemorySaver
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate
# from langchain.schema import SystemMessage, HumanMessage, AIMessage, ToolMessage
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage, SystemMessage
from langchain.retrievers.multi_query import MultiQueryRetriever
import json
sys.path.append(os.path.abspath('..'))


import src.utils.qdrant_manager as qm
import prompts.system_prompts as sp

load_dotenv('/Users/nadaa/Documents/code/py_innovations/srf_chatbot_v2/.env')


class ToolManager:
    def __init__(self, collection_name="openai_large_chunks_1500char"):
        self.tools = []
        self.qdrant = qm.QdrantManager(collection_name=collection_name)
        self.vectorstore = self.qdrant.get_vectorstore()
        self.add_tools()

    def get_tools(self):
        return self.tools

    def add_tools(self):
        @tool
        def vector_search(query: str, k: int = 5) -> list[Document]:
            """Useful for simple queries. This tool will search a vector database for passages from the teachings of Paramhansa Yogananda and other publications from the Self Realization Fellowship (SRF).
            The user has the option to specify the number of passages they want the search to return, otherwise the number of passages will be set to the default value."""
            retriever = self.vectorstore.as_retriever(search_kwargs={"k": k})
            documents = retriever.invoke(query)
            return documents

        @tool
        def multiple_query_vector_search(query: str, k: int = 5) -> list[Document]:
            """Useful when the user's query is vague, complex, or involves multiple concepts.
            This tool will write multiple versions of the user's query and search the vector database for relevant passages.
            Use this tool when the user asks for an in depth answer to their question."""

            llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.5)
            retriever_from_llm = MultiQueryRetriever.from_llm(retriever=self.vectorstore.as_retriever(), llm=llm)
            documents = retriever_from_llm.invoke(query)
            return documents

        self.tools.append(vector_search)
        self.tools.append(multiple_query_vector_search)

class BasicToolNode:
    """A node that runs the tools requested in the last AIMessage."""

    def __init__(self, tools: list) -> None:
        self.tools_by_name = {tool.name: tool for tool in tools}

    def __call__(self, inputs: dict):
        if messages := inputs.get("messages", []):
            message = messages[-1]
        else:
            raise ValueError("No message found in input")
        outputs = []
        documents = []
        for tool_call in message.tool_calls:
            tool_result = self.tools_by_name[tool_call["name"]].invoke(
                tool_call["args"]
            )
            outputs.append(
                ToolMessage(
                    content=str(tool_result),
                    name=tool_call["name"],
                    tool_call_id=tool_call["id"],
                )
            )
            documents += tool_result

        return {"messages": outputs, "documents": documents}

class AgentState(TypedDict):

    messages: Annotated[list, add_messages]
    documents: list[Document]
    system_message: list[SystemMessage]
    system_message_dropdown: list[str]

class GenericChatbot:
    def __init__(
        self,
        model: str = 'gpt-4o-mini',
        temperature: float = 0,
    ):

        self.llm = ChatOpenAI(model=model, temperature=temperature)
        self.tools = ToolManager().get_tools()
        self.llm_with_tools = self.llm.bind_tools(self.tools)

        # Build the graph
        self.graph = self.build_graph()
        # Get the configurable
        self.config = self.get_configurable()


    def get_configurable(self):
        # This thread id is used to keep track of the chatbot's conversation
        self.thread_id = str(uuid.uuid4())
        return {"configurable": {"thread_id": self.thread_id}}


    # Add the system message onto the llm
    ## THIS SHOULD BE REFACTORED SO THAT THE STATE ALWAYS HAS THE DEFINITIVE SYSTEM MESSAGE THAT SHOULD BE IN USE
    def chatbot(self, state: AgentState):
        messages = state["messages"]
        return {"messages": [self.llm_with_tools.invoke(messages)]}

    def build_graph(self):
        # Add chatbot state
        graph_builder = StateGraph(AgentState)

        # Add nodes
        tool_node = BasicToolNode(tools=self.tools)
        # tool_node = ToolNode(self.tools)
        graph_builder.add_node("tools", tool_node)
        graph_builder.add_node("chatbot", self.chatbot)

        # Add a conditional edge wherein the chatbot can decide whether or not to go to the tools
        graph_builder.add_conditional_edges(
            "chatbot",
            tools_condition,
        )

        # Add fixed edges
        graph_builder.add_edge(START, "chatbot")
        graph_builder.add_edge("tools", "chatbot")

        # Instantiate the memory saver
        memory = MemorySaver()

        # Compile the graph
        return graph_builder.compile(checkpointer=memory)
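GenericChatbot exposes the compiled LangGraph as self.graph and a per-instance thread id in self.config, which is exactly how app.py drives it. A minimal usage sketch, not part of the commit, assuming the Qdrant collection and OpenAI credentials used by ToolManager are available:

```python
# Minimal sketch (not part of the commit) of how app.py drives GenericChatbot.
from langchain_core.messages import HumanMessage
import src.generic_bot as gb

bot = gb.GenericChatbot()
result = bot.graph.invoke({"messages": [HumanMessage(content="importance of meditation")]}, bot.config)
print(result["messages"][-1].content)

# Any passages retrieved by the search tools are checkpointed on the graph state.
state = bot.graph.get_state(config=bot.config).values
for doc in state.get("documents", []):
    print(doc.metadata.get("publication_name"), "-", doc.metadata.get("chapter_name"))
```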
src/srf_bot.py
CHANGED
@@ -96,7 +96,7 @@ class AgentState(TypedDict):
 class SRFChatbot:
     def __init__(
         self,
-        chatbot_instructions_dropdown: str = '
+        chatbot_instructions_dropdown: str = 'Question-Answer Bot with Quotes',
         model: str = 'gpt-4o-mini',
         temperature: float = 0,
     ):
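This one-line change gives chatbot_instructions_dropdown a concrete default, so the bare constructor call in app.py now starts on the Question-Answer instructions. A sketch of the equivalence, not part of the commit:

```python
# Minimal sketch (not part of the commit): with the new default, the bare call
# in app.py is equivalent to selecting "Question-Answer Bot with Quotes" up front.
import src.srf_bot as sb

chatbot = sb.SRFChatbot()
# same as: sb.SRFChatbot(chatbot_instructions_dropdown='Question-Answer Bot with Quotes')
```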