add new fields
app.py CHANGED
@@ -34,7 +34,7 @@ def add_text(history, text):
     return history, gr.Textbox(value="", interactive=False)


-def bot(history, api_kind):
+def bot(history, api_kind, chunk_table, embedding_model, llm_model, cross_encoder, top_k_param):
     query = history[-1][0]

     if not query:
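The first hunk only widens bot's signature; the diff does not show the body changes that consume the new inputs. As a rough sketch of how a handler with this signature might use them, the body and the retrieve/generate helpers below are assumptions for illustration, not code from this commit:

    # Hypothetical sketch; only the bot signature matches the diff above.
    def retrieve(query, chunk_table, embedding_model, cross_encoder, top_k):
        # Stand-in retriever: a real one would embed the query, search the
        # chosen chunk table, and rerank hits with the cross-encoder.
        return [], ""

    def generate(query, history, llm_model, api_kind):
        # Stand-in generator: a real one would call the chosen LLM backend.
        yield f"[{api_kind}/{llm_model}] answer for: {query}"

    def bot(history, api_kind, chunk_table, embedding_model, llm_model, cross_encoder, top_k_param):
        query = history[-1][0]
        if not query:
            raise ValueError("Empty query")
        # gr.Radio delivers its choices as strings, so top-K needs a cast.
        top_k = int(top_k_param)
        documents, prompt_html = retrieve(query, chunk_table, embedding_model, cross_encoder, top_k)
        history[-1][1] = ""
        for answer in generate(query, history, llm_model, api_kind):
            history[-1][1] = answer
            yield history, prompt_html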
@@ -86,26 +86,67 @@ with gr.Blocks() as demo:
     )
     txt_btn = gr.Button(value="Submit text", scale=1)

-    api_kind = gr.Radio(choices=["HuggingFace",
-                                 "OpenAI"], value="HuggingFace")
-
-    chunk_table = gr.Radio(choices=[
-                                    "
+    api_kind = gr.Radio(choices=["HuggingFace",
+                                 "OpenAI"], value="HuggingFace")
+
+    chunk_table = gr.Radio(choices=["BGE_CharacterTextSplitter",
+                                    "BGE_FixedSizeSplitter",
+                                    "BGE_RecursiveCharacterTextSplitter",
+                                    "MiniLM_CharacterTextSplitter",
+                                    "MiniLM_FixedSizeSplitter",
+                                    "MiniLM_RecursiveCharacterSplitter"
+                                    ],
                                     value="MiniLM_CharacterTextSplitter",
                                     label="Chunk table")
+    embedding_model = gr.Radio(
+        choices=[
+            "BAAI/bge-large-en-v1.5",
+            "sentence-transformers/all-MiniLM-L6-v2",
+        ],
+        value="sentence-transformers/all-MiniLM-L6-v2",
+        label='Embedding model'
+    )
+    llm_model = gr.Radio(
+        choices=[
+            "gpt-3.5-turbo",
+            "gpt-4-turbo-preview",
+            "mistralai/Mistral-7B-v0.1",
+            "mistralai/Mixtral-8x7B-Instruct-v0.1"
+        ],
+        value="gpt-3.5-turbo",
+        label='LLM'
+    )
+    cross_encoder = gr.Radio(
+        choices=[
+            "BAAI/bge-reranker-large",
+            "cross-encoder/ms-marco-MiniLM-L-6-v2",
+        ],
+        value="cross-encoder/ms-marco-MiniLM-L-6-v2",
+        label='Cross-encoder model'
+    )
+    top_k_param = gr.Radio(
+        choices=[
+            "5",
+            "10",
+            "20",
+            "50",
+        ],
+        value="5",
+        label='top-K'
+    )


     prompt_html = gr.HTML()
     # Turn off interactivity while generating if you click
     txt_msg = txt_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
-        bot, [chatbot, api_kind, chunk_table], [chatbot, prompt_html])
+        bot, [chatbot, api_kind, chunk_table, embedding_model, llm_model, cross_encoder, top_k_param], [chatbot, prompt_html])

     # Turn it back on
     txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)

     # Turn off interactivity while generating if you hit enter
     txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
-        bot, [chatbot, api_kind, chunk_table], [chatbot, prompt_html])
+        bot, [chatbot, api_kind, chunk_table, embedding_model, llm_model, cross_encoder, top_k_param], [chatbot, prompt_html])

     # Turn it back on
     txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
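Both event chains now pass the same seven inputs to bot. If that list grows again, it could be factored into one variable; an optional tidy-up, not part of this commit:

    rag_inputs = [chatbot, api_kind, chunk_table, embedding_model,
                  llm_model, cross_encoder, top_k_param]
    txt_msg = txt_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, rag_inputs, [chatbot, prompt_html])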
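The click/submit wiring around these changes follows Gradio's event-chaining idiom: add_text locks the textbox while recording the user turn, .then() runs bot, and a final .then() re-enables input. Below is a minimal self-contained sketch of that same pattern; the component names mirror the app, but the echo bot is a stand-in for the RAG handler:

    import gradio as gr

    def add_text(history, text):
        history = history + [[text, None]]
        # Lock the textbox so nothing can be typed while the bot runs.
        return history, gr.Textbox(value="", interactive=False)

    def bot(history):
        # Stand-in for the app's RAG bot: just echo the last user turn.
        history[-1][1] = "echo: " + history[-1][0]
        return history

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot([])
        txt = gr.Textbox(placeholder="Type a message and press enter")
        txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
            bot, [chatbot], [chatbot])
        # Re-enable the textbox once generation has finished.
        txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)

    if __name__ == "__main__":
        demo.launch()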