Ilyas KHIAT committed f8ebdbc (1 parent: cee0ff2)
Commit message: paste
Files changed:
- .gitignore (+2 -2)
- chat_with_pps.py (+5 -1)
.gitignore CHANGED
@@ -1,4 +1,4 @@
 __pycache__/
-.streamlit/
-.streamlit
+.streamlit/
+.streamlit/
 DATA_bziiit/vectorstore_op
chat_with_pps.py CHANGED
@@ -7,6 +7,7 @@ from langchain_core.prompts import ChatPromptTemplate
 from langchain_mistralai.chat_models import ChatMistralAI
 from download_chart import construct_plot
 from prompt import get_prompts_list
+from st_copy_to_clipboard import st_copy_to_clipboard
 
 
 load_dotenv()
@@ -87,6 +88,7 @@ def choose_model(index):
 def display_chat():
     # app config
     st.title("Chatbot")
+
 
     models_name = {
         "Mistral (FR)":1,
@@ -147,6 +149,7 @@ def display_chat():
     if isinstance(last_message, HumanMessage):
         with st.chat_message("AI"):
             response = st.write_stream(get_response(last_message.content, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
+            st_copy_to_clipboard(response)
             st.session_state.chat_history.append(AIMessage(content=response))
 
 
@@ -165,7 +168,8 @@ def display_chat():
         with st.chat_message("AI"):
             st.markdown(f"**{st.session_state.model}**")
             response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
-
+            st_copy_to_clipboard(response)
+        if "cartographie" in response:
             display_chart()
 
         st.session_state.chat_history.append(AIMessage(content=response))
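For context, the sketch below shows the interaction pattern this commit introduces: stream the model answer, offer a copy button via st_copy_to_clipboard, and render the chart only when the reply mentions "cartographie". It is a minimal, self-contained approximation, not the app's actual code: get_response and display_chart here are hypothetical stand-ins for the real helpers in chat_with_pps.py, and it assumes Streamlit >= 1.31 (for st.write_stream) plus the st-copy-to-clipboard package.

```python
# Minimal sketch of the pattern added in this commit (assumptions: Streamlit >= 1.31,
# st-copy-to-clipboard installed). get_response and display_chart are hypothetical
# stand-ins for the app's own helpers.
import streamlit as st
from langchain_core.messages import AIMessage
from st_copy_to_clipboard import st_copy_to_clipboard


def get_response(query: str):
    # Stand-in generator; the real app streams a LangChain chat model answer.
    yield f"Réponse à « {query} » (voir la cartographie)."


def display_chart():
    # Stand-in; the real app builds its plot via download_chart.construct_plot.
    st.line_chart({"valeurs": [1, 3, 2]})


st.title("Chatbot")
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

user_query = st.chat_input("Votre question")
if user_query:
    with st.chat_message("AI"):
        # Stream the answer into the chat bubble, then offer a one-click copy button.
        response = st.write_stream(get_response(user_query))
        st_copy_to_clipboard(response)
    # Only render the chart when the answer actually mentions the map.
    if "cartographie" in response:
        display_chart()
    st.session_state.chat_history.append(AIMessage(content=response))
```

st.write_stream returns the full concatenated text once streaming finishes, which is why the same response string can be passed to st_copy_to_clipboard, checked for "cartographie", and appended to the chat history.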