Spaces: Running
Krishs21 committed
Commit · ec1883f
1 Parent(s): bde4d1b
Files uploaded
- app.py +59 -0
- requirements.txt +0 -0
app.py
ADDED
@@ -0,0 +1,59 @@
from langchain_groq import ChatGroq
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
import streamlit as st

st.title("Your AI friend")
groq_api_key = st.text_input("Please enter your Groq API key")

if groq_api_key:
    llm = ChatGroq(model="Llama-3.3-70b-Specdec", groq_api_key=groq_api_key)
    session_id = st.text_input("Please enter a session id", value="default_session")

    # Per-session chat histories are kept in Streamlit session state.
    if "store" not in st.session_state:
        st.session_state.store = {}

    def get_session_history(session_id: str) -> BaseChatMessageHistory:
        if session_id not in st.session_state.store:
            st.session_state.store[session_id] = ChatMessageHistory()
        return st.session_state.store[session_id]

    system_prompt = (
        "You are a friend named Raju for solving questions provided to you. "
        "Given the chat history, provide a solution to the problem."
    )

    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder("chat_history"),
            ("user", "{question}"),
        ]
    )

    chain = qa_prompt | llm
    chat_history_llm = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="question",
        history_messages_key="chat_history",
    )

    def trim_history(history: BaseChatMessageHistory, max_messages: int = 10):
        # Trim the message history to retain only the last N messages.
        if len(history.messages) > max_messages:
            history.messages = history.messages[-max_messages:]

    user_input = st.text_input("Ask a question to your friend Raju")
    if user_input:
        session_history = get_session_history(session_id)
        response = chat_history_llm.invoke(
            {"question": user_input},
            config={"configurable": {"session_id": session_id}},
        )
        # RunnableWithMessageHistory already records the user question and the
        # model reply in the session history, so only trimming is needed here.
        trim_history(session_history)
        st.write(response.content)
requirements.txt
ADDED
File without changes
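The committed requirements.txt shows no content in this diff, so the dependency list below is only a hypothetical sketch inferred from the imports in app.py, not the contents of the committed file; versions are deliberately left unpinned.

streamlit
langchain-groq
langchain-core
langchain-community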
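To try this commit outside the Space, one option (assuming the packages sketched above are installed, e.g. with pip install streamlit langchain-groq langchain-core langchain-community) is to launch the app with Streamlit:

streamlit run app.py

Streamlit then serves the app locally and asks for the Groq API key, a session id, and the question in the browser.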