# Ai_Friend / app.py
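"""A small Streamlit chat app: the user enters a Groq API key and a session ID and
chats with "Raju", a Groq-hosted Llama 3.3 model with per-session message history.
Run locally with: streamlit run app.py
"""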
from langchain_groq import ChatGroq
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
import streamlit as st
st.title("Your AI friend")
groq_api_key = st.text_input("Please enter your Groq API key", type="password")
if groq_api_key:
    llm = ChatGroq(model="llama-3.3-70b-specdec", groq_api_key=groq_api_key)
    session_id = st.text_input("Please enter a session ID", value="default_session")

    # Keep one ChatMessageHistory per session ID in Streamlit's session state.
    if "store" not in st.session_state:
        st.session_state.store = {}

    def get_session_history(session_id: str) -> BaseChatMessageHistory:
        if session_id not in st.session_state.store:
            st.session_state.store[session_id] = ChatMessageHistory()
        return st.session_state.store[session_id]
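
    # Note: st.session_state survives Streamlit reruns within one browser session,
    # so each session_id keeps its own ChatMessageHistory across chat turns; the
    # store is in-memory only and is lost when the app restarts.
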
    system_prompt = (
        "You are a friend named Raju who helps solve the questions you are given. "
        "Use the chat history to provide a helpful solution to the problem."
    )
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder("chat_history"),
            ("user", "{question}"),
        ]
    )
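    # MessagesPlaceholder("chat_history") marks where the stored conversation is
    # spliced into the prompt on each call, and "{question}" receives the new input.
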
    chain = qa_prompt | llm
    chat_history_llm = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="question",
        history_messages_key="chat_history",
    )
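    # On each invoke, RunnableWithMessageHistory reads config["configurable"]["session_id"],
    # calls get_session_history(session_id), feeds the stored messages into the
    # "chat_history" placeholder of qa_prompt, and afterwards appends the new question
    # and the model's reply to that history, so past turns are replayed automatically.
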
    def trim_history(history: BaseChatMessageHistory, max_messages: int = 10):
        """Trim the message history to retain only the last N messages."""
        if len(history.messages) > max_messages:
            history.messages = history.messages[-max_messages:]
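    # Capping the stored history (last 10 messages by default) keeps the replayed
    # prompt from growing without bound as the conversation continues.
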
    user_input = st.text_input("Ask a question to your friend Raju")
    if user_input:
        session_history = get_session_history(session_id)
        response = chat_history_llm.invoke(
            {"question": user_input},
            config={"configurable": {"session_id": session_id}},
        )
        # RunnableWithMessageHistory already records both the user question and the
        # model's reply in session_history, so they are not appended again here
        # (doing so would duplicate every turn in the history).
        trim_history(session_history)
        st.write(response.content)