from langchain_groq import ChatGroq
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate,MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
import streamlit as st
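
# ------------------------------------------------------------------
# "Raju" - a simple Streamlit chat app built on LangChain + ChatGroq.
# Each session id gets its own ChatMessageHistory so the model can see
# previous turns; the history is trimmed to the last 10 messages.
# ------------------------------------------------------------------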

st.title("Your Ai friend")
groq_api_key=st.text_input("Please Enter Groq Api key")

if groq_api_key:
    # NOTE: the model id below is taken from the original script; if it is no
    # longer available on your Groq account, swap in another supported chat model.
    llm = ChatGroq(model="llama-3.3-70b-specdec", groq_api_key=groq_api_key)
    session_id = st.text_input("Please enter a session id", value="default_session")

    # Keep one ChatMessageHistory per session id across Streamlit reruns.
    if 'store' not in st.session_state:
        st.session_state.store = {}

    def get_session_history(session_id: str) -> BaseChatMessageHistory:
        """Return the message history for this session, creating it if needed."""
        if session_id not in st.session_state.store:
            st.session_state.store[session_id] = ChatMessageHistory()
        return st.session_state.store[session_id]
    
    system_prompt = (
        "You are a helpful friend named Raju who answers the user's questions. "
        "Use the chat history to give a clear solution to the current problem."
    )

    # Prompt structure: system instructions, then prior turns, then the new question.
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder("chat_history"),
            ("user", "{question}"),
        ]
    )
    chain = qa_prompt | llm

    # Wrap the chain so chat history is injected before the call and the new
    # question/answer pair is saved back to the store afterwards.
    chat_history_llm = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="question",
        history_messages_key="chat_history",
        # No output_messages_key is needed: the chain returns an AIMessage
        # directly, so RunnableWithMessageHistory can store it as-is.
    )
    def trim_history(history: BaseChatMessageHistory, max_messages: int = 10):
        """Trim the message history to retain only the last N messages."""
        if len(history.messages) > max_messages:
            history.messages = history.messages[-max_messages:]

    user_input = st.text_input("Ask a question to your friend Raju")
    if user_input:
        session_history = get_session_history(session_id)
        response = chat_history_llm.invoke(
            {"question": user_input},
            config={"configurable": {"session_id": session_id}},
        )
        # RunnableWithMessageHistory already records the question and the answer
        # in session_history, so they are not added again here; we only cap the
        # history length after each turn.
        trim_history(session_history)
        st.write(response.content)
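
# To try the app locally (assuming this file is saved as app.py):
#   streamlit run app.py
# then paste a Groq API key, pick a session id, and start chatting.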