"""Minimal chatbot UI built with Streamlit around SmolLM2-1.7B-Instruct.

Streamlit re-executes this whole script on every widget interaction, so
the model is cached, the conversation lives in ``st.session_state``, and
control flow is ordered so state changes happen before rendering.
"""

import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

MODEL_NAME = "HuggingFaceTB/SmolLM2-1.7B-Instruct"


@st.cache_resource(show_spinner="Loading model...")
def load_model():
    """Load tokenizer and model once per process.

    Without caching, every rerun (each button click / text change) would
    reload ~1.7B parameters from disk.
    """
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
    return tokenizer, model


tokenizer, model = load_model()

# --- UI header ---
st.title("ChatGPT Clone")
st.write("A simple chatbot interface using SmolLM2-1.7B-Instruct")

# Conversation history persists across reruns in session state.
if "history" not in st.session_state:
    st.session_state.history = []

# Handle reset BEFORE rendering, so the cleared history is what this
# rerun displays (clearing after display left a stale chat on screen
# until the next interaction).
if st.button("Reset Chat"):
    st.session_state.history = []

user_input = st.text_input("You:", key="input")

if st.button("Send") and user_input:
    st.session_state.history.append({"role": "user", "content": user_input})

    # Build the prompt from the FULL conversation using the model's chat
    # template. Feeding only the latest message (as before) gave the bot
    # no memory and skipped the instruct-tuned prompt format.
    chat = [
        {
            "role": "user" if msg["role"] == "user" else "assistant",
            "content": msg["content"],
        }
        for msg in st.session_state.history
    ]
    input_ids = tokenizer.apply_chat_template(
        chat, add_generation_prompt=True, return_tensors="pt"
    )
    outputs = model.generate(
        input_ids,
        # max_new_tokens bounds only the reply; max_length also counted
        # prompt tokens, shrinking replies as the conversation grew.
        max_new_tokens=150,
        do_sample=True,
        top_p=0.95,
        top_k=50,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens — decoding outputs[0] whole
    # echoed the entire prompt back into every reply.
    response = tokenizer.decode(
        outputs[0][input_ids.shape[-1]:], skip_special_tokens=True
    )
    st.session_state.history.append({"role": "bot", "content": response})

# Render the conversation last so it reflects any state change above.
for message in st.session_state.history:
    role = "User" if message["role"] == "user" else "Bot"
    st.markdown(f"**{role}:** {message['content']}")