import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "deepseek-ai/DeepSeek-V3-Base"

# Cache the tokenizer and model so Streamlit does not reload them on every rerun.
@st.cache_resource
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
    return tokenizer, model

tokenizer, model = load_model()

st.title("DeepSeek Chatbot")
prompt = st.text_input("Enter your message:")

if prompt:
    # Tokenize the prompt and generate a bounded-length reply.
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    outputs = model.generate(inputs, max_new_tokens=256)
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    st.write(response)
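Assuming the script is saved to a file (the name app.py here is just a placeholder), it can be launched with Streamlit's CLI:

streamlit run app.py

Keep in mind that DeepSeek-V3-Base is a very large model, so loading it as shown requires substantial GPU memory; for local experimentation, a smaller causal-LM checkpoint can be substituted via model_name without changing the rest of the app.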