# aadhya / app.py
# Source: Prathmesh2008's Hugging Face Space ("Update app.py", commit 5c5ec0b, verified)
import os
import random
import time
from typing import Iterator, List, Tuple

import gradio as gr
from openai import OpenAI
class EmmaChat:
    """Chat backend for the "Aadhya" persona.

    Wraps an OpenAI-compatible client pointed at NVIDIA's hosted inference
    endpoint and streams persona-conditioned chat completions.
    """

    def __init__(self):
        # SECURITY NOTE(review): the API key was hard-coded in source. Prefer
        # the NVIDIA_API_KEY environment variable; the original literal is
        # kept only as a backward-compatible fallback and should be revoked.
        self.client = OpenAI(
            base_url="https://integrate.api.nvidia.com/v1",
            api_key=os.environ.get(
                "NVIDIA_API_KEY",
                "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",
            ),
        )
        # Persona prompt prepended to every conversation.
        self.system_message = """You are Aadhya, a 17-year-old who grew up in Mumbai. You have a passion for art and enjoy playing volleyball. You excel in math and physics and also have a strong interest in chemistry and biology. You constantly seek knowledge and are willing to work hard. Additionally, you are a good decision-maker and can anticipate many situations in advance. At your core, you are a kind-hearted person who strives to serve others and make a positive impact. You work toward a better future for both yourself and humanity."""

    def generate_response(self, message: str, history: List[Tuple[str, str]]) -> Iterator[str]:
        """Stream the assistant's reply for *message* given prior *history*.

        Args:
            message: The latest user utterance.
            history: Prior (user, assistant) turn pairs, as supplied by
                gr.ChatInterface's tuple-style history.

        Yields:
            The cumulative response text after each streamed chunk (Gradio
            expects the full partial string, not deltas). On failure, yields
            a single apology string containing the error.
        """
        # Rebuild the conversation in OpenAI chat-message format, with the
        # persona system prompt first.
        messages = [{"role": "system", "content": self.system_message}]
        for user_msg, assistant_msg in history:
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": assistant_msg})
        messages.append({"role": "user", "content": message})
        try:
            completion = self.client.chat.completions.create(
                model="meta/llama-3.2-3b-instruct",
                messages=messages,
                temperature=0.2,
                max_tokens=1024,
                stream=True,
            )
            # Accumulate streamed deltas and re-yield the running total so
            # the UI shows progressively longer text.
            full_response = ""
            for chunk in completion:
                delta = chunk.choices[0].delta.content
                if delta is not None:
                    full_response += delta
                    yield full_response
        except Exception as e:
            # Surface the failure to the user instead of crashing the UI.
            yield f"I apologize, but I encountered an error: {str(e)}"
def create_chat_interface() -> gr.ChatInterface:
    """Build and return the Gradio ChatInterface for the Aadhya persona.

    Wires an EmmaChat backend into gr.ChatInterface with custom CSS,
    example prompts, and a pink/purple Soft theme.
    """
    backend = EmmaChat()

    # CSS tweaks: WhatsApp-like bubble colors, rounded avatars, and a
    # centered, width-capped message column.
    chat_css = """
    .message.user div.content {
        background-color: #DCF8C6 !important;
    }
    .message.bot div.content {
        background-color: #E8E8E8 !important;
    }
    .message.user, .message.bot {
        padding: 1rem;
    }
    .avatar {
        border-radius: 50%;
        width: 40px;
        height: 40px;
    }
    .message-wrap {
        max-width: 800px;
        margin: 0 auto;
    }
    """

    starter_prompts = [
        ["Hi, can you intro yourself?"],
        ["Is there any way I can get help from you? I'm glad to meet you."],
        ["I'm so glad to connect with you! Do you think we can work together on anything?"],
        ["How can I start a small garden at home?"],
    ]

    # Note: retry_btn / undo_btn / clear_btn kwargs were removed from newer
    # Gradio versions, so they are intentionally not passed here.
    return gr.ChatInterface(
        fn=backend.generate_response,
        title="Chat with Aadhya 👩🏻",
        description="""Aadhya is a 17-year-old from Mumbai with a passion for Art and a competitive spirit in volleyball. She excels in math, physics, chemistry, and biology, blending her analytical skills with a love for problem-solving. Driven by a desire to positively impact humanity, she is also committed to personal growth and excellence.""",
        examples=starter_prompts,
        theme=gr.themes.Soft(
            primary_hue="pink",
            secondary_hue="purple",
        ),
        css=chat_css,
    )
if __name__ == "__main__":
    # Entry point: build the UI, enable request queuing (required for
    # streaming generators), and serve publicly on port 7000.
    app = create_chat_interface()
    app.queue()
    app.launch(
        share=True,
        server_name="0.0.0.0",
        server_port=7000,
        show_api=False,
    )