File size: 3,851 Bytes
0f62467
d042af0
 
 
0f62467
d042af0
 
 
 
 
 
0f62467
d042af0
0f62467
d042af0
 
 
 
 
 
 
0f62467
d042af0
 
 
 
 
 
 
 
0f62467
d042af0
 
 
 
 
 
0f62467
d042af0
 
0f62467
d042af0
 
0f62467
d042af0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0f62467
 
d042af0
 
 
 
 
 
 
 
 
 
 
 
 
 
0f62467
d042af0
 
 
 
 
0f62467
d042af0
0f62467
 
d042af0
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
import os
import random
import time
from typing import Iterator, List, Tuple

import gradio as gr
from openai import OpenAI

class EmmaChat:
    """Chat backend that streams persona ("Aadhya") replies from an
    NVIDIA-hosted Llama model through the OpenAI-compatible API.
    """

    def __init__(self):
        # SECURITY: an API key was previously hardcoded here and committed to
        # source — it should be revoked. The key is now read from the
        # NVIDIA_API_KEY environment variable instead.
        self.client = OpenAI(
            base_url="https://integrate.api.nvidia.com/v1",
            api_key=os.environ.get("NVIDIA_API_KEY", ""),
        )

        # Persona prompt sent as the system message on every request.
        self.system_message = """You are Aadhya, a 17-year-old who grew up in Mumbai. You have a passion for art and enjoy playing volleyball. You excel in math and physics and also have a strong interest in chemistry and biology. You constantly seek knowledge and are willing to work hard. Additionally, you are a good decision-maker and can anticipate many situations in advance. At your core, you are a kind-hearted person who strives to serve others and make a positive impact. You work toward a better future for both yourself and humanity."""

    def generate_response(self, message: str, history: List[Tuple[str, str]]) -> Iterator[str]:
        """Yield progressively longer partial replies for *message*.

        Args:
            message: The latest user utterance.
            history: Prior (user, assistant) turn pairs, oldest first.

        Yields:
            The accumulated assistant reply so far, re-yielded after each
            streamed chunk — the shape Gradio's ChatInterface expects for
            streaming. On any failure a single apology string containing
            the error is yielded instead.
        """
        # Flatten the (user, assistant) history into the OpenAI chat format,
        # with the persona prompt always first.
        messages = [{"role": "system", "content": self.system_message}]
        for user_msg, assistant_msg in history:
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": assistant_msg})
        messages.append({"role": "user", "content": message})

        try:
            completion = self.client.chat.completions.create(
                model="meta/llama-3.2-3b-instruct",
                messages=messages,
                temperature=0.2,
                max_tokens=1024,
                stream=True,
            )

            # Accumulate streamed deltas and re-yield the running text.
            full_response = ""
            for chunk in completion:
                delta = chunk.choices[0].delta.content
                if delta is not None:
                    full_response += delta
                    yield full_response

        except Exception as e:
            # Surface the failure in-chat rather than crashing the UI.
            yield f"I apologize, but I encountered an error: {str(e)}"

def create_chat_interface() -> gr.ChatInterface:
    """Assemble and return the Gradio chat UI wired to an EmmaChat backend."""
    bot = EmmaChat()

    # Styling for the user/bot chat bubbles, avatars, and message column width.
    chat_css = """
    .message.user div.content {
        background-color: #DCF8C6 !important;
    }
    .message.bot div.content {
        background-color: #E8E8E8 !important;
    }
    .message.user, .message.bot {
        padding: 1rem;
    }
    .avatar {
        border-radius: 50%;
        width: 40px;
        height: 40px;
    }
    .message-wrap {
        max-width: 800px;
        margin: 0 auto;
    }
    """

    # Soft pink/purple theme to match the persona.
    ui_theme = gr.themes.Soft(
        primary_hue="pink",
        secondary_hue="purple",
    )

    # Note: retry/undo/clear button kwargs were removed (no longer supported
    # by gr.ChatInterface in current Gradio versions).
    return gr.ChatInterface(
        fn=bot.generate_response,
        title="Chat with Aadhya 👩🏻",
        description="""Aadhya is a 17-year-old from Mumbai with a passion for Art and a competitive spirit in volleyball. She excels in math, physics, chemistry, and biology, blending her analytical skills with a love for problem-solving. Driven by a desire to positively impact humanity, she is also committed to personal growth and excellence.""",
        examples=[
            ["Hi, can you intro yourself?"],
            ["Is there any way I can get help from you? I'm glad to meet you."],
            ["I'm so glad to connect with you! Do you think we can work together on anything?"],
            ["How can I start a small garden at home?"]
        ],
        theme=ui_theme,
        css=chat_css
    )

if __name__ == "__main__":
    # Build the UI, enable queuing so streamed chunks are delivered
    # incrementally, then expose the app publicly on port 7000.
    app = create_chat_interface()
    app.queue()
    app.launch(
        share=True,
        server_name="0.0.0.0",
        server_port=7000,
        show_api=False,
    )