Spaces:
Sleeping
Sleeping
File size: 1,928 Bytes
eafe3d9 f834661 22b180c f834661 22b180c f834661 aa95aba b1237e6 f834661 22b180c b7640aa b1237e6 22b180c f834661 22b180c b1237e6 f834661 b7640aa 2bdf687 f834661 22b180c b7640aa f834661 22b180c b1237e6 aa95aba 22b180c f834661 22b180c b1237e6 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 |
import os
import gradio as gr
from fastapi import FastAPI
from groq import Groq
import logging
from pydantic import BaseModel
# Set up logging
# DEBUG level is verbose; fine for a demo Space, consider INFO in production.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
# Initialize Groq client
# NOTE(review): if GROQ_API_KEY is unset this passes api_key=None and only
# fails at request time, not at startup -- consider failing fast here.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
app = FastAPI()
class ChatInput(BaseModel):
    """Request body for the POST /chat endpoint."""

    # The new user message to answer.
    user_input: str
    # Prior conversation turns; presumably a list of [user, assistant]
    # pairs (chat_endpoint reads h[0] and h[1]) -- TODO confirm against
    # the Gradio history format actually passed by chat_with_ai.
    history: list
@app.post("/chat")
async def chat_endpoint(chat_input: ChatInput):
    """Answer a cricket question via the Groq chat-completions API.

    Assembles the full conversation (system prompt, prior turns, new
    user message), queries the model, and returns ``{"response": text}``.
    Any failure is logged and reported as ``{"error": message}`` in the
    body (still HTTP 200).
    """
    try:
        # Seed with the system prompt, then replay each prior turn from
        # the history as a user/assistant message pair, and finish with
        # the new question.
        messages = [{"role": "system", "content": "You are a helpful assistant specializing in cricket."}]
        for turn in chat_input.history:
            messages.append({"role": "user", "content": turn[0]})
            messages.append({"role": "assistant", "content": turn[1]})
        messages.append({"role": "user", "content": chat_input.user_input})

        completion = client.chat.completions.create(
            messages=messages,
            model="llama-3.1-70b-versatile",
            max_tokens=1000,
            temperature=0.7,
        )
        answer = completion.choices[0].message.content
        return {"response": answer}
    except Exception as e:
        logging.error(f"An error occurred in chat_endpoint: {str(e)}")
        return {"error": f"An error occurred: {str(e)}"}
def chat_with_ai(user_input, history):
    """Gradio callback: forward a message to the local /chat endpoint.

    Parameters
    ----------
    user_input : str
        The new user message from the Gradio chat box.
    history : list
        Prior conversation turns as provided by Gradio's ChatInterface.

    Returns
    -------
    str
        The assistant's reply, or an error message if the endpoint failed.
    """
    from fastapi.testclient import TestClient

    # Named test_client to avoid shadowing the module-level Groq `client`.
    test_client = TestClient(app)
    response = test_client.post(
        "/chat", json={"user_input": user_input, "history": history}
    )
    data = response.json()
    # chat_endpoint returns {"error": ...} on failure; the original code
    # indexed data["response"] unconditionally and raised KeyError in that
    # case. Surface the error text in the chat instead of crashing.
    if "response" in data:
        return data["response"]
    return data.get("error", "An unknown error occurred.")
# Gradio Interface
# chat_with_ai bridges the UI to the /chat FastAPI endpoint below.
demo = gr.ChatInterface(
    fn=chat_with_ai,
    title="Cricket Guru",
    description="Ask me anything about cricket!",
    examples=["Who won the last Cricket World Cup?", "Explain LBW rule"],
)
# Mount Gradio app to FastAPI
# Serves the chat UI at "/" while keeping the JSON API at "/chat".
app = gr.mount_gradio_app(app, demo, path="/")
if __name__ == "__main__":
    # Run the combined FastAPI + Gradio app; 7860 is Gradio's conventional
    # port (and the one Hugging Face Spaces expects -- presumably why it
    # was chosen here).
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)