# Hugging Face Spaces page chrome captured by the scrape ("Spaces: Sleeping") — kept as a comment.
# Standard library
import logging
import os

# Third-party
import gradio as gr
from fastapi import FastAPI
from groq import Groq
from pydantic import BaseModel

# Set up logging: DEBUG level with timestamped messages.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

# Initialize Groq client. GROQ_API_KEY must be set in the environment;
# os.environ.get returns None otherwise and API calls will fail at request time.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

app = FastAPI()
class ChatInput(BaseModel):
    """Request payload for the /chat endpoint."""

    # The user's latest message.
    user_input: str
    # Prior conversation turns; each item is a [user_message, assistant_reply]
    # pair (chat_endpoint reads h[0] and h[1]).
    history: list
@app.post("/chat")  # BUG FIX: route was never registered, so chat_with_ai's POST to /chat returned 404
async def chat_endpoint(chat_input: ChatInput):
    """Answer a cricket question via the Groq chat-completions API.

    Rebuilds the full message list (system prompt + prior turns + new user
    message) on every call, since the endpoint itself is stateless.

    Returns:
        {"response": <assistant text>} on success,
        {"error": <message>} on any failure (never raises to the caller).
    """
    try:
        messages = [{"role": "system", "content": "You are a helpful assistant specializing in cricket."}]
        # Replay history as alternating user/assistant turns.
        for h in chat_input.history:
            messages.append({"role": "user", "content": h[0]})
            messages.append({"role": "assistant", "content": h[1]})
        messages.append({"role": "user", "content": chat_input.user_input})
        chat_completion = client.chat.completions.create(
            messages=messages,
            model="llama-3.1-70b-versatile",
            max_tokens=1000,
            temperature=0.7,
        )
        response = chat_completion.choices[0].message.content
        return {"response": response}
    except Exception as e:
        # logging.exception includes the traceback, unlike logging.error.
        logging.exception(f"An error occurred in chat_endpoint: {str(e)}")
        return {"error": f"An error occurred: {str(e)}"}
def chat_with_ai(user_input, history):
    """Gradio callback: forward one chat turn to the FastAPI /chat endpoint.

    Uses an in-process TestClient rather than a real HTTP request so the UI
    and API share one process.

    Args:
        user_input: The user's latest message.
        history: List of [user_message, assistant_reply] pairs from Gradio.

    Returns:
        The assistant's reply text, or the endpoint's error message if the
        call failed.
    """
    from fastapi.testclient import TestClient

    # Named api_client so it does not shadow the module-level Groq `client`.
    api_client = TestClient(app)
    resp = api_client.post("/chat", json={"user_input": user_input, "history": history})
    data = resp.json()
    # BUG FIX: the endpoint returns {"error": ...} on failure; indexing
    # ["response"] unconditionally raised KeyError in that case.
    if "response" in data:
        return data["response"]
    return data.get("error", "An unknown error occurred.")
# Gradio chat UI; chat_with_ai proxies each turn to the FastAPI /chat endpoint.
demo = gr.ChatInterface(
    fn=chat_with_ai,
    title="Cricket Guru",
    description="Ask me anything about cricket!",
    examples=["Who won the last Cricket World Cup?", "Explain LBW rule"],
)

# Mount the Gradio app at the FastAPI root so one server hosts both UI and API.
app = gr.mount_gradio_app(app, demo, path="/")
if __name__ == "__main__":
    import uvicorn

    # 0.0.0.0:7860 is the conventional Hugging Face Spaces host/port binding.
    uvicorn.run(app, host="0.0.0.0", port=7860)