# open-domain-qa/app.py
import gradio as gr
from transformers import pipeline

# Load the pre-trained model
generator = pipeline("text-generation", model="EleutherAI/gpt-neo-2.7B")
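
# Note: gpt-neo-2.7B is a large checkpoint (several GB of weights); for quicker
# local testing, a smaller checkpoint such as "EleutherAI/gpt-neo-125M" could be
# swapped in above without changing the rest of the app.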

# Define the generation function used by the Gradio interface
def generate_response(prompt):
    # Generate a sampled continuation of the prompt
    response = generator(prompt, max_length=50, do_sample=True, temperature=0.9)
    return response[0]['generated_text']
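
# Example (illustrative): calling the function directly returns the prompt
# followed by a sampled continuation, e.g.
#   generate_response("What is open-domain question answering?")
# Exact output varies between calls because do_sample=True.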

# Create the Gradio interface
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="GPT-Neo Text Generation Model",
    description="Enter a prompt and get a generated text response.",
)

# Launch the Gradio interface (share=True creates a temporary public link when run locally)
iface.launch(share=True)