import gradio as gr
import json
from datetime import datetime

import torch
import transformers
from transformers import AutoTokenizer

from theme import TufteInspired
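# Note: TufteInspired is assumed to be a custom Gradio theme defined in a local
# theme.py alongside this script; it is not part of the gradio package itself.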
# Load the model and build a text-generation pipeline
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id, add_special_tokens=True)
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    tokenizer=tokenizer,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device="cuda",
)
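# Note: torch.bfloat16 weights with device="cuda" assume GPU hardware is
# available; on CPU-only hardware the pipeline would need a fallback such as
# device="cpu" with the default dtype.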
# Generate a blurb with the LLM
def generate_blurb():
    # Ask the model for a blurb and return the generated text
    return pipeline("Write a blurb for a made-up book")[0]["generated_text"]
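# Note: by default the text-generation pipeline returns the prompt followed by
# the continuation; passing return_full_text=False and/or max_new_tokens to the
# call above would trim the echoed prompt and bound the blurb length.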
# Function to log blurb and vote
def log_blurb_and_vote(blurb, vote):
    log_entry = {
        "timestamp": datetime.now().isoformat(),
        "blurb": blurb,
        "vote": vote,
    }
    with open("blurb_log.jsonl", "a") as f:
        f.write(json.dumps(log_entry) + "\n")
    return f"Logged: {vote}"
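# Each vote is appended as one JSON object per line of blurb_log.jsonl, e.g.:
# {"timestamp": "2024-07-01T12:00:00", "blurb": "...", "vote": "upvote"}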
# Create custom theme
tufte_theme = TufteInspired()

# Create Gradio interface
with gr.Blocks(theme=tufte_theme) as demo:
    gr.Markdown("<h1 style='text-align: center;'>Would you read it?</h1>")
    gr.Markdown("Click the button to generate a blurb for a made-up book, then vote on its quality.")
    with gr.Row():
        generate_btn = gr.Button("Write a Blurb", variant="primary")
    blurb_output = gr.Textbox(label="Generated Blurb", lines=5, interactive=False)
    with gr.Row():
        upvote_btn = gr.Button("👍 would read")
        downvote_btn = gr.Button("👎 wouldn't read")
    vote_output = gr.Textbox(label="Vote Status", interactive=False)

    generate_btn.click(generate_blurb, outputs=blurb_output)
    upvote_btn.click(lambda x: log_blurb_and_vote(x, "upvote"), inputs=blurb_output, outputs=vote_output)
    downvote_btn.click(lambda x: log_blurb_and_vote(x, "downvote"), inputs=blurb_output, outputs=vote_output)
if __name__ == "__main__":
    demo.launch()
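
# The collected votes can later be read back from the JSONL log, e.g.:
#   import json
#   with open("blurb_log.jsonl") as f:
#       votes = [json.loads(line) for line in f]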