import glob
import json
import os
import uuid
from datetime import datetime
from pathlib import Path

import gradio as gr
import spaces
import torch
import transformers
from gradio import update
from huggingface_hub import CommitScheduler, hf_hub_download, login
from outlines import models, generate
from transformers import AutoTokenizer, AutoModelForCausalLM

from theme import TufteInspired

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id, add_special_tokens=True)


# Generate a blurb with Outlines' text generator on a ZeroGPU worker
@spaces.GPU(duration=120)
def generate_blurb(history):
    model = models.transformers(model_id)
    generator = generate.text(model)
    resp = generator("Write a blurb for a book")
    return resp


# Function to log blurb and vote
def log_blurb_and_vote(blurb, vote):
    log_entry = {"timestamp": datetime.now().isoformat(), "blurb": blurb, "vote": vote}
    with open("blurb_log.jsonl", "a") as f:
        f.write(json.dumps(log_entry) + "\n")
    return f"Logged: {vote}"


# Create custom theme
tufte_theme = TufteInspired()

# Create Gradio interface
with gr.Blocks(theme=tufte_theme) as demo:
    gr.Markdown("