davanstrien (HF staff) committed
Commit e6d141a · 1 Parent(s): 0537fbe

chore: Refactor code for improved UI layout and readability

Files changed (1)
  1. app.py +35 -116
app.py CHANGED
@@ -1,72 +1,23 @@
-import hashlib
+import gradio as gr
 import json
-import os
-import random
-import uuid
 from datetime import datetime
-from pathlib import Path
-
-import gradio as gr
-from huggingface_hub import CommitScheduler, get_token, login
-from openai import OpenAI
-
-from prompts import basic_prompt, detailed_genre_description_prompt
 from theme import TufteInspired
+import uuid
+from huggingface_hub import InferenceClient
+from openai import OpenAI
+from huggingface_hub import get_token, login
+from prompts import detailed_genre_description_prompt, basic_prompt
+import random
+import os
 
 # Ensure you're logged in to Hugging Face
-login(os.getenv("HF_TOKEN"))
-
-# Define available models
-MODELS = [
-    "meta-llama/Meta-Llama-3-70B-Instruct",
-    "mistralai/Mixtral-8x7B-Instruct-v0.1",
-    "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-]
+login(get_token())
 
-
-def get_random_model():
-    return random.choice(MODELS)
-
-
-def create_client(model_id):
-    return OpenAI(
-        base_url=f"https://api-inference.huggingface.co/models/{model_id}/v1",
-        api_key=get_token(),
-    )
-
-
-# Set up dataset storage
-dataset_folder = Path("dataset")
-dataset_folder.mkdir(exist_ok=True)
-
-
-# Function to get the latest dataset file
-def get_latest_dataset_file():
-    files = list(dataset_folder.glob("data_*.jsonl"))
-    return max(files, key=os.path.getctime) if files else None
-
-
-# Check for existing dataset and create or append to it
-if latest_file := get_latest_dataset_file():
-    dataset_file = latest_file
-    print(f"Appending to existing dataset file: {dataset_file}")
-else:
-    dataset_file = dataset_folder / f"data_{uuid.uuid4()}.jsonl"
-    print(f"Creating new dataset file: {dataset_file}")
-
-# Set up CommitScheduler for dataset uploads
-repo_id = "davanstrien/summer-reading-preferences"
-scheduler = CommitScheduler(
-    repo_id=repo_id,
-    repo_type="dataset",
-    folder_path=dataset_folder,
-    path_in_repo="data",
-    every=1,  # Upload every minute
+client = OpenAI(
+    base_url="https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct/v1",
+    api_key=get_token(),
 )
 
-# Global dictionary to store votes
-votes = {}
-
 
 def generate_prompt():
     if random.choice([True, False]):
@@ -82,8 +33,6 @@ def get_and_store_prompt():
 
 
 def generate_blurb(prompt):
-    model_id = get_random_model()
-    client = create_client(model_id)
     max_tokens = random.randint(100, 1000)
     chat_completion = client.chat.completions.create(
         model="tgi",
@@ -96,44 +45,23 @@ def generate_blurb(prompt):
     full_text = ""
     for message in chat_completion:
         full_text += message.choices[0].delta.content
-        yield full_text, model_id
-    return full_text, model_id  # Return final result with model_id
+        yield full_text
 
 
-def generate_vote_id(user_id, blurb):
-    return hashlib.md5(f"{user_id}:{blurb}".encode()).hexdigest()
-
-
-def log_blurb_and_vote(
-    prompt,
-    blurb,
-    vote,
-    model_id,
-    user_info: gr.OAuthProfile | None,
-):
+# Function to log blurb and vote
+def log_blurb_and_vote(prompt, blurb, vote, user_info: gr.OAuthProfile | None, *args):
     user_id = user_info.username if user_info is not None else str(uuid.uuid4())
-    vote_id = generate_vote_id(user_id, blurb)
-
-    if vote_id in votes:
-        gr.Info("You've already voted on this blurb!")
-        return None, gr.Row.update(visible=False)
-
-    votes[vote_id] = vote
-
     log_entry = {
         "timestamp": datetime.now().isoformat(),
         "prompt": prompt,
         "blurb": blurb,
         "vote": vote,
         "user_id": user_id,
-        "model_id": model_id,
     }
-    with scheduler.lock:
-        with dataset_file.open("a") as f:
-            f.write(json.dumps(log_entry) + "\n")
-
-    gr.Info("Thank you for voting! Your feedback will be synced to the dataset.")
-    return f"Logged: {vote} by user {user_id}", gr.Row.update(visible=False)
+    with open("blurb_log.jsonl", "a") as f:
+        f.write(json.dumps(log_entry) + "\n")
+    gr.Info("Thank you for voting!")
+    return f"Logged: {vote} by user {user_id}"
 
 
 # Create custom theme
@@ -144,41 +72,34 @@ with gr.Blocks(theme=tufte_theme) as demo:
     gr.Markdown("<h1 style='text-align: center;'>Would you read this book?</h1>")
     gr.Markdown(
         """<p style='text-align: center;'>Looking for your next summer read?
-        Would you read a book based on this LLM generated blurb? <br> Your vote will be added to <a href="https://huggingface.co/datasets/your-username/your-dataset-repo">this</a> Hugging Face dataset</p>"""
+        Would you read a book based on this LLM generated blurb? <br> Your vote will be added to <a href="https://example.com">this</a> Hugging Face dataset</p>"""
     )
 
-    with gr.Row():
-        login_btn = gr.LoginButton(size="sm")
+    # Add the login button
+    login_btn = gr.LoginButton()
+
     with gr.Row():
         generate_btn = gr.Button("Create a book", variant="primary")
 
     prompt_state = gr.State()
     blurb_output = gr.Markdown(label="Book blurb")
-    user_state = gr.State()
-    model_state = gr.State()
 
    with gr.Row(visible=False) as voting_row:
        upvote_btn = gr.Button("👍 would read")
        downvote_btn = gr.Button("👎 wouldn't read")
 
-    vote_output = gr.Textbox(label="Vote Status", interactive=False, visible=True)
+    vote_output = gr.Textbox(label="Vote Status", interactive=False, visible=False)
 
-    def generate_and_show(prompt, user_info):
-        return "Generating...", gr.Row.update(visible=False), user_info, None
+    def generate_and_show(prompt):
+        return gr.Markdown.update(value="Generating..."), gr.Row(visible=False)
 
-    def show_voting_buttons(blurb, model_id):
-        return blurb, gr.Row.update(visible=True), model_id
+    def show_voting_buttons(blurb):
+        return blurb, gr.Row(visible=True)
 
     generate_btn.click(get_and_store_prompt, outputs=prompt_state).then(
-        generate_and_show,
-        inputs=[prompt_state, login_btn],
-        outputs=[blurb_output, voting_row, user_state, model_state],
-    ).then(
-        generate_blurb, inputs=prompt_state, outputs=[blurb_output, model_state]
-    ).then(
-        show_voting_buttons,
-        inputs=[blurb_output, model_state],
-        outputs=[blurb_output, voting_row, model_state],
+        generate_and_show, inputs=prompt_state, outputs=[blurb_output, voting_row]
+    ).then(generate_blurb, inputs=prompt_state, outputs=blurb_output).then(
+        show_voting_buttons, inputs=blurb_output, outputs=[blurb_output, voting_row]
     )
 
     upvote_btn.click(
@@ -187,10 +108,9 @@ with gr.Blocks(theme=tufte_theme) as demo:
             prompt_state,
             blurb_output,
             gr.Textbox(value="upvote", visible=False),
-            model_state,
-            user_state,
+            login_btn,
         ],
-        outputs=[vote_output, voting_row],
+        outputs=vote_output,
     )
     downvote_btn.click(
         log_blurb_and_vote,
@@ -198,10 +118,9 @@ with gr.Blocks(theme=tufte_theme) as demo:
             prompt_state,
             blurb_output,
             gr.Textbox(value="downvote", visible=False),
-            model_state,
-            user_state,
+            login_btn,
         ],
-        outputs=[vote_output, voting_row],
+        outputs=vote_output,
     )
 
 if __name__ == "__main__":