Adrien Banse committed on
Commit
d23d1fc
·
1 Parent(s): bb4818f

feat: custom number of parameters

Browse files
Files changed (2) hide show
  1. README.md +4 -2
  2. app.py +57 -27
README.md CHANGED
@@ -28,6 +28,8 @@ To do:
28
  - [X] Follow us on LinkedIn
29
  - Share the results of a simulation (e.g. export an image generated with plotly for instance?)
30
  - [x] Add an advanced/expert tab
31
- - True number of tokens
32
- - Expose more inputs like the electricity mix
 
 
33
  - [ ] Idea : "estimate a given prompt impact" function which allows to enter a prompt in a text field and estimate its impacts
 
28
  - [X] Follow us on LinkedIn
29
  - Share the results of a simulation (e.g. export an image generated with plotly for instance?)
30
  - [x] Add an advanced/expert tab
31
+ - [x] True number of tokens
32
+ - [x] Expose more inputs like the electricity mix
33
+ - [ ] Examples of electricity mixes
34
+ - [x] Custom number of parameters
35
  - [ ] Idea : "estimate a given prompt impact" function which allows to enter a prompt in a text field and estimate its impacts
app.py CHANGED
@@ -28,7 +28,7 @@ MODELS = [
28
  ("Anthropic / Claude 3 Sonnet", "anthropic/claude-3-sonnet-20240229"),
29
  ("Anthropic / Claude 3 Haiku", "anthropic/claude-3-haiku-20240307"),
30
  ("Anthropic / Claude 2.1", "anthropic/claude-2.1"),
31
- ("Anthropic / Claude 2", "anthropic/claude-2"),
32
  ("Anthropic / Claude Instant 1.2", "anthropic/claude-instant-1.2"),
33
  ("Mistral AI / Mistral 7B", "mistralai/open-mistral-7b"),
34
  ("Mistral AI / Mixtral 8x7B", "mistralai/open-mixtral-8x7b"),
@@ -67,7 +67,6 @@ def format_indicator(name: str, value: str, unit: str) -> str:
67
 
68
 
69
  def form_output(impacts):
70
-
71
  energy_ = q(impacts.energy.value, impacts.energy.unit)
72
  eq_energy_ = q(impacts.energy.value * 2, 'km')
73
  if energy_ < q("1 kWh"):
@@ -90,8 +89,8 @@ def form_output(impacts):
90
  format_indicator("๐ŸŒ GHG Emissions", f"{gwp_.magnitude:.3g}", gwp_.units),
91
  format_indicator("๐Ÿชจ Abiotic Resources", f"{adpe_.magnitude:.3g}", adpe_.units),
92
  format_indicator("โ›ฝ๏ธ Primary Energy", f"{pe_.magnitude:.3g}", pe_.units),
93
- format_indicator("๐Ÿ”‹ Equivalent energy : distance with a small electric car", f"{eq_energy_.magnitude:.3g}", eq_energy_.units),
94
- format_indicator("๐Ÿฐ Equivalent emissions for 1000 prompts : watching GoT in streaming", f"{eq_gwp_.magnitude:.3g}", eq_gwp_.units)
95
  )
96
 
97
 
@@ -110,16 +109,13 @@ def form(
110
 
111
 
112
  def form_expert(
113
- model_name: str,
 
114
  prompt_generated_tokens: int,
115
  mix_gwp: float,
116
  mix_adpe: float,
117
  mix_pe: float
118
- ):
119
- provider, model_name = model_name.split('/', 1)
120
- model = models.find_model(provider=provider, model_name=model_name)
121
- model_active_params = model.active_parameters or _avg(model.active_parameters_range) # TODO: handle ranges
122
- model_total_params = model.total_parameters or _avg(model.total_parameters_range)
123
  impacts = compute_llm_impacts_expert(
124
  model_active_parameter_count=model_active_params,
125
  model_total_parameter_count=model_total_params,
@@ -132,10 +128,27 @@ def form_expert(
132
  return form_output(impacts)
133
 
134
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
  with gr.Blocks() as demo:
136
 
137
  ### TITLE
138
-
139
  gr.Markdown("""
140
  # 🌱 EcoLogits Calculator
141
 
@@ -144,7 +157,7 @@ with gr.Blocks() as demo:
144
 
145
  Read the documentation:
146
  [ecologits.ai](https://ecologits.ai) | โญ๏ธ us on GitHub: [genai-impact/ecologits](https://github.com/genai-impact/ecologits) |
147
- Follow us on Linkedin ✅: [GenAI Impact](https://www.linkedin.com/company/genai-impact/posts/?feedView=all)
148
  """)
149
 
150
  ### SIMPLE CALCULATOR
@@ -159,7 +172,7 @@ with gr.Blocks() as demo:
159
  label="Model name",
160
  value="openai/gpt-3.5-turbo",
161
  filterable=True,
162
- )
163
  prompt = gr.Dropdown(
164
  PROMPTS,
165
  label="Example prompt",
@@ -205,11 +218,28 @@ with gr.Blocks() as demo:
205
  ## 🤓 Expert mode
206
  """)
207
  model = gr.Dropdown(
208
- MODELS,
209
  label="Model name",
210
  value="openai/gpt-3.5-turbo",
211
  filterable=True,
 
212
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
213
  tokens = gr.Number(
214
  label="Output tokens",
215
  value=100
@@ -248,13 +278,13 @@ with gr.Blocks() as demo:
248
  submit_btn = gr.Button("Submit")
249
  submit_btn.click(
250
  fn=form_expert,
251
- inputs=[model, tokens, mix_gwp, mix_adpe, mix_pe],
252
  outputs=[energy, gwp, adpe, pe]
253
  )
254
 
255
  ### METHOD QUICK EXPLANATION
256
  with gr.Tab('Methodology'):
257
- gr.Markdown("""##📖 Coming soon""")
258
 
259
  ### INFORMATION ABOUT INDICATORS
260
  with gr.Accordion("📊 More about the indicators", open = False):
@@ -269,20 +299,20 @@ with gr.Blocks() as demo:
269
  with gr.Accordion("📉 How to reduce / limit these impacts ?", open = False):
270
  gr.Markdown("""
271
 
272
- * โ“ **Fundamental rule** : Show **sobriety** on the uses of (generative) AI :
273
- * Questionning the usefulness of the project ;
274
- * Estimating impacts of the project ;
275
- * Evaluating the project purpose ;
276
- * Restricting the use case to the desired purposes
277
 
278
- * 🦾 On the hardware side :
279
- * If you can, try to relocate the computing in low emissions and/or energy efficient datacenters
280
 
281
  * 🤖 On the ML side :
282
- * Develop a zero-shot learning approach for general tasks ;
283
- * Prefer the smaller and yet well-performing models (using number of parameters for example)
284
- * If a specialization is needed, always prefer fine-tuning an existing model than re-training one from scratch ;
285
- * During model inference, try caching the most popular prompts ("hey, tell me a joke about ...")
286
 
287
  """)
288
 
 
28
  ("Anthropic / Claude 3 Sonnet", "anthropic/claude-3-sonnet-20240229"),
29
  ("Anthropic / Claude 3 Haiku", "anthropic/claude-3-haiku-20240307"),
30
  ("Anthropic / Claude 2.1", "anthropic/claude-2.1"),
31
+ ("Anthropic / Claude 2.0", "anthropic/claude-2.0"),
32
  ("Anthropic / Claude Instant 1.2", "anthropic/claude-instant-1.2"),
33
  ("Mistral AI / Mistral 7B", "mistralai/open-mistral-7b"),
34
  ("Mistral AI / Mixtral 8x7B", "mistralai/open-mixtral-8x7b"),
 
67
 
68
 
69
  def form_output(impacts):
 
70
  energy_ = q(impacts.energy.value, impacts.energy.unit)
71
  eq_energy_ = q(impacts.energy.value * 2, 'km')
72
  if energy_ < q("1 kWh"):
 
89
  format_indicator("๐ŸŒ GHG Emissions", f"{gwp_.magnitude:.3g}", gwp_.units),
90
  format_indicator("๐Ÿชจ Abiotic Resources", f"{adpe_.magnitude:.3g}", adpe_.units),
91
  format_indicator("โ›ฝ๏ธ Primary Energy", f"{pe_.magnitude:.3g}", pe_.units),
92
+ format_indicator("๐Ÿ”‹ Equivalent energy: distance with a small electric car", f"{eq_energy_.magnitude:.3g}", eq_energy_.units),
93
+ format_indicator("๐Ÿฐ Equivalent emissions for 1000 prompts: watching GoT in streaming", f"{eq_gwp_.magnitude:.3g}", eq_gwp_.units)
94
  )
95
 
96
 
 
109
 
110
 
111
  def form_expert(
112
+ model_active_params: float,
113
+ model_total_params: float,
114
  prompt_generated_tokens: int,
115
  mix_gwp: float,
116
  mix_adpe: float,
117
  mix_pe: float
118
+ ):
 
 
 
 
119
  impacts = compute_llm_impacts_expert(
120
  model_active_parameter_count=model_active_params,
121
  model_total_parameter_count=model_total_params,
 
128
  return form_output(impacts)
129
 
130
 
131
+ CUSTOM = "Custom"
132
+ def custom():
133
+ return CUSTOM
134
+
135
+ def model_active_params_fn(model_name: str, n_param: float):
136
+ if model_name == CUSTOM:
137
+ return n_param
138
+ provider, model_name = model_name.split('/', 1)
139
+ model = models.find_model(provider=provider, model_name=model_name)
140
+ return model.active_parameters or _avg(model.active_parameters_range)
141
+
142
+ def model_total_params_fn(model_name: str, n_param: float):
143
+ if model_name == CUSTOM:
144
+ return n_param
145
+ provider, model_name = model_name.split('/', 1)
146
+ model = models.find_model(provider=provider, model_name=model_name)
147
+ return model.total_parameters or _avg(model.total_parameters_range)
148
+
149
  with gr.Blocks() as demo:
150
 
151
  ### TITLE
 
152
  gr.Markdown("""
153
  # 🌱 EcoLogits Calculator
154
 
 
157
 
158
  Read the documentation:
159
  [ecologits.ai](https://ecologits.ai) | โญ๏ธ us on GitHub: [genai-impact/ecologits](https://github.com/genai-impact/ecologits) |
160
+ ✅ Follow us on Linkedin: [GenAI Impact](https://www.linkedin.com/company/genai-impact/posts/?feedView=all)
161
  """)
162
 
163
  ### SIMPLE CALCULATOR
 
172
  label="Model name",
173
  value="openai/gpt-3.5-turbo",
174
  filterable=True,
175
+ )
176
  prompt = gr.Dropdown(
177
  PROMPTS,
178
  label="Example prompt",
 
218
  ## 🤓 Expert mode
219
  """)
220
  model = gr.Dropdown(
221
+ MODELS + [CUSTOM],
222
  label="Model name",
223
  value="openai/gpt-3.5-turbo",
224
  filterable=True,
225
+ interactive=True
226
  )
227
+ model_active_params = gr.Number(
228
+ label="Number of millions of active parameters",
229
+ value=45.0,
230
+ interactive=True
231
+ )
232
+ model_total_params = gr.Number(
233
+ label="Number of millions of total parameters",
234
+ value=45.0,
235
+ interactive=True
236
+ )
237
+
238
+ model.change(fn=model_active_params_fn, inputs=[model, model_active_params], outputs=[model_active_params])
239
+ model.change(fn=model_total_params_fn, inputs=[model, model_total_params], outputs=[model_total_params])
240
+ model_active_params.input(fn=custom, outputs=[model])
241
+ model_total_params.input(fn=custom, outputs=[model])
242
+
243
  tokens = gr.Number(
244
  label="Output tokens",
245
  value=100
 
278
  submit_btn = gr.Button("Submit")
279
  submit_btn.click(
280
  fn=form_expert,
281
+ inputs=[model_active_params, model_total_params, tokens, mix_gwp, mix_adpe, mix_pe],
282
  outputs=[energy, gwp, adpe, pe]
283
  )
284
 
285
  ### METHOD QUICK EXPLANATION
286
  with gr.Tab('Methodology'):
287
+ gr.Markdown("""📖 Coming soon""")
288
 
289
  ### INFORMATION ABOUT INDICATORS
290
  with gr.Accordion("📊 More about the indicators", open = False):
 
299
  with gr.Accordion("📉 How to reduce / limit these impacts ?", open = False):
300
  gr.Markdown("""
301
 
302
+ * โ“ **Fundamental rule**: Show **sobriety** on the uses of (generative) AI
303
+ * Questionning the usefulness of the project;
304
+ * Estimating impacts of the project;
305
+ * Evaluating the project purpose;
306
+ * Restricting the use case to the desired purposes.
307
 
308
+ * 🦾 On the hardware side
309
+ * If you can, try to relocate the computing in low emissions and/or energy efficient datacenters.
310
 
311
  * 🤖 On the ML side :
312
+ * Develop a zero-shot learning approach for general tasks;
313
+ * Prefer the smaller and yet well-performing models (using number of parameters for example);
314
+ * If a specialization is needed, always prefer fine-tuning an existing model than re-training one from scratch;
315
+ * During model inference, try caching the most popular prompts ("hey, tell me a joke about ...").
316
 
317
  """)
318