vdwow committed on
Commit f8690bd
Parent: 1d2dea8

fix: small plot

Files changed (1)
  1. app.py +549 -546
app.py CHANGED
@@ -1,546 +1,549 @@
1
- import gradio as gr
2
-
3
- import requests
4
- from bs4 import BeautifulSoup
5
-
6
- import tiktoken
7
-
8
- from ecologits.tracers.utils import compute_llm_impacts, _avg
9
- from ecologits.impacts.llm import compute_llm_impacts as compute_llm_impacts_expert
10
- from ecologits.impacts.llm import IF_ELECTRICITY_MIX_GWP, IF_ELECTRICITY_MIX_ADPE, IF_ELECTRICITY_MIX_PE
11
- from ecologits.model_repository import models
12
-
13
- from src.assets import custom_css
14
- from src.electricity_mix import COUNTRY_CODES, find_electricity_mix
15
- from src.content import (
16
- HERO_TEXT,
17
- ABOUT_TEXT,
18
- CITATION_LABEL,
19
- CITATION_TEXT,
20
- LICENCE_TEXT, METHODOLOGY_TEXT
21
- )
22
- from src.constants import (
23
- PROVIDERS,
24
- OPENAI_MODELS,
25
- ANTHROPIC_MODELS,
26
- COHERE_MODELS,
27
- META_MODELS,
28
- MISTRALAI_MODELS,
29
- PROMPTS,
30
- CLOSED_SOURCE_MODELS,
31
- MODELS,
32
- )
33
- from src.utils import (
34
- format_impacts,
35
- format_impacts_expert,
36
- format_energy_eq_physical_activity,
37
- PhysicalActivity,
38
- format_energy_eq_electric_vehicle,
39
- format_gwp_eq_streaming, format_energy_eq_electricity_production, EnergyProduction,
40
- format_gwp_eq_airplane_paris_nyc, format_energy_eq_electricity_consumption_ireland,
41
- df_elec_mix_for_plot
42
- )
43
-
44
- CUSTOM = "Custom"
45
-
46
- tokenizer = tiktoken.get_encoding('cl100k_base')
47
-
48
- def model_list(provider: str) -> gr.Dropdown:
49
- if provider == "openai":
50
- return gr.Dropdown(
51
- OPENAI_MODELS,
52
- label="Model",
53
- value=OPENAI_MODELS[0][1],
54
- filterable=True,
55
- )
56
- elif provider == "anthropic":
57
- return gr.Dropdown(
58
- ANTHROPIC_MODELS,
59
- label="Model",
60
- value=ANTHROPIC_MODELS[0][1],
61
- filterable=True,
62
- )
63
- elif provider == "cohere":
64
- return gr.Dropdown(
65
- COHERE_MODELS,
66
- label="Model",
67
- value=COHERE_MODELS[0][1],
68
- filterable=True,
69
- )
70
- elif provider == "huggingface_hub/meta":
71
- return gr.Dropdown(
72
- META_MODELS,
73
- label="Model",
74
- value=META_MODELS[0][1],
75
- filterable=True,
76
- )
77
- elif provider == "mistralai":
78
- return gr.Dropdown(
79
- MISTRALAI_MODELS,
80
- label="Model",
81
- value=MISTRALAI_MODELS[0][1],
82
- filterable=True,
83
- )
84
-
85
-
86
- def custom():
87
- return CUSTOM
88
-
89
- def tiktoken_len(text):
90
- tokens = tokenizer.encode(
91
- text,
92
- disallowed_special=()
93
- )
94
- return len(tokens)
95
-
96
- def model_active_params_fn(model_name: str, n_param: float):
97
- if model_name == CUSTOM:
98
- return n_param
99
- provider, model_name = model_name.split('/', 1)
100
- model = models.find_model(provider=provider, model_name=model_name)
101
- return model.active_parameters or _avg(model.active_parameters_range)
102
-
103
-
104
- def model_total_params_fn(model_name: str, n_param: float):
105
- if model_name == CUSTOM:
106
- return n_param
107
- provider, model_name = model_name.split('/', 1)
108
- model = models.find_model(provider=provider, model_name=model_name)
109
- return model.total_parameters or _avg(model.total_parameters_range)
110
-
111
-
112
- def mix_fn(country_code: str, mix_adpe: float, mix_pe: float, mix_gwp: float):
113
- if country_code == CUSTOM:
114
- return mix_adpe, mix_pe, mix_gwp
115
- return find_electricity_mix(country_code)
116
-
117
- with gr.Blocks(css=custom_css) as demo:
118
- gr.Markdown(HERO_TEXT)
119
-
120
- with gr.Tab("🧮 Calculator"):
121
- with gr.Row():
122
- gr.Markdown("# Estimate the environmental impacts of LLM inference")
123
- with gr.Row():
124
- input_provider = gr.Dropdown(
125
- PROVIDERS,
126
- label="Provider",
127
- value=PROVIDERS[0][1],
128
- filterable=True,
129
- )
130
-
131
- input_model = gr.Dropdown(
132
- OPENAI_MODELS,
133
- label="Model",
134
- value=OPENAI_MODELS[0][1],
135
- filterable=True,
136
- )
137
- input_provider.change(model_list, input_provider, input_model)
138
-
139
- input_prompt = gr.Dropdown(
140
- PROMPTS,
141
- label="Example prompt",
142
- value=400,
143
- )
144
-
145
-
146
- @gr.render(inputs=[input_provider, input_model, input_prompt])
147
- def render_simple(provider, model, prompt):
148
- if provider.startswith("huggingface_hub"):
149
- provider = provider.split("/")[0]
150
- if models.find_model(provider, model) is not None:
151
- impacts = compute_llm_impacts(
152
- provider=provider,
153
- model_name=model,
154
- output_token_count=prompt,
155
- request_latency=100000
156
- )
157
- impacts = format_impacts(impacts)
158
-
159
- # Inference impacts
160
- with gr.Blocks():
161
- if f"{provider}/{model}" in CLOSED_SOURCE_MODELS:
162
- with gr.Row():
163
- gr.Markdown("""<p> ⚠️ You have selected a closed-source model. Please be aware that
164
- some providers do not fully disclose information about such models. Consequently, our
165
- estimates have a lower precision for closed-source models. For further details, refer to
166
- our FAQ in the About section.
167
- </p>""", elem_classes="warning-box")
168
-
169
- with gr.Row():
170
- gr.Markdown("""
171
- ## Environmental impacts
172
-
173
- To understand how the environmental impacts are computed go to the 📖 Methodology tab.
174
- """)
175
- with gr.Row():
176
- with gr.Column(scale=1, min_width=220):
177
- gr.Markdown(f"""
178
- <h2 align="center">⚡️ Energy</h2>
179
- $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
180
- <p align="center"><i>Evaluates the electricity consumption<i></p><br>
181
- """)
182
- with gr.Column(scale=1, min_width=220):
183
- gr.Markdown(f"""
184
- <h2 align="center">🌍️ GHG Emissions</h2>
185
- $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
186
- <p align="center"><i>Evaluates the effect on global warming<i></p><br>
187
- """)
188
- with gr.Column(scale=1, min_width=220):
189
- gr.Markdown(f"""
190
- <h2 align="center">🪨 Abiotic Resources</h2>
191
- $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
192
- <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
193
- """)
194
- with gr.Column(scale=1, min_width=220):
195
- gr.Markdown(f"""
196
- <h2 align="center">⛽️ Primary Energy</h2>
197
- $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
198
- <p align="center"><i>Evaluates the use of energy resources<i></p><br>
199
- """)
200
-
201
- # Impacts equivalents
202
- with gr.Blocks():
203
- with gr.Row():
204
- gr.Markdown("""
205
- ---
206
- ## That's equivalent to...
207
-
208
- Making this request to the LLM is equivalent to the following actions.
209
- """)
210
- with gr.Row():
211
- physical_activity, distance = format_energy_eq_physical_activity(impacts.energy)
212
- if physical_activity == PhysicalActivity.WALKING:
213
- physical_activity = "🚶 " + physical_activity.capitalize()
214
- if physical_activity == PhysicalActivity.RUNNING:
215
- physical_activity = "🏃 " + physical_activity.capitalize()
216
- with gr.Column(scale=1, min_width=300):
217
- gr.Markdown(f"""
218
- <h2 align="center">{physical_activity} $$ \Large {distance.magnitude:.3g}\ {distance.units} $$ </h2>
219
- <p align="center"><i>Based on energy consumption<i></p><br>
220
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
221
-
222
- ev_eq = format_energy_eq_electric_vehicle(impacts.energy)
223
- with gr.Column(scale=1, min_width=300):
224
- gr.Markdown(f"""
225
- <h2 align="center">🔋 Electric Vehicle $$ \Large {ev_eq.magnitude:.3g}\ {ev_eq.units} $$ </h2>
226
- <p align="center"><i>Based on energy consumption<i></p><br>
227
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
228
-
229
- streaming_eq = format_gwp_eq_streaming(impacts.gwp)
230
- with gr.Column(scale=1, min_width=300):
231
- gr.Markdown(f"""
232
- <h2 align="center">⏯️ Streaming $$ \Large {streaming_eq.magnitude:.3g}\ {streaming_eq.units} $$ </h2>
233
- <p align="center"><i>Based on GHG emissions<i></p><br>
234
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
235
-
236
- # Bigger scale impacts equivalent
237
- with gr.Blocks():
238
- with gr.Row():
239
- gr.Markdown("""
240
- ## What if 1% of the planet does this request everyday for 1 year?
241
-
242
- If this use case is largely deployed around the world the equivalent impacts would be. (The
243
- impacts of this request x 1% of 8 billion people x 365 days in a year.)
244
- """)
245
- with gr.Row():
246
- electricity_production, count = format_energy_eq_electricity_production(impacts.energy)
247
- if electricity_production == EnergyProduction.NUCLEAR:
248
- emoji = "☢️"
249
- name = "Nuclear power plants"
250
- if electricity_production == EnergyProduction.WIND:
251
- emoji = "💨️ "
252
- name = "Wind turbines"
253
- with gr.Column(scale=1, min_width=300):
254
- gr.Markdown(f"""
255
- <h2 align="center">{emoji} $$ \Large {count.magnitude:.0f} $$ {name} <span style="font-size: 12px">(yearly)</span></h2>
256
- <p align="center"><i>Based on electricity consumption<i></p><br>
257
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
258
-
259
- ireland_count = format_energy_eq_electricity_consumption_ireland(impacts.energy)
260
- with gr.Column(scale=1, min_width=300):
261
- gr.Markdown(f"""
262
- <h2 align="center">🇮🇪 $$ \Large {ireland_count.magnitude:.2g} $$ x Ireland <span style="font-size: 12px">(yearly ⚡️ cons.)</span></h2>
263
- <p align="center"><i>Based on electricity consumption<i></p><br>
264
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
265
-
266
- paris_nyc_airplane = format_gwp_eq_airplane_paris_nyc(impacts.gwp)
267
- with gr.Column(scale=1, min_width=300):
268
- gr.Markdown(f"""
269
- <h2 align="center">✈️ $$ \Large {paris_nyc_airplane.magnitude:,.0f} $$ Paris ↔ NYC </h2>
270
- <p align="center"><i>Based on GHG emissions<i></p><br>
271
- """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
272
-
273
- with gr.Tab("🤓 Expert Mode"):
274
-
275
- with gr.Row():
276
- gr.Markdown("# 🤓 Expert mode")
277
-
278
- model = gr.Dropdown(
279
- MODELS + [CUSTOM],
280
- label="Model name",
281
- value="openai/gpt-3.5-turbo",
282
- filterable=True,
283
- interactive=True
284
- )
285
- input_model_active_params = gr.Number(
286
- label="Number of billions of active parameters",
287
- value=45.0,
288
- interactive=True
289
- )
290
- input_model_total_params = gr.Number(
291
- label="Number of billions of total parameters",
292
- value=45.0,
293
- interactive=True
294
- )
295
-
296
- model.change(fn=model_active_params_fn,
297
- inputs=[model, input_model_active_params],
298
- outputs=[input_model_active_params])
299
- model.change(fn=model_total_params_fn,
300
- inputs=[model, input_model_total_params],
301
- outputs=[input_model_total_params])
302
- input_model_active_params.input(fn=custom, outputs=[model])
303
- input_model_total_params.input(fn=custom, outputs=[model])
304
-
305
- input_tokens = gr.Number(
306
- label="Output tokens",
307
- value=100
308
- )
309
-
310
- mix = gr.Dropdown(
311
- COUNTRY_CODES + [CUSTOM],
312
- label="Location",
313
- value="WOR",
314
- filterable=True,
315
- interactive=True
316
- )
317
- input_mix_gwp = gr.Number(
318
- label="Electricity mix - GHG emissions [kgCO2eq / kWh]",
319
- value=IF_ELECTRICITY_MIX_GWP,
320
- interactive=True
321
- )
322
- input_mix_adpe = gr.Number(
323
- label="Electricity mix - Abiotic resources [kgSbeq / kWh]",
324
- value=IF_ELECTRICITY_MIX_ADPE,
325
- interactive=True
326
- )
327
- input_mix_pe = gr.Number(
328
- label="Electricity mix - Primary energy [MJ / kWh]",
329
- value=IF_ELECTRICITY_MIX_PE,
330
- interactive=True
331
- )
332
-
333
- mix.change(fn=mix_fn,
334
- inputs=[mix, input_mix_adpe, input_mix_pe, input_mix_gwp],
335
- outputs=[input_mix_adpe, input_mix_pe, input_mix_gwp])
336
- input_mix_gwp.input(fn=custom, outputs=mix)
337
- input_mix_adpe.input(fn=custom, outputs=mix)
338
- input_mix_pe.input(fn=custom, outputs=mix)
339
-
340
-
341
- @gr.render(inputs=[
342
- input_model_active_params,
343
- input_model_total_params,
344
- input_tokens,
345
- input_mix_gwp,
346
- input_mix_adpe,
347
- input_mix_pe
348
- ])
349
- def render_expert(
350
- model_active_params,
351
- model_total_params,
352
- tokens,
353
- mix_gwp,
354
- mix_adpe,
355
- mix_pe
356
- ):
357
- impacts = compute_llm_impacts_expert(
358
- model_active_parameter_count=model_active_params,
359
- model_total_parameter_count=model_total_params,
360
- output_token_count=tokens,
361
- request_latency=100000,
362
- if_electricity_mix_gwp=mix_gwp,
363
- if_electricity_mix_adpe=mix_adpe,
364
- if_electricity_mix_pe=mix_pe
365
- )
366
- impacts, usage, embodied = format_impacts_expert(impacts)
367
-
368
- with gr.Blocks():
369
-
370
- with gr.Row():
371
- gr.Markdown(f"""
372
- <h2 align = "center">Environmental impacts</h2>
373
- """)
374
-
375
- with gr.Row():
376
- with gr.Column(scale=1, min_width=220):
377
- gr.Markdown(f"""
378
- <h2 align="center">⚡️ Energy</h2>
379
- $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
380
- <p align="center"><i>Evaluates the electricity consumption<i></p><br>
381
- """)
382
-
383
- with gr.Column(scale=1, min_width=220):
384
- gr.Markdown(f"""
385
- <h2 align="center">🌍️ GHG Emissions</h2>
386
- $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
387
- <p align="center"><i>Evaluates the effect on global warming<i></p><br>
388
- $$ \Large {100*usage.gwp.value / (usage.gwp.value + embodied.gwp.value):.3} $$
389
- <p align="center"><i>% of GWP by usage (vs embodied)<i></p><br>
390
- """)
391
-
392
- with gr.Column(scale=1, min_width=220):
393
- gr.Markdown(f"""
394
- <h2 align="center">🪨 Abiotic Resources</h2>
395
- $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
396
- <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
397
- $$ \Large {100*usage.adpe.value / (usage.adpe.value + embodied.adpe.value):.3} $$
398
- <p align="center"><i>% of ADPE by usage (vs embodied)<i></p><br>
399
- """)
400
-
401
- with gr.Column(scale=1, min_width=220):
402
- gr.Markdown(f"""
403
- <h2 align="center">⛽️ Primary Energy</h2>
404
- $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
405
- <p align="center"><i>Evaluates the use of energy resources<i></p><br>
406
- $$ \Large {100*usage.pe.value / (usage.pe.value + embodied.pe.value):.3} $$
407
- <p align="center"><i>% of PE by usage (vs embodied)<i></p><br>
408
- """)
409
-
410
- with gr.Row():
411
- gr.Markdown(f"""
412
- <h2 align="center">How can location impact the footprint ?</h2>
413
- """)
414
-
415
- with gr.Row():
416
- gr.BarPlot(df_elec_mix_for_plot,
417
- x='country',
418
- y='electricity_mix',
419
- sort='y',
420
- x_title=None,
421
- y_title='electricity mix in gCO2eq / kWh')
422
-
423
- with gr.Tab("🔍 Evaluate your own usage"):
424
-
425
- with gr.Row():
426
- gr.Markdown("""
427
- # 🔍 Evaluate your own usage
428
- ⚠️ For now, only ChatGPT conversation import is available.
429
- You can always try out other models - however results might be inaccurate due to fixed parameters, such as tokenization method.
430
- """)
431
-
432
- def process_input(text):
433
-
434
- r = requests.get(text, verify=False)
435
-
436
- soup = BeautifulSoup(r.text, "html.parser")
437
- list_text = str(soup).split('parts":["')
438
- s = ''
439
- for item in list_text[1:int(len(list_text)/2)]:
440
- if list_text.index(item)%2 == 1:
441
- s = s + item.split('"]')[0]
442
-
443
- amout_token = tiktoken_len(s)
444
-
445
- return amout_token
446
-
447
- def compute_own_impacts(amount_token, model):
448
- provider = model.split('/')[0].lower()
449
- model = model.split('/')[1]
450
- impacts = compute_llm_impacts(
451
- provider=provider,
452
- model_name=model,
453
- output_token_count=amount_token,
454
- request_latency=100000
455
- )
456
-
457
- impacts = format_impacts(impacts)
458
-
459
- energy = f"""
460
- <h2 align="center">⚡️ Energy</h2>
461
- $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
462
- <p align="center"><i>Evaluates the electricity consumption<i></p><br>
463
- """
464
-
465
- gwp = f"""
466
- <h2 align="center">🌍️ GHG Emissions</h2>
467
- $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
468
- <p align="center"><i>Evaluates the effect on global warming<i></p><br>
469
- """
470
-
471
- adp = f"""
472
- <h2 align="center">🪨 Abiotic Resources</h2>
473
- $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
474
- <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
475
- """
476
-
477
- pe = f"""
478
- <h2 align="center">⛽️ Primary Energy</h2>
479
- $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
480
- <p align="center"><i>Evaluates the use of energy resources<i></p><br>
481
- """
482
-
483
- return energy, gwp, adp, pe
484
-
485
- def combined_function(text, model):
486
- n_token = process_input(text)
487
- energy, gwp, adp, pe = compute_own_impacts(n_token, model)
488
- return n_token, energy, gwp, adp, pe
489
-
490
- with gr.Blocks():
491
-
492
- text_input = gr.Textbox(label="Paste the URL here (must be on https://chatgpt.com/share/xxxx format)")
493
- model = gr.Dropdown(
494
- MODELS,
495
- label="Model name",
496
- value="openai/gpt-4o",
497
- filterable=True,
498
- interactive=True
499
- )
500
-
501
- process_button = gr.Button("Estimate this usage footprint")
502
-
503
- with gr.Accordion("ℹ️ Infos", open=False):
504
- n_token = gr.Textbox(label="Total amount of tokens :")
505
-
506
- with gr.Row():
507
- with gr.Column(scale=1, min_width=220):
508
- energy = gr.Markdown()
509
- with gr.Column(scale=1, min_width=220):
510
- gwp = gr.Markdown()
511
- with gr.Column(scale=1, min_width=220):
512
- adp = gr.Markdown()
513
- with gr.Column(scale=1, min_width=220):
514
- pe = gr.Markdown()
515
-
516
- process_button.click(
517
- fn=combined_function,
518
- inputs=[text_input, model],
519
- outputs=[n_token, energy, gwp, adp, pe]
520
- )
521
-
522
- with gr.Tab("📖 Methodology"):
523
- gr.Markdown(METHODOLOGY_TEXT,
524
- elem_classes="descriptive-text",
525
- latex_delimiters=[
526
- {"left": "$$", "right": "$$", "display": True},
527
- {"left": "$", "right": "$", "display": False}
528
- ])
529
-
530
- with gr.Tab("ℹ️ About"):
531
- gr.Markdown(ABOUT_TEXT, elem_classes="descriptive-text",)
532
-
533
- with gr.Accordion("📚 Citation", open=False):
534
- gr.Textbox(
535
- value=CITATION_TEXT,
536
- label=CITATION_LABEL,
537
- interactive=False,
538
- show_copy_button=True,
539
- lines=len(CITATION_TEXT.split('\n')),
540
- )
541
-
542
- # License
543
- gr.Markdown(LICENCE_TEXT)
544
-
545
- if __name__ == '__main__':
546
- demo.launch()
 
 
 
 
1
+ import gradio as gr
2
+
3
+ import requests
4
+ from bs4 import BeautifulSoup
5
+
6
+ import tiktoken
7
+
8
+ from ecologits.tracers.utils import compute_llm_impacts, _avg
9
+ from ecologits.impacts.llm import compute_llm_impacts as compute_llm_impacts_expert
10
+ from ecologits.impacts.llm import IF_ELECTRICITY_MIX_GWP, IF_ELECTRICITY_MIX_ADPE, IF_ELECTRICITY_MIX_PE
11
+ from ecologits.model_repository import models
12
+
13
+ from src.assets import custom_css
14
+ from src.electricity_mix import COUNTRY_CODES, find_electricity_mix
15
+ from src.content import (
16
+ HERO_TEXT,
17
+ ABOUT_TEXT,
18
+ CITATION_LABEL,
19
+ CITATION_TEXT,
20
+ LICENCE_TEXT, METHODOLOGY_TEXT
21
+ )
22
+ from src.constants import (
23
+ PROVIDERS,
24
+ OPENAI_MODELS,
25
+ ANTHROPIC_MODELS,
26
+ COHERE_MODELS,
27
+ META_MODELS,
28
+ MISTRALAI_MODELS,
29
+ PROMPTS,
30
+ CLOSED_SOURCE_MODELS,
31
+ MODELS,
32
+ )
33
+ from src.utils import (
34
+ format_impacts,
35
+ format_impacts_expert,
36
+ format_energy_eq_physical_activity,
37
+ PhysicalActivity,
38
+ format_energy_eq_electric_vehicle,
39
+ format_gwp_eq_streaming, format_energy_eq_electricity_production, EnergyProduction,
40
+ format_gwp_eq_airplane_paris_nyc, format_energy_eq_electricity_consumption_ireland,
41
+ df_elec_mix_for_plot
42
+ )
43
+
44
+ CUSTOM = "Custom"
45
+
46
+ tokenizer = tiktoken.get_encoding('cl100k_base')
47
+
48
+ def model_list(provider: str) -> gr.Dropdown:
49
+ if provider == "openai":
50
+ return gr.Dropdown(
51
+ OPENAI_MODELS,
52
+ label="Model",
53
+ value=OPENAI_MODELS[0][1],
54
+ filterable=True,
55
+ )
56
+ elif provider == "anthropic":
57
+ return gr.Dropdown(
58
+ ANTHROPIC_MODELS,
59
+ label="Model",
60
+ value=ANTHROPIC_MODELS[0][1],
61
+ filterable=True,
62
+ )
63
+ elif provider == "cohere":
64
+ return gr.Dropdown(
65
+ COHERE_MODELS,
66
+ label="Model",
67
+ value=COHERE_MODELS[0][1],
68
+ filterable=True,
69
+ )
70
+ elif provider == "huggingface_hub/meta":
71
+ return gr.Dropdown(
72
+ META_MODELS,
73
+ label="Model",
74
+ value=META_MODELS[0][1],
75
+ filterable=True,
76
+ )
77
+ elif provider == "mistralai":
78
+ return gr.Dropdown(
79
+ MISTRALAI_MODELS,
80
+ label="Model",
81
+ value=MISTRALAI_MODELS[0][1],
82
+ filterable=True,
83
+ )
84
+
85
+
86
+ def custom():
87
+ return CUSTOM
88
+
89
+ def tiktoken_len(text):
90
+ tokens = tokenizer.encode(
91
+ text,
92
+ disallowed_special=()
93
+ )
94
+ return len(tokens)
95
+
96
+ def model_active_params_fn(model_name: str, n_param: float):
97
+ if model_name == CUSTOM:
98
+ return n_param
99
+ provider, model_name = model_name.split('/', 1)
100
+ model = models.find_model(provider=provider, model_name=model_name)
101
+ return model.active_parameters or _avg(model.active_parameters_range)
102
+
103
+
104
+ def model_total_params_fn(model_name: str, n_param: float):
105
+ if model_name == CUSTOM:
106
+ return n_param
107
+ provider, model_name = model_name.split('/', 1)
108
+ model = models.find_model(provider=provider, model_name=model_name)
109
+ return model.total_parameters or _avg(model.total_parameters_range)
110
+
111
+
112
+ def mix_fn(country_code: str, mix_adpe: float, mix_pe: float, mix_gwp: float):
113
+ if country_code == CUSTOM:
114
+ return mix_adpe, mix_pe, mix_gwp
115
+ return find_electricity_mix(country_code)
116
+
117
+ with gr.Blocks(css=custom_css) as demo:
118
+ gr.Markdown(HERO_TEXT)
119
+
120
+ with gr.Tab("🧮 Calculator"):
121
+ with gr.Row():
122
+ gr.Markdown("# Estimate the environmental impacts of LLM inference")
123
+ with gr.Row():
124
+ input_provider = gr.Dropdown(
125
+ PROVIDERS,
126
+ label="Provider",
127
+ value=PROVIDERS[0][1],
128
+ filterable=True,
129
+ )
130
+
131
+ input_model = gr.Dropdown(
132
+ OPENAI_MODELS,
133
+ label="Model",
134
+ value=OPENAI_MODELS[0][1],
135
+ filterable=True,
136
+ )
137
+ input_provider.change(model_list, input_provider, input_model)
138
+
139
+ input_prompt = gr.Dropdown(
140
+ PROMPTS,
141
+ label="Example prompt",
142
+ value=400,
143
+ )
144
+
145
+
146
+ @gr.render(inputs=[input_provider, input_model, input_prompt])
147
+ def render_simple(provider, model, prompt):
148
+ if provider.startswith("huggingface_hub"):
149
+ provider = provider.split("/")[0]
150
+ if models.find_model(provider, model) is not None:
151
+ impacts = compute_llm_impacts(
152
+ provider=provider,
153
+ model_name=model,
154
+ output_token_count=prompt,
155
+ request_latency=100000
156
+ )
157
+ impacts = format_impacts(impacts)
158
+
159
+ # Inference impacts
160
+ with gr.Blocks():
161
+ if f"{provider}/{model}" in CLOSED_SOURCE_MODELS:
162
+ with gr.Row():
163
+ gr.Markdown("""<p> ⚠️ You have selected a closed-source model. Please be aware that
164
+ some providers do not fully disclose information about such models. Consequently, our
165
+ estimates have a lower precision for closed-source models. For further details, refer to
166
+ our FAQ in the About section.
167
+ </p>""", elem_classes="warning-box")
168
+
169
+ with gr.Row():
170
+ gr.Markdown("""
171
+ ## Environmental impacts
172
+
173
+ To understand how the environmental impacts are computed go to the 📖 Methodology tab.
174
+ """)
175
+ with gr.Row():
176
+ with gr.Column(scale=1, min_width=220):
177
+ gr.Markdown(f"""
178
+ <h2 align="center">⚡️ Energy</h2>
179
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
180
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
181
+ """)
182
+ with gr.Column(scale=1, min_width=220):
183
+ gr.Markdown(f"""
184
+ <h2 align="center">🌍️ GHG Emissions</h2>
185
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
186
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
187
+ """)
188
+ with gr.Column(scale=1, min_width=220):
189
+ gr.Markdown(f"""
190
+ <h2 align="center">🪨 Abiotic Resources</h2>
191
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
192
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
193
+ """)
194
+ with gr.Column(scale=1, min_width=220):
195
+ gr.Markdown(f"""
196
+ <h2 align="center">⛽️ Primary Energy</h2>
197
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
198
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
199
+ """)
200
+
201
+ # Impacts equivalents
202
+ with gr.Blocks():
203
+ with gr.Row():
204
+ gr.Markdown("""
205
+ ---
206
+ ## That's equivalent to...
207
+
208
+ Making this request to the LLM is equivalent to the following actions.
209
+ """)
210
+ with gr.Row():
211
+ physical_activity, distance = format_energy_eq_physical_activity(impacts.energy)
212
+ if physical_activity == PhysicalActivity.WALKING:
213
+ physical_activity = "🚶 " + physical_activity.capitalize()
214
+ if physical_activity == PhysicalActivity.RUNNING:
215
+ physical_activity = "🏃 " + physical_activity.capitalize()
216
+ with gr.Column(scale=1, min_width=300):
217
+ gr.Markdown(f"""
218
+ <h2 align="center">{physical_activity} $$ \Large {distance.magnitude:.3g}\ {distance.units} $$ </h2>
219
+ <p align="center"><i>Based on energy consumption<i></p><br>
220
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
221
+
222
+ ev_eq = format_energy_eq_electric_vehicle(impacts.energy)
223
+ with gr.Column(scale=1, min_width=300):
224
+ gr.Markdown(f"""
225
+ <h2 align="center">🔋 Electric Vehicle $$ \Large {ev_eq.magnitude:.3g}\ {ev_eq.units} $$ </h2>
226
+ <p align="center"><i>Based on energy consumption<i></p><br>
227
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
228
+
229
+ streaming_eq = format_gwp_eq_streaming(impacts.gwp)
230
+ with gr.Column(scale=1, min_width=300):
231
+ gr.Markdown(f"""
232
+ <h2 align="center">⏯️ Streaming $$ \Large {streaming_eq.magnitude:.3g}\ {streaming_eq.units} $$ </h2>
233
+ <p align="center"><i>Based on GHG emissions<i></p><br>
234
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
235
+
236
+ # Bigger scale impacts equivalent
237
+ with gr.Blocks():
238
+ with gr.Row():
239
+ gr.Markdown("""
240
+ ## What if 1% of the planet does this request everyday for 1 year?
241
+
242
+ If this use case is largely deployed around the world the equivalent impacts would be. (The
243
+ impacts of this request x 1% of 8 billion people x 365 days in a year.)
244
+ """)
245
+ with gr.Row():
246
+ electricity_production, count = format_energy_eq_electricity_production(impacts.energy)
247
+ if electricity_production == EnergyProduction.NUCLEAR:
248
+ emoji = "☢️"
249
+ name = "Nuclear power plants"
250
+ if electricity_production == EnergyProduction.WIND:
251
+ emoji = "💨️ "
252
+ name = "Wind turbines"
253
+ with gr.Column(scale=1, min_width=300):
254
+ gr.Markdown(f"""
255
+ <h2 align="center">{emoji} $$ \Large {count.magnitude:.0f} $$ {name} <span style="font-size: 12px">(yearly)</span></h2>
256
+ <p align="center"><i>Based on electricity consumption<i></p><br>
257
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
258
+
259
+ ireland_count = format_energy_eq_electricity_consumption_ireland(impacts.energy)
260
+ with gr.Column(scale=1, min_width=300):
261
+ gr.Markdown(f"""
262
+ <h2 align="center">🇮🇪 $$ \Large {ireland_count.magnitude:.2g} $$ x Ireland <span style="font-size: 12px">(yearly ⚡️ cons.)</span></h2>
263
+ <p align="center"><i>Based on electricity consumption<i></p><br>
264
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
265
+
266
+ paris_nyc_airplane = format_gwp_eq_airplane_paris_nyc(impacts.gwp)
267
+ with gr.Column(scale=1, min_width=300):
268
+ gr.Markdown(f"""
269
+ <h2 align="center">✈️ $$ \Large {paris_nyc_airplane.magnitude:,.0f} $$ Paris ↔ NYC </h2>
270
+ <p align="center"><i>Based on GHG emissions<i></p><br>
271
+ """, latex_delimiters=[{"left": "$$", "right": "$$", "display": False}])
272
+
273
+ with gr.Tab("🤓 Expert Mode"):
274
+
275
+ with gr.Row():
276
+ gr.Markdown("# 🤓 Expert mode")
277
+
278
+ model = gr.Dropdown(
279
+ MODELS + [CUSTOM],
280
+ label="Model name",
281
+ value="openai/gpt-3.5-turbo",
282
+ filterable=True,
283
+ interactive=True
284
+ )
285
+ input_model_active_params = gr.Number(
286
+ label="Number of billions of active parameters",
287
+ value=45.0,
288
+ interactive=True
289
+ )
290
+ input_model_total_params = gr.Number(
291
+ label="Number of billions of total parameters",
292
+ value=45.0,
293
+ interactive=True
294
+ )
295
+
296
+ model.change(fn=model_active_params_fn,
297
+ inputs=[model, input_model_active_params],
298
+ outputs=[input_model_active_params])
299
+ model.change(fn=model_total_params_fn,
300
+ inputs=[model, input_model_total_params],
301
+ outputs=[input_model_total_params])
302
+ input_model_active_params.input(fn=custom, outputs=[model])
303
+ input_model_total_params.input(fn=custom, outputs=[model])
304
+
305
+ input_tokens = gr.Number(
306
+ label="Output tokens",
307
+ value=100
308
+ )
309
+
310
+ mix = gr.Dropdown(
311
+ COUNTRY_CODES + [CUSTOM],
312
+ label="Location",
313
+ value="WOR",
314
+ filterable=True,
315
+ interactive=True
316
+ )
317
+ input_mix_gwp = gr.Number(
318
+ label="Electricity mix - GHG emissions [kgCO2eq / kWh]",
319
+ value=IF_ELECTRICITY_MIX_GWP,
320
+ interactive=True
321
+ )
322
+ input_mix_adpe = gr.Number(
323
+ label="Electricity mix - Abiotic resources [kgSbeq / kWh]",
324
+ value=IF_ELECTRICITY_MIX_ADPE,
325
+ interactive=True
326
+ )
327
+ input_mix_pe = gr.Number(
328
+ label="Electricity mix - Primary energy [MJ / kWh]",
329
+ value=IF_ELECTRICITY_MIX_PE,
330
+ interactive=True
331
+ )
332
+
333
+ mix.change(fn=mix_fn,
334
+ inputs=[mix, input_mix_adpe, input_mix_pe, input_mix_gwp],
335
+ outputs=[input_mix_adpe, input_mix_pe, input_mix_gwp])
336
+ input_mix_gwp.input(fn=custom, outputs=mix)
337
+ input_mix_adpe.input(fn=custom, outputs=mix)
338
+ input_mix_pe.input(fn=custom, outputs=mix)
339
+
340
+
341
+ @gr.render(inputs=[
342
+ input_model_active_params,
343
+ input_model_total_params,
344
+ input_tokens,
345
+ input_mix_gwp,
346
+ input_mix_adpe,
347
+ input_mix_pe
348
+ ])
349
+ def render_expert(
350
+ model_active_params,
351
+ model_total_params,
352
+ tokens,
353
+ mix_gwp,
354
+ mix_adpe,
355
+ mix_pe
356
+ ):
357
+ impacts = compute_llm_impacts_expert(
358
+ model_active_parameter_count=model_active_params,
359
+ model_total_parameter_count=model_total_params,
360
+ output_token_count=tokens,
361
+ request_latency=100000,
362
+ if_electricity_mix_gwp=mix_gwp,
363
+ if_electricity_mix_adpe=mix_adpe,
364
+ if_electricity_mix_pe=mix_pe
365
+ )
366
+ impacts, usage, embodied = format_impacts_expert(impacts)
367
+
368
+ with gr.Blocks():
369
+
370
+ with gr.Row():
371
+ gr.Markdown(f"""
372
+ <h2 align = "center">Environmental impacts</h2>
373
+ """)
374
+
375
+ with gr.Row():
376
+ with gr.Column(scale=1, min_width=220):
377
+ gr.Markdown(f"""
378
+ <h2 align="center">⚡️ Energy</h2>
379
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
380
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
381
+ """)
382
+
383
+ with gr.Column(scale=1, min_width=220):
384
+ gr.Markdown(f"""
385
+ <h2 align="center">🌍️ GHG Emissions</h2>
386
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
387
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
388
+ $$ \Large {100*usage.gwp.value / (usage.gwp.value + embodied.gwp.value):.3} $$
389
+ <p align="center"><i>% of GWP by usage (vs embodied)<i></p><br>
390
+ """)
391
+
392
+ with gr.Column(scale=1, min_width=220):
393
+ gr.Markdown(f"""
394
+ <h2 align="center">🪨 Abiotic Resources</h2>
395
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
396
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
397
+ $$ \Large {100*usage.adpe.value / (usage.adpe.value + embodied.adpe.value):.3} $$
398
+ <p align="center"><i>% of ADPE by usage (vs embodied)<i></p><br>
399
+ """)
400
+
401
+ with gr.Column(scale=1, min_width=220):
402
+ gr.Markdown(f"""
403
+ <h2 align="center">⛽️ Primary Energy</h2>
404
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
405
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
406
+ $$ \Large {100*usage.pe.value / (usage.pe.value + embodied.pe.value):.3} $$
407
+ <p align="center"><i>% of PE by usage (vs embodied)<i></p><br>
408
+ """)
409
+
410
+ with gr.Row():
411
+ gr.Markdown(f"""
412
+ <h2 align="center">How can location impact the footprint ?</h2>
413
+ """)
414
+
415
+ with gr.Row():
416
+ gr.BarPlot(df_elec_mix_for_plot,
417
+ x='country',
418
+ y='electricity_mix',
419
+ sort='y',
420
+ scale=1,
421
+ height=250,
422
+ min_width=400,
423
+ x_title=None,
424
+ y_title='electricity mix in gCO2eq / kWh')
425
+
426
+ with gr.Tab("🔍 Evaluate your own usage"):
427
+
428
+ with gr.Row():
429
+ gr.Markdown("""
430
+ # 🔍 Evaluate your own usage
431
+ ⚠️ For now, only ChatGPT conversation import is available.
432
+ You can always try out other models - however results might be inaccurate due to fixed parameters, such as tokenization method.
433
+ """)
434
+
435
+ def process_input(text):
436
+
437
+ r = requests.get(text, verify=False)
438
+
439
+ soup = BeautifulSoup(r.text, "html.parser")
440
+ list_text = str(soup).split('parts":["')
441
+ s = ''
442
+ for item in list_text[1:int(len(list_text)/2)]:
443
+ if list_text.index(item)%2 == 1:
444
+ s = s + item.split('"]')[0]
445
+
446
+ amout_token = tiktoken_len(s)
447
+
448
+ return amout_token
449
+
450
+ def compute_own_impacts(amount_token, model):
451
+ provider = model.split('/')[0].lower()
452
+ model = model.split('/')[1]
453
+ impacts = compute_llm_impacts(
454
+ provider=provider,
455
+ model_name=model,
456
+ output_token_count=amount_token,
457
+ request_latency=100000
458
+ )
459
+
460
+ impacts = format_impacts(impacts)
461
+
462
+ energy = f"""
463
+ <h2 align="center">⚡️ Energy</h2>
464
+ $$ \Large {impacts.energy.magnitude:.3g} \ \large {impacts.energy.units} $$
465
+ <p align="center"><i>Evaluates the electricity consumption<i></p><br>
466
+ """
467
+
468
+ gwp = f"""
469
+ <h2 align="center">🌍️ GHG Emissions</h2>
470
+ $$ \Large {impacts.gwp.magnitude:.3g} \ \large {impacts.gwp.units} $$
471
+ <p align="center"><i>Evaluates the effect on global warming<i></p><br>
472
+ """
473
+
474
+ adp = f"""
475
+ <h2 align="center">🪨 Abiotic Resources</h2>
476
+ $$ \Large {impacts.adpe.magnitude:.3g} \ \large {impacts.adpe.units} $$
477
+ <p align="center"><i>Evaluates the use of metals and minerals<i></p><br>
478
+ """
479
+
480
+ pe = f"""
481
+ <h2 align="center">⛽️ Primary Energy</h2>
482
+ $$ \Large {impacts.pe.magnitude:.3g} \ \large {impacts.pe.units} $$
483
+ <p align="center"><i>Evaluates the use of energy resources<i></p><br>
484
+ """
485
+
486
+ return energy, gwp, adp, pe
487
+
488
+ def combined_function(text, model):
489
+ n_token = process_input(text)
490
+ energy, gwp, adp, pe = compute_own_impacts(n_token, model)
491
+ return n_token, energy, gwp, adp, pe
492
+
493
+ with gr.Blocks():
494
+
495
+ text_input = gr.Textbox(label="Paste the URL here (must be on https://chatgpt.com/share/xxxx format)")
496
+ model = gr.Dropdown(
497
+ MODELS,
498
+ label="Model name",
499
+ value="openai/gpt-4o",
500
+ filterable=True,
501
+ interactive=True
502
+ )
503
+
504
+ process_button = gr.Button("Estimate this usage footprint")
505
+
506
+ with gr.Accordion("ℹ️ Infos", open=False):
507
+ n_token = gr.Textbox(label="Total amount of tokens :")
508
+
509
+ with gr.Row():
510
+ with gr.Column(scale=1, min_width=220):
511
+ energy = gr.Markdown()
512
+ with gr.Column(scale=1, min_width=220):
513
+ gwp = gr.Markdown()
514
+ with gr.Column(scale=1, min_width=220):
515
+ adp = gr.Markdown()
516
+ with gr.Column(scale=1, min_width=220):
517
+ pe = gr.Markdown()
518
+
519
+ process_button.click(
520
+ fn=combined_function,
521
+ inputs=[text_input, model],
522
+ outputs=[n_token, energy, gwp, adp, pe]
523
+ )
524
+
525
+ with gr.Tab("📖 Methodology"):
526
+ gr.Markdown(METHODOLOGY_TEXT,
527
+ elem_classes="descriptive-text",
528
+ latex_delimiters=[
529
+ {"left": "$$", "right": "$$", "display": True},
530
+ {"left": "$", "right": "$", "display": False}
531
+ ])
532
+
533
+ with gr.Tab("ℹ️ About"):
534
+ gr.Markdown(ABOUT_TEXT, elem_classes="descriptive-text",)
535
+
536
+ with gr.Accordion("📚 Citation", open=False):
537
+ gr.Textbox(
538
+ value=CITATION_TEXT,
539
+ label=CITATION_LABEL,
540
+ interactive=False,
541
+ show_copy_button=True,
542
+ lines=len(CITATION_TEXT.split('\n')),
543
+ )
544
+
545
+ # License
546
+ gr.Markdown(LICENCE_TEXT)
547
+
548
+ if __name__ == '__main__':
549
+ demo.launch()