{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.945297241210938,
"base_token_generation_latency_sync": 36.61009635925293,
"base_token_generation_latency_async": 36.64432633668184,
"base_token_generation_throughput_sync": 0.027314869378847112,
"base_token_generation_throughput_async": 0.027289354177565443,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.30351409912109,
"base_inference_latency_async": 37.97504901885986,
"base_inference_throughput_sync": 0.008312309141494195,
"base_inference_throughput_async": 0.026333079899471933,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 12.385092735290527,
"smashed_token_generation_latency_sync": 43.115352630615234,
"smashed_token_generation_latency_async": 42.25224871188402,
"smashed_token_generation_throughput_sync": 0.023193594369211368,
"smashed_token_generation_throughput_async": 0.023667379381839538,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 163.38503723144532,
"smashed_inference_latency_async": 69.0446138381958,
"smashed_inference_throughput_sync": 0.006120511504266062,
"smashed_inference_throughput_async": 0.014483388991695618,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}