{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.575923919677734,
"base_token_generation_latency_sync": 40.255954360961915,
"base_token_generation_latency_async": 38.7702826410532,
"base_token_generation_throughput_sync": 0.024841045650870145,
"base_token_generation_throughput_async": 0.025792950989248574,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.62886428833008,
"base_inference_latency_async": 38.57831954956055,
"base_inference_throughput_sync": 0.0084296516366327,
"base_inference_throughput_async": 0.025921294957270663,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 12.261353492736816,
"smashed_token_generation_latency_sync": 166.16905364990234,
"smashed_token_generation_latency_async": 166.36428833007812,
"smashed_token_generation_throughput_sync": 0.006017967714414962,
"smashed_token_generation_throughput_async": 0.006010905405467378,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 263.9147018432617,
"smashed_inference_latency_async": 195.67320346832275,
"smashed_inference_throughput_sync": 0.0037891030435807153,
"smashed_inference_throughput_async": 0.005110561805474241,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}