{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.629734992980957,
"base_token_generation_latency_sync": 38.046831893920896,
"base_token_generation_latency_async": 37.76412159204483,
"base_token_generation_throughput_sync": 0.026283397334845623,
"base_token_generation_throughput_async": 0.026480160476197973,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 117.62565231323242,
"base_inference_latency_async": 38.59529495239258,
"base_inference_throughput_sync": 0.008501546901836004,
"base_inference_throughput_async": 0.025909893971104592,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 103123.2578125,
"smashed_token_generation_latency_sync": 167.67705993652345,
"smashed_token_generation_latency_async": 168.52510422468185,
"smashed_token_generation_throughput_sync": 0.005963845026735109,
"smashed_token_generation_throughput_async": 0.0059338340397450525,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 262.8842468261719,
"smashed_inference_latency_async": 209.8944902420044,
"smashed_inference_throughput_sync": 0.0038039555890971074,
"smashed_inference_throughput_async": 0.004764298476091578,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}