{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 55.25741424560547,
"base_token_generation_latency_async": 54.7143429517746,
"base_token_generation_throughput_sync": 0.018097118977649745,
"base_token_generation_throughput_async": 0.018276743282495476,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 53.19772186279297,
"base_inference_latency_async": 52.31618881225586,
"base_inference_throughput_sync": 0.018797797442890316,
"base_inference_throughput_async": 0.019114542223032403,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 166.15823211669922,
"smashed_token_generation_latency_async": 166.91399328410625,
"smashed_token_generation_throughput_sync": 0.006018359651886896,
"smashed_token_generation_throughput_async": 0.005991109435012368,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 173.4734848022461,
"smashed_inference_latency_async": 141.62616729736328,
"smashed_inference_throughput_sync": 0.005764569733178336,
"smashed_inference_throughput_async": 0.007060842068121245,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}