{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 6.637787342071533,
"base_token_generation_latency_sync": 39.689447021484376,
"base_token_generation_latency_async": 40.261987783014774,
"base_token_generation_throughput_sync": 0.0251956143268685,
"base_token_generation_throughput_async": 0.02483732311949753,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.99740066528321,
"base_inference_latency_async": 39.22982215881348,
"base_inference_throughput_sync": 0.008403544904420288,
"base_inference_throughput_async": 0.02549081145338145,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 20686.58203125,
"smashed_token_generation_latency_sync": 169.3347152709961,
"smashed_token_generation_latency_async": 169.75544020533562,
"smashed_token_generation_throughput_sync": 0.005905463616244563,
"smashed_token_generation_throughput_async": 0.005890827409068029,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 265.2768264770508,
"smashed_inference_latency_async": 211.8065595626831,
"smashed_inference_throughput_sync": 0.003769647026015333,
"smashed_inference_throughput_async": 0.004721289095411868,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}