Upload folder using huggingface_hub
- .gitattributes +1 -0
- base_results.json +9 -9
- config.json +1 -1
- plots.png +0 -0
- smashed_results.json +9 -9
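
The commit title above corresponds to the default message used by `HfApi.upload_folder` in `huggingface_hub`. As a rough sketch of how a commit like this one is produced (the repo id and local folder path below are placeholders, not values taken from this repo):

```python
from huggingface_hub import HfApi

api = HfApi()  # authentication via `huggingface-cli login` or the HF_TOKEN env var

# Uploads the contents of folder_path to the repo in a single commit.
# When no commit_message is given, the library uses a default along the
# lines of the commit title above.
api.upload_folder(
    folder_path="./smash_output",        # placeholder local path
    repo_id="user/some-smashed-model",   # placeholder repo id
    repo_type="model",
)
```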
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 banner.png filter=lfs diff=lfs merge=lfs -text
+plots.png filter=lfs diff=lfs merge=lfs -text
base_results.json CHANGED
@@ -1,13 +1,13 @@
 {
   "perplexity_y_gt": 38109.7109375,
-  "inference_elapsed_time_ms_@1":
-  "inference_latency_ms_@1": 41.
-  "inference_throughput_batches_per_ms_@1": 0.
-  "Loading model_emissions":
-  "Loading model_energy_consumed": 2.
-  "Inference_emissions": 1.
-  "Inference_energy_consumed":
-  "tracker_emissions": 2.
-  "tracker_energy_consumed": 0.
+  "inference_elapsed_time_ms_@1": 413.0109443664551,
+  "inference_latency_ms_@1": 41.301094436645506,
+  "inference_throughput_batches_per_ms_@1": 0.02421243343887573,
+  "Loading model_emissions": 5.771226897863607e-08,
+  "Loading model_energy_consumed": 2.427722377133629e-05,
+  "Inference_emissions": 1.491522043356255e-07,
+  "Inference_energy_consumed": 6.274231640389114e-05,
+  "tracker_emissions": 2.404339955210359e-07,
+  "tracker_energy_consumed": 0.00010114088416210811,
   "disk_memory": 3158.1982421875
 }
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/tmp/models/
+  "_name_or_path": "/tmp/models/tmpdoc9e6ytd4j9lnop",
   "architectures": [
     "LlamaForCausalLM"
   ],
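
The only change in config.json is the `_name_or_path` bookkeeping field that `transformers` writes when a model is saved; here it points at a temporary local directory and is not something consumers of the repo need to rely on. A minimal sketch of loading the config, with a hypothetical repo id:

```python
from transformers import AutoConfig

# Hypothetical repo id; the architectures entry matches the diff above.
config = AutoConfig.from_pretrained("user/some-smashed-model")
print(config.architectures)  # ["LlamaForCausalLM"]
```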
plots.png CHANGED
(binary image, stored with Git LFS)
smashed_results.json CHANGED
@@ -1,13 +1,13 @@
 {
   "perplexity_y_gt": 22252.1484375,
-  "inference_elapsed_time_ms_@1":
-  "inference_latency_ms_@1":
-  "inference_throughput_batches_per_ms_@1": 0.
-  "Loading model_emissions": 1.
-  "Loading model_energy_consumed": 4.
-  "Inference_emissions": 3.
-  "Inference_energy_consumed": 0.
-  "tracker_emissions": 5.
-  "tracker_energy_consumed": 0.
+  "inference_elapsed_time_ms_@1": 1414.6948547363281,
+  "inference_latency_ms_@1": 141.4694854736328,
+  "inference_throughput_batches_per_ms_@1": 0.007068662168750029,
+  "Loading model_emissions": 1.3719643498171522e-05,
+  "Loading model_energy_consumed": 4.7178155458714676e-05,
+  "Inference_emissions": 3.8438811896213414e-05,
+  "Inference_energy_consumed": 0.00013218071180417604,
+  "tracker_emissions": 5.652322119307323e-05,
+  "tracker_energy_consumed": 0.0001943681201941962,
   "disk_memory": 3150.1982421875
 }
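
base_results.json and smashed_results.json share the same schema, so the two runs can be compared directly. A minimal sketch that prints each metric side by side (paths assume both files sit in a local clone of this repo):

```python
import json

# Paths assume the two result files are in the current working directory.
with open("base_results.json") as f:
    base = json.load(f)
with open("smashed_results.json") as f:
    smashed = json.load(f)

# Print every shared metric plus the smashed/base ratio where it is defined.
for key in base:
    b, s = base[key], smashed.get(key)
    line = f"{key}: base={b} smashed={s}"
    if isinstance(b, (int, float)) and isinstance(s, (int, float)) and b != 0:
        line += f" (smashed/base={s / b:.3f})"
    print(line)
```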