sharpenb committed on
Commit
9b41bac
·
verified ·
1 Parent(s): 06706a3

Upload folder using huggingface_hub

Browse files
Files changed (5) hide show
  1. .gitattributes +1 -0
  2. base_results.json +9 -9
  3. config.json +1 -1
  4. plots.png +0 -0
  5. smashed_results.json +9 -9
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  banner.png filter=lfs diff=lfs merge=lfs -text
 
 
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  banner.png filter=lfs diff=lfs merge=lfs -text
37
+ plots.png filter=lfs diff=lfs merge=lfs -text
base_results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "perplexity_y_gt": 38109.7109375,
3
- "inference_elapsed_time_ms_@1": 411.52214431762695,
4
- "inference_latency_ms_@1": 41.152214431762694,
5
- "inference_throughput_batches_per_ms_@1": 0.024300028900222818,
6
- "Loading model_emissions": 6.982773212868697e-06,
7
- "Loading model_energy_consumed": 2.4011874668140837e-05,
8
- "Inference_emissions": 1.743701285954221e-05,
9
- "Inference_energy_consumed": 5.996118656674475e-05,
10
- "tracker_emissions": 2.925166457244822e-05,
11
- "tracker_energy_consumed": 0.00010058858882222871,
12
  "disk_memory": 3158.1982421875
13
  }
 
1
  {
2
  "perplexity_y_gt": 38109.7109375,
3
+ "inference_elapsed_time_ms_@1": 413.0109443664551,
4
+ "inference_latency_ms_@1": 41.301094436645506,
5
+ "inference_throughput_batches_per_ms_@1": 0.02421243343887573,
6
+ "Loading model_emissions": 5.771226897863607e-08,
7
+ "Loading model_energy_consumed": 2.427722377133629e-05,
8
+ "Inference_emissions": 1.491522043356255e-07,
9
+ "Inference_energy_consumed": 6.274231640389114e-05,
10
+ "tracker_emissions": 2.404339955210359e-07,
11
+ "tracker_energy_consumed": 0.00010114088416210811,
12
  "disk_memory": 3158.1982421875
13
  }
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/tmp/models/tmp2ydkucwg5lc33vlc",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
1
  {
2
+ "_name_or_path": "/tmp/models/tmpdoc9e6ytd4j9lnop",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
plots.png CHANGED

Git LFS Details

  • SHA256: 59b9af5a86663682518f332f2e4b1d1a1146a2056c0bdce864efaceda7103ff3
  • Pointer size: 131 Bytes
  • Size of remote file: 126 kB
smashed_results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "perplexity_y_gt": 22252.1484375,
3
- "inference_elapsed_time_ms_@1": 1430.5411987304688,
4
- "inference_latency_ms_@1": 143.0541198730469,
5
- "inference_throughput_batches_per_ms_@1": 0.006990361416276918,
6
- "Loading model_emissions": 1.3689458760454972e-05,
7
- "Loading model_energy_consumed": 4.707435828288706e-05,
8
- "Inference_emissions": 3.829836649971085e-05,
9
- "Inference_energy_consumed": 0.00013169775794677133,
10
- "tracker_emissions": 5.627399208629691e-05,
11
- "tracker_energy_consumed": 0.00019351108848299342,
12
  "disk_memory": 3150.1982421875
13
  }
 
1
  {
2
  "perplexity_y_gt": 22252.1484375,
3
+ "inference_elapsed_time_ms_@1": 1414.6948547363281,
4
+ "inference_latency_ms_@1": 141.4694854736328,
5
+ "inference_throughput_batches_per_ms_@1": 0.007068662168750029,
6
+ "Loading model_emissions": 1.3719643498171522e-05,
7
+ "Loading model_energy_consumed": 4.7178155458714676e-05,
8
+ "Inference_emissions": 3.8438811896213414e-05,
9
+ "Inference_energy_consumed": 0.00013218071180417604,
10
+ "tracker_emissions": 5.652322119307323e-05,
11
+ "tracker_energy_consumed": 0.0001943681201941962,
12
  "disk_memory": 3150.1982421875
13
  }