alrope committed (verified)
Commit b9db2c9 · Parent: b3dccc9

Upload folder using huggingface_hub

Files changed (34)
  1. temperature=0.0/Llama-2-13b-chat-hf.json +39 -0
  2. temperature=0.0/Llama-2-70b-chat-hf.json +39 -0
  3. temperature=0.0/Llama-2-7b-chat-hf.json +39 -0
  4. temperature=0.0/Llama-3.1-70B-Instruct.json +39 -0
  5. temperature=0.0/Llama-3.1-Tulu-3-70B-DPO.json +39 -0
  6. temperature=0.0/Llama-3.1-Tulu-3-70B.json +39 -0
  7. temperature=0.0/Llama-3.1-Tulu-3-8B.json +39 -0
  8. temperature=0.0/Meta-Llama-3.1-8B-Instruct.json +39 -0
  9. temperature=0.0/Mistral-7B-Instruct-v0.3.json +39 -0
  10. temperature=0.0/Mistral-Large-Instruct-2407.json +39 -0
  11. temperature=0.0/Mistral-Small-Instruct-2409.json +39 -0
  12. temperature=0.0/OLMo-2-1124-13B-Instruct.json +39 -0
  13. temperature=0.0/OLMo-2-1124-7B-Instruct.json +39 -0
  14. temperature=0.0/OLMo-7B-0724-Instruct-hf.json +39 -0
  15. temperature=0.0/OLMo-7B-SFT-hf.json +39 -0
  16. temperature=0.0/Phi-3-medium-4k-instruct.json +39 -0
  17. temperature=0.0/Qwen1.5-110B-Chat.json +39 -0
  18. temperature=0.0/Qwen2-72B-Instruct.json +39 -0
  19. temperature=0.0/Qwen2.5-72B-Instruct.json +39 -0
  20. temperature=0.0/WizardLM-13B-V1.2.json +39 -0
  21. temperature=0.0/Yi-1.5-34B-Chat.json +39 -0
  22. temperature=0.0/dolly-v2-12b.json +39 -0
  23. temperature=0.0/dolly-v2-7b.json +39 -0
  24. temperature=0.0/gpt4all-13b-snoozy.json +39 -0
  25. temperature=0.0/koala-13B-HF.json +39 -0
  26. temperature=0.0/koala-7B-HF.json +39 -0
  27. temperature=0.0/mpt-7b-chat.json +39 -0
  28. temperature=0.0/oasst-sft-1-pythia-12b.json +39 -0
  29. temperature=0.0/tulu-2-dpo-13b.json +39 -0
  30. temperature=0.0/tulu-2-dpo-70b.json +39 -0
  31. temperature=0.0/tulu-2-dpo-7b.json +39 -0
  32. temperature=0.0/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm.json +39 -0
  33. temperature=0.0/vicuna-13b-v1.5.json +39 -0
  34. temperature=0.0/vicuna-7b-v1.5.json +39 -0
temperature=0.0/Llama-2-13b-chat-hf.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "meta-llama/Llama-2-13b-chat-hf",
+ "generation": 0.179,
+ "open_qa": 0.343,
+ "brainstorm": 0.245,
+ "rewrite": 0.191,
+ "summarize": 0.307,
+ "classify": 0.249,
+ "closed_qa": 0.24,
+ "extract": 0.22,
+ "reasoning_over_numerical_data": 0.125,
+ "multi-document_synthesis": 0.104,
+ "fact_checking_or_attributed_qa": 0.198,
+ "average": 0.1956,
+ "generation_rank": 14,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 17,
+ "rewrite_rank": 14,
+ "summarize_rank": 9,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 18,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 17,
+ "generation_confi": "+3.0 / -3.1",
+ "open_qa_confi": "+8.8 / -8.8",
+ "brainstorm_confi": "+3.4 / -3.3",
+ "rewrite_confi": "+2.9 / -2.8",
+ "summarize_confi": "+6.4 / -6.4",
+ "classify_confi": "+6.0 / -5.7",
+ "closed_qa_confi": "+5.9 / -5.4",
+ "extract_confi": "+5.4 / -5.2",
+ "reasoning_over_numerical_data_confi": "+2.7 / -2.7",
+ "multi-document_synthesis_confi": "+2.9 / -2.6",
+ "fact_checking_or_attributed_qa_confi": "+3.6 / -3.6",
+ "average_confi": "+1.17 / -1.17"
+ }
temperature=0.0/Llama-2-70b-chat-hf.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "meta-llama/Llama-2-70b-chat-hf",
+ "generation": 0.239,
+ "open_qa": 0.377,
+ "brainstorm": 0.306,
+ "rewrite": 0.22,
+ "summarize": 0.275,
+ "classify": 0.296,
+ "closed_qa": 0.339,
+ "extract": 0.238,
+ "reasoning_over_numerical_data": 0.197,
+ "multi-document_synthesis": 0.13,
+ "fact_checking_or_attributed_qa": 0.222,
+ "average": 0.239,
+ "generation_rank": 14,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 14,
+ "rewrite_rank": 14,
+ "summarize_rank": 9,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 14,
+ "generation_confi": "+3.5 / -3.4",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+3.7 / -3.6",
+ "rewrite_confi": "+3.1 / -3.1",
+ "summarize_confi": "+6.2 / -5.9",
+ "classify_confi": "+6.5 / -6.0",
+ "closed_qa_confi": "+6.2 / -6.2",
+ "extract_confi": "+5.2 / -5.2",
+ "reasoning_over_numerical_data_confi": "+3.4 / -3.3",
+ "multi-document_synthesis_confi": "+3.1 / -2.9",
+ "fact_checking_or_attributed_qa_confi": "+4.0 / -3.7",
+ "average_confi": "+1.28 / -1.28"
+ }
temperature=0.0/Llama-2-7b-chat-hf.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "meta-llama/Llama-2-7b-chat-hf",
+ "generation": 0.166,
+ "open_qa": 0.299,
+ "brainstorm": 0.221,
+ "rewrite": 0.144,
+ "summarize": 0.223,
+ "classify": 0.194,
+ "closed_qa": 0.218,
+ "extract": 0.131,
+ "reasoning_over_numerical_data": 0.075,
+ "multi-document_synthesis": 0.097,
+ "fact_checking_or_attributed_qa": 0.115,
+ "average": 0.1538,
+ "generation_rank": 19,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 17,
+ "rewrite_rank": 20,
+ "summarize_rank": 18,
+ "classify_rank": 23,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 22,
+ "average_rank": 20,
+ "generation_confi": "+3.2 / -2.9",
+ "open_qa_confi": "+9.3 / -8.8",
+ "brainstorm_confi": "+3.3 / -3.3",
+ "rewrite_confi": "+2.6 / -2.4",
+ "summarize_confi": "+5.9 / -5.4",
+ "classify_confi": "+5.5 / -5.0",
+ "closed_qa_confi": "+5.7 / -5.2",
+ "extract_confi": "+4.7 / -4.2",
+ "reasoning_over_numerical_data_confi": "+2.2 / -2.1",
+ "multi-document_synthesis_confi": "+2.6 / -2.6",
+ "fact_checking_or_attributed_qa_confi": "+2.8 / -2.8",
+ "average_confi": "+1.08 / -1.03"
+ }
temperature=0.0/Llama-3.1-70B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "meta-llama/Llama-3.1-70B-Instruct",
+ "generation": 0.445,
+ "open_qa": 0.706,
+ "brainstorm": 0.486,
+ "rewrite": 0.48,
+ "summarize": 0.493,
+ "classify": 0.507,
+ "closed_qa": 0.512,
+ "extract": 0.502,
+ "reasoning_over_numerical_data": 0.521,
+ "multi-document_synthesis": 0.453,
+ "fact_checking_or_attributed_qa": 0.496,
+ "average": 0.4898,
+ "generation_rank": 1,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 1,
+ "generation_confi": "+3.8 / -3.9",
+ "open_qa_confi": "+8.3 / -8.8",
+ "brainstorm_confi": "+4.2 / -4.1",
+ "rewrite_confi": "+3.7 / -3.8",
+ "summarize_confi": "+6.7 / -6.7",
+ "classify_confi": "+6.7 / -6.5",
+ "closed_qa_confi": "+6.2 / -6.4",
+ "extract_confi": "+6.2 / -5.9",
+ "reasoning_over_numerical_data_confi": "+4.0 / -4.1",
+ "multi-document_synthesis_confi": "+4.6 / -4.4",
+ "fact_checking_or_attributed_qa_confi": "+4.5 / -4.5",
+ "average_confi": "+1.44 / -1.44"
+ }
temperature=0.0/Llama-3.1-Tulu-3-70B-DPO.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/Llama-3.1-Tulu-3-70B-DPO",
+ "generation": 0.06,
+ "open_qa": 0.539,
+ "brainstorm": 0.02,
+ "rewrite": 0.084,
+ "summarize": 0.015,
+ "classify": 0.264,
+ "closed_qa": 0.297,
+ "extract": 0.22,
+ "reasoning_over_numerical_data": 0.2,
+ "multi-document_synthesis": 0.029,
+ "fact_checking_or_attributed_qa": 0.141,
+ "average": 0.1191,
+ "generation_rank": 23,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 24,
+ "summarize_rank": 27,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 22,
+ "generation_confi": "+2.0 / -1.8",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+1.2 / -1.0",
+ "rewrite_confi": "+2.1 / -2.0",
+ "summarize_confi": "+2.0 / -1.5",
+ "classify_confi": "+6.0 / -5.7",
+ "closed_qa_confi": "+6.4 / -6.2",
+ "extract_confi": "+5.7 / -5.4",
+ "reasoning_over_numerical_data_confi": "+3.4 / -3.3",
+ "multi-document_synthesis_confi": "+1.5 / -1.3",
+ "fact_checking_or_attributed_qa_confi": "+3.3 / -3.0",
+ "average_confi": "+0.94 / -0.96"
+ }
temperature=0.0/Llama-3.1-Tulu-3-70B.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/Llama-3.1-Tulu-3-70B",
+ "generation": 0.448,
+ "open_qa": 0.456,
+ "brainstorm": 0.504,
+ "rewrite": 0.434,
+ "summarize": 0.364,
+ "classify": 0.45,
+ "closed_qa": 0.354,
+ "extract": 0.399,
+ "reasoning_over_numerical_data": 0.439,
+ "multi-document_synthesis": 0.53,
+ "fact_checking_or_attributed_qa": 0.319,
+ "average": 0.4368,
+ "generation_rank": 1,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 5,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "reasoning_over_numerical_data_rank": 4,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 3,
+ "average_rank": 4,
+ "generation_confi": "+4.0 / -4.0",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+4.0 / -4.0",
+ "rewrite_confi": "+3.6 / -3.7",
+ "summarize_confi": "+6.7 / -6.7",
+ "classify_confi": "+6.5 / -7.0",
+ "closed_qa_confi": "+6.4 / -6.4",
+ "extract_confi": "+6.7 / -6.4",
+ "reasoning_over_numerical_data_confi": "+4.0 / -4.1",
+ "multi-document_synthesis_confi": "+4.6 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.3 / -4.1",
+ "average_confi": "+1.48 / -1.47"
+ }
temperature=0.0/Llama-3.1-Tulu-3-8B.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/Llama-3.1-Tulu-3-8B",
+ "generation": 0.363,
+ "open_qa": 0.402,
+ "brainstorm": 0.391,
+ "rewrite": 0.341,
+ "summarize": 0.302,
+ "classify": 0.331,
+ "closed_qa": 0.243,
+ "extract": 0.262,
+ "reasoning_over_numerical_data": 0.279,
+ "multi-document_synthesis": 0.45,
+ "fact_checking_or_attributed_qa": 0.247,
+ "average": 0.3354,
+ "generation_rank": 7,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 9,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 8,
+ "generation_confi": "+3.8 / -3.8",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+4.0 / -3.8",
+ "rewrite_confi": "+3.6 / -3.5",
+ "summarize_confi": "+6.4 / -5.9",
+ "classify_confi": "+6.5 / -6.2",
+ "closed_qa_confi": "+5.9 / -5.7",
+ "extract_confi": "+6.2 / -5.9",
+ "reasoning_over_numerical_data_confi": "+3.7 / -3.7",
+ "multi-document_synthesis_confi": "+4.4 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.1 / -3.8",
+ "average_confi": "+1.40 / -1.41"
+ }
temperature=0.0/Meta-Llama-3.1-8B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "meta-llama/Meta-Llama-3.1-8B-Instruct",
+ "generation": 0.398,
+ "open_qa": 0.564,
+ "brainstorm": 0.482,
+ "rewrite": 0.386,
+ "summarize": 0.379,
+ "classify": 0.4,
+ "closed_qa": 0.389,
+ "extract": 0.334,
+ "reasoning_over_numerical_data": 0.311,
+ "multi-document_synthesis": 0.36,
+ "fact_checking_or_attributed_qa": 0.334,
+ "average": 0.3857,
+ "generation_rank": 7,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 5,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 11,
+ "fact_checking_or_attributed_qa_rank": 3,
+ "average_rank": 7,
+ "generation_confi": "+3.8 / -3.7",
+ "open_qa_confi": "+9.3 / -9.8",
+ "brainstorm_confi": "+4.0 / -4.0",
+ "rewrite_confi": "+3.7 / -3.6",
+ "summarize_confi": "+6.9 / -6.4",
+ "classify_confi": "+6.5 / -6.5",
+ "closed_qa_confi": "+6.4 / -6.7",
+ "extract_confi": "+5.7 / -5.7",
+ "reasoning_over_numerical_data_confi": "+3.7 / -3.8",
+ "multi-document_synthesis_confi": "+4.6 / -4.4",
+ "fact_checking_or_attributed_qa_confi": "+4.3 / -4.2",
+ "average_confi": "+1.48 / -1.46"
+ }
temperature=0.0/Mistral-7B-Instruct-v0.3.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "mistralai/Mistral-7B-Instruct-v0.3",
+ "generation": 0.19,
+ "open_qa": 0.426,
+ "brainstorm": 0.292,
+ "rewrite": 0.209,
+ "summarize": 0.225,
+ "classify": 0.256,
+ "closed_qa": 0.252,
+ "extract": 0.171,
+ "reasoning_over_numerical_data": 0.177,
+ "multi-document_synthesis": 0.23,
+ "fact_checking_or_attributed_qa": 0.228,
+ "average": 0.2266,
+ "generation_rank": 14,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 14,
+ "rewrite_rank": 14,
+ "summarize_rank": 18,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 12,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 14,
+ "generation_confi": "+3.2 / -3.0",
+ "open_qa_confi": "+9.8 / -9.3",
+ "brainstorm_confi": "+3.7 / -3.7",
+ "rewrite_confi": "+3.1 / -3.0",
+ "summarize_confi": "+5.9 / -5.4",
+ "classify_confi": "+6.2 / -5.7",
+ "closed_qa_confi": "+6.2 / -5.9",
+ "extract_confi": "+5.2 / -5.0",
+ "reasoning_over_numerical_data_confi": "+3.3 / -3.2",
+ "multi-document_synthesis_confi": "+4.0 / -3.8",
+ "fact_checking_or_attributed_qa_confi": "+3.9 / -3.8",
+ "average_confi": "+1.24 / -1.29"
+ }
temperature=0.0/Mistral-Large-Instruct-2407.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "mistralai/Mistral-Large-Instruct-2407",
+ "generation": 0.492,
+ "open_qa": 0.471,
+ "brainstorm": 0.513,
+ "rewrite": 0.51,
+ "summarize": 0.485,
+ "classify": 0.47,
+ "closed_qa": 0.307,
+ "extract": 0.426,
+ "reasoning_over_numerical_data": 0.452,
+ "multi-document_synthesis": 0.6,
+ "fact_checking_or_attributed_qa": 0.43,
+ "average": 0.4839,
+ "generation_rank": 1,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 1,
+ "generation_confi": "+4.1 / -4.0",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+3.9 / -3.9",
+ "rewrite_confi": "+3.6 / -3.7",
+ "summarize_confi": "+6.9 / -6.9",
+ "classify_confi": "+6.7 / -6.5",
+ "closed_qa_confi": "+6.2 / -5.9",
+ "extract_confi": "+6.4 / -6.2",
+ "reasoning_over_numerical_data_confi": "+4.0 / -4.0",
+ "multi-document_synthesis_confi": "+4.4 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.4 / -4.3",
+ "average_confi": "+1.49 / -1.53"
+ }
temperature=0.0/Mistral-Small-Instruct-2409.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "mistralai/Mistral-Small-Instruct-2409",
+ "generation": 0.414,
+ "open_qa": 0.461,
+ "brainstorm": 0.467,
+ "rewrite": 0.458,
+ "summarize": 0.399,
+ "classify": 0.418,
+ "closed_qa": 0.287,
+ "extract": 0.354,
+ "reasoning_over_numerical_data": 0.393,
+ "multi-document_synthesis": 0.541,
+ "fact_checking_or_attributed_qa": 0.392,
+ "average": 0.4287,
+ "generation_rank": 1,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 4,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 3,
+ "average_rank": 4,
+ "generation_confi": "+3.9 / -4.1",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+4.1 / -3.9",
+ "rewrite_confi": "+3.7 / -3.7",
+ "summarize_confi": "+6.7 / -6.7",
+ "classify_confi": "+6.5 / -6.5",
+ "closed_qa_confi": "+6.2 / -5.9",
+ "extract_confi": "+6.2 / -6.2",
+ "reasoning_over_numerical_data_confi": "+4.1 / -3.9",
+ "multi-document_synthesis_confi": "+4.6 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.4 / -4.5",
+ "average_confi": "+1.48 / -1.43"
+ }
temperature=0.0/OLMo-2-1124-13B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/OLMo-2-1124-13B-Instruct",
+ "generation": 0.376,
+ "open_qa": 0.431,
+ "brainstorm": 0.386,
+ "rewrite": 0.399,
+ "summarize": 0.391,
+ "classify": 0.438,
+ "closed_qa": 0.319,
+ "extract": 0.248,
+ "reasoning_over_numerical_data": 0.237,
+ "multi-document_synthesis": 0.422,
+ "fact_checking_or_attributed_qa": 0.295,
+ "average": 0.356,
+ "generation_rank": 7,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 9,
+ "rewrite_rank": 5,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 8,
+ "generation_confi": "+4.0 / -3.9",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+3.9 / -3.8",
+ "rewrite_confi": "+3.6 / -3.7",
+ "summarize_confi": "+6.4 / -6.4",
+ "classify_confi": "+6.7 / -6.7",
+ "closed_qa_confi": "+6.4 / -6.2",
+ "extract_confi": "+5.9 / -5.5",
+ "reasoning_over_numerical_data_confi": "+3.5 / -3.5",
+ "multi-document_synthesis_confi": "+4.6 / -4.4",
+ "fact_checking_or_attributed_qa_confi": "+4.3 / -4.0",
+ "average_confi": "+1.48 / -1.44"
+ }
temperature=0.0/OLMo-2-1124-7B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/OLMo-2-1124-7B-Instruct",
+ "generation": 0.318,
+ "open_qa": 0.348,
+ "brainstorm": 0.369,
+ "rewrite": 0.319,
+ "summarize": 0.252,
+ "classify": 0.254,
+ "closed_qa": 0.183,
+ "extract": 0.183,
+ "reasoning_over_numerical_data": 0.173,
+ "multi-document_synthesis": 0.393,
+ "fact_checking_or_attributed_qa": 0.208,
+ "average": 0.2849,
+ "generation_rank": 12,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 9,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 12,
+ "closed_qa_rank": 20,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 12,
+ "generation_confi": "+3.7 / -3.7",
+ "open_qa_confi": "+9.3 / -8.3",
+ "brainstorm_confi": "+3.8 / -3.8",
+ "rewrite_confi": "+3.4 / -3.4",
+ "summarize_confi": "+5.9 / -5.9",
+ "classify_confi": "+6.0 / -5.7",
+ "closed_qa_confi": "+5.2 / -5.0",
+ "extract_confi": "+5.4 / -5.0",
+ "reasoning_over_numerical_data_confi": "+3.4 / -3.1",
+ "multi-document_synthesis_confi": "+4.4 / -4.4",
+ "fact_checking_or_attributed_qa_confi": "+3.7 / -3.7",
+ "average_confi": "+1.36 / -1.35"
+ }
temperature=0.0/OLMo-7B-0724-Instruct-hf.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/OLMo-7B-0724-Instruct-hf",
+ "generation": 0.064,
+ "open_qa": 0.157,
+ "brainstorm": 0.127,
+ "rewrite": 0.053,
+ "summarize": 0.104,
+ "classify": 0.087,
+ "closed_qa": 0.059,
+ "extract": 0.05,
+ "reasoning_over_numerical_data": 0.052,
+ "multi-document_synthesis": 0.079,
+ "fact_checking_or_attributed_qa": 0.06,
+ "average": 0.075,
+ "generation_rank": 23,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 21,
+ "rewrite_rank": 28,
+ "summarize_rank": 23,
+ "classify_rank": 30,
+ "closed_qa_rank": 29,
+ "extract_rank": 32,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 26,
+ "generation_confi": "+2.0 / -1.8",
+ "open_qa_confi": "+7.8 / -6.9",
+ "brainstorm_confi": "+2.7 / -2.7",
+ "rewrite_confi": "+1.7 / -1.6",
+ "summarize_confi": "+4.5 / -4.0",
+ "classify_confi": "+4.0 / -3.5",
+ "closed_qa_confi": "+3.5 / -3.0",
+ "extract_confi": "+3.2 / -2.7",
+ "reasoning_over_numerical_data_confi": "+1.9 / -1.9",
+ "multi-document_synthesis_confi": "+2.6 / -2.4",
+ "fact_checking_or_attributed_qa_confi": "+2.2 / -2.1",
+ "average_confi": "+0.82 / -0.80"
+ }
temperature=0.0/OLMo-7B-SFT-hf.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "generation": 0.052,
+ "open_qa": 0.348,
+ "brainstorm": 0.02,
+ "rewrite": 0.05,
+ "summarize": 0.054,
+ "classify": 0.187,
+ "closed_qa": 0.156,
+ "extract": 0.072,
+ "reasoning_over_numerical_data": 0.047,
+ "multi-document_synthesis": 0.024,
+ "fact_checking_or_attributed_qa": 0.081,
+ "average": 0.0661,
+ "generation_rank": 28,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 28,
+ "summarize_rank": 23,
+ "classify_rank": 23,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 22,
+ "average_rank": 26,
+ "generation_confi": "+1.8 / -1.7",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+1.2 / -1.0",
+ "rewrite_confi": "+1.7 / -1.6",
+ "summarize_confi": "+3.5 / -3.0",
+ "classify_confi": "+5.2 / -5.2",
+ "closed_qa_confi": "+5.0 / -4.7",
+ "extract_confi": "+3.5 / -3.2",
+ "reasoning_over_numerical_data_confi": "+1.9 / -1.7",
+ "multi-document_synthesis_confi": "+1.5 / -1.3",
+ "fact_checking_or_attributed_qa_confi": "+2.6 / -2.5",
+ "average_confi": "+0.74 / -0.72"
+ }
temperature=0.0/Phi-3-medium-4k-instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "microsoft/Phi-3-medium-4k-instruct",
+ "generation": 0.286,
+ "open_qa": 0.564,
+ "brainstorm": 0.337,
+ "rewrite": 0.294,
+ "summarize": 0.356,
+ "classify": 0.415,
+ "closed_qa": 0.334,
+ "extract": 0.248,
+ "reasoning_over_numerical_data": 0.385,
+ "multi-document_synthesis": 0.181,
+ "fact_checking_or_attributed_qa": 0.255,
+ "average": 0.3091,
+ "generation_rank": 12,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 14,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 4,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 12,
+ "generation_confi": "+3.7 / -3.4",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+3.8 / -3.7",
+ "rewrite_confi": "+3.4 / -3.3",
+ "summarize_confi": "+6.9 / -6.4",
+ "classify_confi": "+6.5 / -6.5",
+ "closed_qa_confi": "+6.2 / -6.2",
+ "extract_confi": "+5.7 / -5.4",
+ "reasoning_over_numerical_data_confi": "+4.0 / -3.9",
+ "multi-document_synthesis_confi": "+3.5 / -3.5",
+ "fact_checking_or_attributed_qa_confi": "+3.9 / -4.0",
+ "average_confi": "+1.33 / -1.35"
+ }
temperature=0.0/Qwen1.5-110B-Chat.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "Qwen/Qwen1.5-110B-Chat",
+ "generation": 0.414,
+ "open_qa": 0.593,
+ "brainstorm": 0.462,
+ "rewrite": 0.376,
+ "summarize": 0.371,
+ "classify": 0.453,
+ "closed_qa": 0.317,
+ "extract": 0.317,
+ "reasoning_over_numerical_data": 0.365,
+ "multi-document_synthesis": 0.481,
+ "fact_checking_or_attributed_qa": 0.387,
+ "average": 0.4076,
+ "generation_rank": 1,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 5,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 4,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 3,
+ "average_rank": 4,
+ "generation_confi": "+3.9 / -3.9",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+3.8 / -4.0",
+ "rewrite_confi": "+3.6 / -3.7",
+ "summarize_confi": "+6.4 / -6.4",
+ "classify_confi": "+6.7 / -6.7",
+ "closed_qa_confi": "+6.2 / -5.9",
+ "extract_confi": "+6.2 / -6.2",
+ "reasoning_over_numerical_data_confi": "+3.8 / -3.9",
+ "multi-document_synthesis_confi": "+4.6 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.3 / -4.4",
+ "average_confi": "+1.47 / -1.43"
+ }
temperature=0.0/Qwen2-72B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "Qwen/Qwen2-72B-Instruct",
+ "generation": 0.327,
+ "open_qa": 0.549,
+ "brainstorm": 0.431,
+ "rewrite": 0.324,
+ "summarize": 0.26,
+ "classify": 0.4,
+ "closed_qa": 0.262,
+ "extract": 0.342,
+ "reasoning_over_numerical_data": 0.373,
+ "multi-document_synthesis": 0.263,
+ "fact_checking_or_attributed_qa": 0.271,
+ "average": 0.3371,
+ "generation_rank": 7,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 9,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 4,
+ "multi-document_synthesis_rank": 12,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 8,
+ "generation_confi": "+3.8 / -3.7",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+4.0 / -4.2",
+ "rewrite_confi": "+3.4 / -3.4",
+ "summarize_confi": "+6.2 / -5.9",
+ "classify_confi": "+6.5 / -6.5",
+ "closed_qa_confi": "+5.9 / -5.7",
+ "extract_confi": "+6.4 / -6.4",
+ "reasoning_over_numerical_data_confi": "+3.9 / -4.0",
+ "multi-document_synthesis_confi": "+4.2 / -4.0",
+ "fact_checking_or_attributed_qa_confi": "+4.0 / -4.0",
+ "average_confi": "+1.43 / -1.42"
+ }
temperature=0.0/Qwen2.5-72B-Instruct.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "Qwen/Qwen2.5-72B-Instruct",
+ "generation": 0.454,
+ "open_qa": 0.49,
+ "brainstorm": 0.503,
+ "rewrite": 0.454,
+ "summarize": 0.431,
+ "classify": 0.468,
+ "closed_qa": 0.285,
+ "extract": 0.433,
+ "reasoning_over_numerical_data": 0.452,
+ "multi-document_synthesis": 0.603,
+ "fact_checking_or_attributed_qa": 0.4,
+ "average": 0.4621,
+ "generation_rank": 1,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 3,
+ "average_rank": 1,
+ "generation_confi": "+3.9 / -4.0",
+ "open_qa_confi": "+9.3 / -9.8",
+ "brainstorm_confi": "+4.1 / -4.2",
+ "rewrite_confi": "+3.7 / -3.7",
+ "summarize_confi": "+6.9 / -6.9",
+ "classify_confi": "+6.5 / -6.5",
+ "closed_qa_confi": "+5.9 / -5.7",
+ "extract_confi": "+6.4 / -6.7",
+ "reasoning_over_numerical_data_confi": "+4.0 / -4.0",
+ "multi-document_synthesis_confi": "+4.4 / -4.6",
+ "fact_checking_or_attributed_qa_confi": "+4.4 / -4.4",
+ "average_confi": "+1.54 / -1.49"
+ }
temperature=0.0/WizardLM-13B-V1.2.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "WizardLMTeam/WizardLM-13B-V1.2",
+ "generation": 0.163,
+ "open_qa": 0.353,
+ "brainstorm": 0.231,
+ "rewrite": 0.186,
+ "summarize": 0.213,
+ "classify": 0.266,
+ "closed_qa": 0.243,
+ "extract": 0.146,
+ "reasoning_over_numerical_data": 0.11,
+ "multi-document_synthesis": 0.132,
+ "fact_checking_or_attributed_qa": 0.141,
+ "average": 0.179,
+ "generation_rank": 19,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 17,
+ "rewrite_rank": 14,
+ "summarize_rank": 18,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 18,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 17,
+ "generation_confi": "+3.0 / -2.8",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+3.4 / -3.3",
+ "rewrite_confi": "+2.9 / -2.9",
+ "summarize_confi": "+5.9 / -5.4",
+ "classify_confi": "+6.0 / -5.7",
+ "closed_qa_confi": "+5.7 / -5.4",
+ "extract_confi": "+5.0 / -4.5",
+ "reasoning_over_numerical_data_confi": "+2.6 / -2.5",
+ "multi-document_synthesis_confi": "+3.3 / -3.1",
+ "fact_checking_or_attributed_qa_confi": "+3.2 / -3.0",
+ "average_confi": "+1.14 / -1.14"
+ }
temperature=0.0/Yi-1.5-34B-Chat.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "01-ai/Yi-1.5-34B-Chat",
+ "generation": 0.353,
+ "open_qa": 0.446,
+ "brainstorm": 0.489,
+ "rewrite": 0.297,
+ "summarize": 0.282,
+ "classify": 0.391,
+ "closed_qa": 0.275,
+ "extract": 0.267,
+ "reasoning_over_numerical_data": 0.307,
+ "multi-document_synthesis": 0.417,
+ "fact_checking_or_attributed_qa": 0.299,
+ "average": 0.351,
+ "generation_rank": 7,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 1,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 5,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 8,
+ "generation_confi": "+3.8 / -3.8",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+4.2 / -4.0",
+ "rewrite_confi": "+3.4 / -3.3",
+ "summarize_confi": "+6.4 / -5.9",
+ "classify_confi": "+6.7 / -6.2",
+ "closed_qa_confi": "+5.9 / -5.9",
+ "extract_confi": "+5.9 / -5.7",
+ "reasoning_over_numerical_data_confi": "+3.7 / -3.7",
+ "multi-document_synthesis_confi": "+4.4 / -4.4",
+ "fact_checking_or_attributed_qa_confi": "+4.2 / -4.1",
+ "average_confi": "+1.44 / -1.40"
+ }
temperature=0.0/dolly-v2-12b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "databricks/dolly-v2-12b",
+ "generation": 0.02,
+ "open_qa": 0.225,
+ "brainstorm": 0.002,
+ "rewrite": 0.024,
+ "summarize": 0.005,
+ "classify": 0.06,
+ "closed_qa": 0.131,
+ "extract": 0.079,
+ "reasoning_over_numerical_data": 0.036,
+ "multi-document_synthesis": 0.009,
+ "fact_checking_or_attributed_qa": 0.041,
+ "average": 0.0353,
+ "generation_rank": 33,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 32,
+ "rewrite_rank": 32,
+ "summarize_rank": 27,
+ "classify_rank": 30,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 33,
+ "multi-document_synthesis_rank": 32,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 31,
+ "generation_confi": "+1.2 / -1.0",
+ "open_qa_confi": "+7.8 / -7.8",
+ "brainstorm_confi": "+0.3 / -0.2",
+ "rewrite_confi": "+1.1 / -1.0",
+ "summarize_confi": "+1.0 / -0.5",
+ "classify_confi": "+3.5 / -3.0",
+ "closed_qa_confi": "+4.7 / -4.5",
+ "extract_confi": "+3.7 / -3.5",
+ "reasoning_over_numerical_data_confi": "+1.7 / -1.5",
+ "multi-document_synthesis_confi": "+0.9 / -0.7",
+ "fact_checking_or_attributed_qa_confi": "+1.9 / -1.7",
+ "average_confi": "+0.55 / -0.54"
+ }
temperature=0.0/dolly-v2-7b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "databricks/dolly-v2-7b",
+ "generation": 0.022,
+ "open_qa": 0.255,
+ "brainstorm": 0.003,
+ "rewrite": 0.016,
+ "summarize": 0.0,
+ "classify": 0.05,
+ "closed_qa": 0.097,
+ "extract": 0.087,
+ "reasoning_over_numerical_data": 0.039,
+ "multi-document_synthesis": 0.002,
+ "fact_checking_or_attributed_qa": 0.055,
+ "average": 0.0344,
+ "generation_rank": 28,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 32,
+ "rewrite_rank": 32,
+ "summarize_rank": 34,
+ "classify_rank": 30,
+ "closed_qa_rank": 29,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 32,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 31,
+ "generation_confi": "+1.3 / -1.2",
+ "open_qa_confi": "+8.8 / -7.8",
+ "brainstorm_confi": "+0.5 / -0.3",
+ "rewrite_confi": "+1.0 / -0.9",
+ "summarize_confi": "+0.0 / -0.0",
+ "classify_confi": "+3.5 / -2.5",
+ "closed_qa_confi": "+4.2 / -4.0",
+ "extract_confi": "+4.2 / -3.7",
+ "reasoning_over_numerical_data_confi": "+1.7 / -1.5",
+ "multi-document_synthesis_confi": "+0.4 / -0.2",
+ "fact_checking_or_attributed_qa_confi": "+2.2 / -1.9",
+ "average_confi": "+0.54 / -0.55"
+ }
temperature=0.0/gpt4all-13b-snoozy.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "nomic-ai/gpt4all-13b-snoozy",
+ "generation": 0.06,
+ "open_qa": 0.549,
+ "brainstorm": 0.02,
+ "rewrite": 0.059,
+ "summarize": 0.02,
+ "classify": 0.102,
+ "closed_qa": 0.04,
+ "extract": 0.067,
+ "reasoning_over_numerical_data": 0.064,
+ "multi-document_synthesis": 0.022,
+ "fact_checking_or_attributed_qa": 0.054,
+ "average": 0.0612,
+ "generation_rank": 23,
+ "open_qa_rank": 1,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 24,
+ "summarize_rank": 27,
+ "classify_rank": 23,
+ "closed_qa_rank": 29,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 26,
+ "generation_confi": "+1.8 / -1.8",
+ "open_qa_confi": "+9.8 / -9.8",
+ "brainstorm_confi": "+1.2 / -1.0",
+ "rewrite_confi": "+1.7 / -1.7",
+ "summarize_confi": "+2.0 / -1.5",
+ "classify_confi": "+4.2 / -4.0",
+ "closed_qa_confi": "+3.0 / -2.5",
+ "extract_confi": "+3.5 / -3.2",
+ "reasoning_over_numerical_data_confi": "+2.2 / -2.0",
+ "multi-document_synthesis_confi": "+1.5 / -1.3",
+ "fact_checking_or_attributed_qa_confi": "+2.2 / -1.9",
+ "average_confi": "+0.72 / -0.68"
+ }
temperature=0.0/koala-13B-HF.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "TheBloke/koala-13B-HF",
+ "generation": 0.03,
+ "open_qa": 0.333,
+ "brainstorm": 0.013,
+ "rewrite": 0.046,
+ "summarize": 0.032,
+ "classify": 0.144,
+ "closed_qa": 0.134,
+ "extract": 0.082,
+ "reasoning_over_numerical_data": 0.06,
+ "multi-document_synthesis": 0.031,
+ "fact_checking_or_attributed_qa": 0.051,
+ "average": 0.0566,
+ "generation_rank": 28,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 28,
+ "summarize_rank": 27,
+ "classify_rank": 23,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 29,
+ "generation_confi": "+1.5 / -1.2",
+ "open_qa_confi": "+9.3 / -8.8",
+ "brainstorm_confi": "+1.0 / -0.8",
+ "rewrite_confi": "+1.6 / -1.5",
+ "summarize_confi": "+2.5 / -2.2",
+ "classify_confi": "+5.0 / -4.5",
+ "closed_qa_confi": "+4.7 / -4.2",
+ "extract_confi": "+3.7 / -3.5",
+ "reasoning_over_numerical_data_confi": "+2.1 / -1.9",
+ "multi-document_synthesis_confi": "+1.8 / -1.5",
+ "fact_checking_or_attributed_qa_confi": "+2.2 / -1.9",
+ "average_confi": "+0.69 / -0.69"
+ }
temperature=0.0/koala-7B-HF.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "TheBloke/koala-7B-HF",
+ "generation": 0.027,
+ "open_qa": 0.245,
+ "brainstorm": 0.004,
+ "rewrite": 0.037,
+ "summarize": 0.005,
+ "classify": 0.095,
+ "closed_qa": 0.064,
+ "extract": 0.089,
+ "reasoning_over_numerical_data": 0.041,
+ "multi-document_synthesis": 0.02,
+ "fact_checking_or_attributed_qa": 0.049,
+ "average": 0.0409,
+ "generation_rank": 28,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 28,
+ "summarize_rank": 27,
+ "classify_rank": 30,
+ "closed_qa_rank": 29,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 31,
+ "generation_confi": "+1.3 / -1.2",
+ "open_qa_confi": "+8.8 / -7.8",
+ "brainstorm_confi": "+0.6 / -0.4",
+ "rewrite_confi": "+1.4 / -1.3",
+ "summarize_confi": "+1.0 / -0.5",
+ "classify_confi": "+4.0 / -4.0",
+ "closed_qa_confi": "+3.7 / -3.0",
+ "extract_confi": "+4.0 / -3.7",
+ "reasoning_over_numerical_data_confi": "+1.7 / -1.5",
+ "multi-document_synthesis_confi": "+1.3 / -1.1",
+ "fact_checking_or_attributed_qa_confi": "+2.1 / -1.8",
+ "average_confi": "+0.59 / -0.58"
+ }
temperature=0.0/mpt-7b-chat.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "mosaicml/mpt-7b-chat",
+ "generation": 0.037,
+ "open_qa": 0.363,
+ "brainstorm": 0.005,
+ "rewrite": 0.057,
+ "summarize": 0.025,
+ "classify": 0.182,
+ "closed_qa": 0.136,
+ "extract": 0.042,
+ "reasoning_over_numerical_data": 0.052,
+ "multi-document_synthesis": 0.015,
+ "fact_checking_or_attributed_qa": 0.045,
+ "average": 0.0553,
+ "generation_rank": 28,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 26,
+ "rewrite_rank": 24,
+ "summarize_rank": 27,
+ "classify_rank": 23,
+ "closed_qa_rank": 20,
+ "extract_rank": 32,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 32,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 29,
+ "generation_confi": "+1.7 / -1.3",
+ "open_qa_confi": "+8.8 / -8.8",
+ "brainstorm_confi": "+0.7 / -0.5",
+ "rewrite_confi": "+1.7 / -1.6",
+ "summarize_confi": "+2.5 / -2.0",
+ "classify_confi": "+5.5 / -5.2",
+ "closed_qa_confi": "+4.7 / -4.5",
+ "extract_confi": "+2.7 / -2.5",
+ "reasoning_over_numerical_data_confi": "+1.9 / -1.9",
+ "multi-document_synthesis_confi": "+1.3 / -1.1",
+ "fact_checking_or_attributed_qa_confi": "+2.1 / -1.7",
+ "average_confi": "+0.69 / -0.67"
+ }
temperature=0.0/oasst-sft-1-pythia-12b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "OpenAssistant/oasst-sft-1-pythia-12b",
+ "generation": 0.012,
+ "open_qa": 0.216,
+ "brainstorm": 0.003,
+ "rewrite": 0.016,
+ "summarize": 0.005,
+ "classify": 0.03,
+ "closed_qa": 0.01,
+ "extract": 0.02,
+ "reasoning_over_numerical_data": 0.032,
+ "multi-document_synthesis": 0.018,
+ "fact_checking_or_attributed_qa": 0.028,
+ "average": 0.0218,
+ "generation_rank": 33,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 32,
+ "rewrite_rank": 32,
+ "summarize_rank": 27,
+ "classify_rank": 30,
+ "closed_qa_rank": 34,
+ "extract_rank": 32,
+ "reasoning_over_numerical_data_rank": 33,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 27,
+ "average_rank": 34,
+ "generation_confi": "+1.0 / -0.8",
+ "open_qa_confi": "+7.8 / -7.8",
+ "brainstorm_confi": "+0.5 / -0.3",
+ "rewrite_confi": "+1.0 / -0.9",
+ "summarize_confi": "+1.0 / -0.5",
+ "classify_confi": "+2.5 / -2.0",
+ "closed_qa_confi": "+1.5 / -1.0",
+ "extract_confi": "+2.0 / -1.5",
+ "reasoning_over_numerical_data_confi": "+1.7 / -1.3",
+ "multi-document_synthesis_confi": "+1.3 / -1.1",
+ "fact_checking_or_attributed_qa_confi": "+1.5 / -1.3",
+ "average_confi": "+0.45 / -0.42"
+ }
temperature=0.0/tulu-2-dpo-13b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/tulu-2-dpo-13b",
+ "generation": 0.124,
+ "open_qa": 0.358,
+ "brainstorm": 0.137,
+ "rewrite": 0.144,
+ "summarize": 0.171,
+ "classify": 0.244,
+ "closed_qa": 0.151,
+ "extract": 0.181,
+ "reasoning_over_numerical_data": 0.129,
+ "multi-document_synthesis": 0.095,
+ "fact_checking_or_attributed_qa": 0.161,
+ "average": 0.1481,
+ "generation_rank": 19,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 21,
+ "rewrite_rank": 20,
+ "summarize_rank": 18,
+ "classify_rank": 12,
+ "closed_qa_rank": 20,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 18,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 20,
+ "generation_confi": "+2.7 / -2.6",
+ "open_qa_confi": "+9.3 / -8.8",
+ "brainstorm_confi": "+2.8 / -2.7",
+ "rewrite_confi": "+2.6 / -2.6",
+ "summarize_confi": "+5.2 / -5.0",
+ "classify_confi": "+6.0 / -5.7",
+ "closed_qa_confi": "+5.2 / -4.7",
+ "extract_confi": "+5.2 / -5.0",
+ "reasoning_over_numerical_data_confi": "+2.8 / -2.7",
+ "multi-document_synthesis_confi": "+2.9 / -2.6",
+ "fact_checking_or_attributed_qa_confi": "+3.4 / -3.2",
+ "average_confi": "+1.05 / -1.07"
+ }
temperature=0.0/tulu-2-dpo-70b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/tulu-2-dpo-70b",
+ "generation": 0.211,
+ "open_qa": 0.328,
+ "brainstorm": 0.227,
+ "rewrite": 0.221,
+ "summarize": 0.235,
+ "classify": 0.281,
+ "closed_qa": 0.255,
+ "extract": 0.238,
+ "reasoning_over_numerical_data": 0.224,
+ "multi-document_synthesis": 0.188,
+ "fact_checking_or_attributed_qa": 0.229,
+ "average": 0.2267,
+ "generation_rank": 14,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 17,
+ "rewrite_rank": 14,
+ "summarize_rank": 9,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 5,
+ "reasoning_over_numerical_data_rank": 12,
+ "multi-document_synthesis_rank": 12,
+ "fact_checking_or_attributed_qa_rank": 8,
+ "average_rank": 14,
+ "generation_confi": "+3.2 / -3.2",
+ "open_qa_confi": "+9.3 / -8.3",
+ "brainstorm_confi": "+3.5 / -3.3",
+ "rewrite_confi": "+3.1 / -3.0",
+ "summarize_confi": "+6.2 / -5.7",
+ "classify_confi": "+6.0 / -6.0",
+ "closed_qa_confi": "+5.9 / -5.7",
+ "extract_confi": "+5.7 / -5.4",
+ "reasoning_over_numerical_data_confi": "+3.5 / -3.4",
+ "multi-document_synthesis_confi": "+3.8 / -3.5",
+ "fact_checking_or_attributed_qa_confi": "+4.0 / -3.7",
+ "average_confi": "+1.26 / -1.22"
+ }
temperature=0.0/tulu-2-dpo-7b.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/tulu-2-dpo-7b",
+ "generation": 0.1,
+ "open_qa": 0.284,
+ "brainstorm": 0.076,
+ "rewrite": 0.102,
+ "summarize": 0.119,
+ "classify": 0.164,
+ "closed_qa": 0.158,
+ "extract": 0.116,
+ "reasoning_over_numerical_data": 0.057,
+ "multi-document_synthesis": 0.062,
+ "fact_checking_or_attributed_qa": 0.117,
+ "average": 0.1012,
+ "generation_rank": 23,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 23,
+ "rewrite_rank": 20,
+ "summarize_rank": 23,
+ "classify_rank": 23,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 22,
+ "average_rank": 24,
+ "generation_confi": "+2.5 / -2.3",
+ "open_qa_confi": "+8.8 / -8.8",
+ "brainstorm_confi": "+2.1 / -2.1",
+ "rewrite_confi": "+2.3 / -2.2",
+ "summarize_confi": "+4.5 / -4.5",
+ "classify_confi": "+5.0 / -5.0",
+ "closed_qa_confi": "+5.0 / -5.0",
+ "extract_confi": "+4.5 / -4.0",
+ "reasoning_over_numerical_data_confi": "+2.0 / -1.9",
+ "multi-document_synthesis_confi": "+2.2 / -2.2",
+ "fact_checking_or_attributed_qa_confi": "+3.0 / -2.8",
+ "average_confi": "+0.92 / -0.92"
+ }
temperature=0.0/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "allenai/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm",
+ "generation": 0.186,
+ "open_qa": 0.196,
+ "brainstorm": 0.387,
+ "rewrite": 0.177,
+ "summarize": 0.243,
+ "classify": 0.129,
+ "closed_qa": 0.05,
+ "extract": 0.111,
+ "reasoning_over_numerical_data": 0.126,
+ "multi-document_synthesis": 0.223,
+ "fact_checking_or_attributed_qa": 0.152,
+ "average": 0.1957,
+ "generation_rank": 14,
+ "open_qa_rank": 28,
+ "brainstorm_rank": 9,
+ "rewrite_rank": 14,
+ "summarize_rank": 9,
+ "classify_rank": 23,
+ "closed_qa_rank": 29,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 18,
+ "multi-document_synthesis_rank": 12,
+ "fact_checking_or_attributed_qa_rank": 16,
+ "average_rank": 17,
+ "generation_confi": "+3.2 / -3.0",
+ "open_qa_confi": "+7.8 / -6.9",
+ "brainstorm_confi": "+4.0 / -3.7",
+ "rewrite_confi": "+2.9 / -2.7",
+ "summarize_confi": "+5.9 / -5.9",
+ "classify_confi": "+4.7 / -4.5",
+ "closed_qa_confi": "+3.2 / -2.7",
+ "extract_confi": "+4.2 / -4.2",
+ "reasoning_over_numerical_data_confi": "+2.9 / -2.6",
+ "multi-document_synthesis_confi": "+4.0 / -4.0",
+ "fact_checking_or_attributed_qa_confi": "+3.5 / -3.0",
+ "average_confi": "+1.20 / -1.24"
+ }
temperature=0.0/vicuna-13b-v1.5.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "lmsys/vicuna-13b-v1.5",
+ "generation": 0.11,
+ "open_qa": 0.49,
+ "brainstorm": 0.075,
+ "rewrite": 0.129,
+ "summarize": 0.134,
+ "classify": 0.224,
+ "closed_qa": 0.25,
+ "extract": 0.173,
+ "reasoning_over_numerical_data": 0.11,
+ "multi-document_synthesis": 0.049,
+ "fact_checking_or_attributed_qa": 0.137,
+ "average": 0.1299,
+ "generation_rank": 19,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 23,
+ "rewrite_rank": 20,
+ "summarize_rank": 18,
+ "classify_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 14,
+ "reasoning_over_numerical_data_rank": 18,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 22,
+ "average_rank": 22,
+ "generation_confi": "+2.7 / -2.5",
+ "open_qa_confi": "+9.3 / -9.3",
+ "brainstorm_confi": "+2.2 / -2.0",
+ "rewrite_confi": "+2.6 / -2.4",
+ "summarize_confi": "+5.0 / -4.5",
+ "classify_confi": "+5.7 / -5.5",
+ "closed_qa_confi": "+5.9 / -5.7",
+ "extract_confi": "+5.0 / -4.7",
+ "reasoning_over_numerical_data_confi": "+2.7 / -2.6",
+ "multi-document_synthesis_confi": "+2.2 / -1.8",
+ "fact_checking_or_attributed_qa_confi": "+3.0 / -3.0",
+ "average_confi": "+1.02 / -0.97"
+ }
temperature=0.0/vicuna-7b-v1.5.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "path": "lmsys/vicuna-7b-v1.5",
+ "generation": 0.072,
+ "open_qa": 0.373,
+ "brainstorm": 0.051,
+ "rewrite": 0.096,
+ "summarize": 0.077,
+ "classify": 0.219,
+ "closed_qa": 0.213,
+ "extract": 0.126,
+ "reasoning_over_numerical_data": 0.074,
+ "multi-document_synthesis": 0.044,
+ "fact_checking_or_attributed_qa": 0.083,
+ "average": 0.095,
+ "generation_rank": 23,
+ "open_qa_rank": 8,
+ "brainstorm_rank": 23,
+ "rewrite_rank": 24,
+ "summarize_rank": 23,
+ "classify_rank": 12,
+ "closed_qa_rank": 20,
+ "extract_rank": 22,
+ "reasoning_over_numerical_data_rank": 23,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 22,
+ "average_rank": 24,
+ "generation_confi": "+2.2 / -2.0",
+ "open_qa_confi": "+9.3 / -8.8",
+ "brainstorm_confi": "+1.8 / -1.7",
+ "rewrite_confi": "+2.3 / -2.1",
+ "summarize_confi": "+3.7 / -3.5",
+ "classify_confi": "+5.7 / -5.7",
+ "closed_qa_confi": "+5.7 / -5.7",
+ "extract_confi": "+4.7 / -4.2",
+ "reasoning_over_numerical_data_confi": "+2.3 / -2.1",
+ "multi-document_synthesis_confi": "+2.0 / -1.8",
+ "fact_checking_or_attributed_qa_confi": "+2.6 / -2.4",
+ "average_confi": "+0.88 / -0.86"
+ }
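All 34 files added by this commit share the same schema: a model "path", per-category scores, per-category ranks, and "+x / -y" confidence strings. As a usage note (not part of the commit itself), the sketch below shows one way to load these files and rank models by their "average" score; it assumes the temperature=0.0/ folder has been downloaded locally, and the helper name parse_confi is illustrative.

```python
# Minimal sketch: load the per-model JSON result files from this commit and
# sort models by their "average" score. Assumes the "temperature=0.0/"
# directory from this dataset is available locally.
import json
import re
from pathlib import Path

def parse_confi(confi: str) -> tuple[float, float]:
    """Parse a '+1.17 / -1.17' style confidence string into (plus, minus)."""
    plus, minus = re.findall(r"[\d.]+", confi)
    return float(plus), float(minus)

results = []
for path in Path("temperature=0.0").glob("*.json"):
    record = json.loads(path.read_text())
    plus, minus = parse_confi(record["average_confi"])
    results.append((record["path"], record["average"], plus, minus))

# Highest average score first.
for model, avg, plus, minus in sorted(results, key=lambda r: r[1], reverse=True):
    print(f"{model}: {avg:.3f} (+{plus:.2f} / -{minus:.2f})")
```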