ThomasTheMaker committed · verified
Commit c17aa30 · 1 Parent(s): d81cb79

Upload results_2025-05-11T02-56-53.179102.json with huggingface_hub
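For context, a minimal sketch (not taken from this commit) of how a results file like this is typically pushed with huggingface_hub; the repo_id and local path are placeholders, only the filename and commit message mirror this upload.

```python
# Sketch: uploading a lighteval results JSON with huggingface_hub.
# repo_id and the local path are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results_2025-05-11T02-56-53.179102.json",  # local results file
    path_in_repo="results_2025-05-11T02-56-53.179102.json",     # destination path in the repo
    repo_id="your-username/your-results-repo",                   # placeholder repo id
    repo_type="dataset",
    commit_message="Upload results_2025-05-11T02-56-53.179102.json with huggingface_hub",
)
```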

results_2025-05-11T02-56-53.179102.json ADDED
@@ -0,0 +1,129 @@
+ {
+   "config_general": {
+     "lighteval_sha": "?",
+     "num_fewshot_seeds": 1,
+     "max_samples": null,
+     "job_id": 0,
+     "start_time": 3333.242390676,
+     "end_time": 3866.725562865,
+     "total_evaluation_time_secondes": "533.483172189",
+     "model_name": "allenai/OLMo-2-0425-1B",
+     "model_sha": "1352aa4026bfcdea35bc2397d5c0f1c138970593",
+     "model_dtype": null,
+     "model_size": "5.53 GB",
+     "generation_parameters": {
+       "early_stopping": null,
+       "repetition_penalty": null,
+       "frequency_penalty": null,
+       "length_penalty": null,
+       "presence_penalty": null,
+       "max_new_tokens": null,
+       "min_new_tokens": null,
+       "seed": null,
+       "stop_tokens": null,
+       "temperature": null,
+       "top_k": null,
+       "min_p": null,
+       "top_p": null,
+       "truncate_prompt": null,
+       "response_format": null
+     }
+   },
+   "results": {
+     "leaderboard|truthfulqa:mc|0": {
+       "truthfulqa_mc1": 0.23133414932680538,
+       "truthfulqa_mc1_stderr": 0.014761945174862673,
+       "truthfulqa_mc2": 0.3685296588302264,
+       "truthfulqa_mc2_stderr": 0.01359981212303006
+     },
+     "all": {
+       "truthfulqa_mc1": 0.23133414932680538,
+       "truthfulqa_mc1_stderr": 0.014761945174862673,
+       "truthfulqa_mc2": 0.3685296588302264,
+       "truthfulqa_mc2_stderr": 0.01359981212303006
+     }
+   },
+   "versions": {
+     "leaderboard|truthfulqa:mc|0": 0
+   },
+   "config_tasks": {
+     "leaderboard|truthfulqa:mc": {
+       "name": "truthfulqa:mc",
+       "prompt_function": "truthful_qa_multiple_choice",
+       "hf_repo": "truthful_qa",
+       "hf_subset": "multiple_choice",
+       "metric": [
+         {
+           "metric_name": [
+             "truthfulqa_mc1",
+             "truthfulqa_mc2"
+           ],
+           "higher_is_better": {
+             "truthfulqa_mc1": true,
+             "truthfulqa_mc2": true
+           },
+           "category": "8",
+           "use_case": "1",
+           "sample_level_fn": "truthfulqa_mc_metrics",
+           "corpus_level_fn": {
+             "truthfulqa_mc1": "mean",
+             "truthfulqa_mc2": "mean"
+           }
+         }
+       ],
+       "hf_revision": null,
+       "hf_filter": null,
+       "hf_avail_splits": [
+         "validation"
+       ],
+       "trust_dataset": true,
+       "evaluation_splits": [
+         "validation"
+       ],
+       "few_shots_split": null,
+       "few_shots_select": null,
+       "generation_size": -1,
+       "generation_grammar": null,
+       "stop_sequence": [
+         "\n"
+       ],
+       "num_samples": null,
+       "suite": [
+         "leaderboard"
+       ],
+       "original_num_docs": 817,
+       "effective_num_docs": 817,
+       "must_remove_duplicate_docs": false,
+       "version": 0
+     }
+   },
+   "summary_tasks": {
+     "leaderboard|truthfulqa:mc|0": {
+       "hashes": {
+         "hash_examples": "36a6d90e75d92d4a",
+         "hash_full_prompts": "36a6d90e75d92d4a",
+         "hash_input_tokens": "404fdea99381fcbe",
+         "hash_cont_tokens": "cc3f0628ee1c9d08"
+       },
+       "truncated": 0,
+       "non_truncated": 817,
+       "padded": 9361,
+       "non_padded": 635,
+       "effective_few_shots": 0.0,
+       "num_truncated_few_shots": 0
+     }
+   },
+   "summary_general": {
+     "hashes": {
+       "hash_examples": "aed1dfc67e53d0f2",
+       "hash_full_prompts": "aed1dfc67e53d0f2",
+       "hash_input_tokens": "2b653e3dd18a0749",
+       "hash_cont_tokens": "ca187de990176e1e"
+     },
+     "truncated": 0,
+     "non_truncated": 817,
+     "padded": 9361,
+     "non_padded": 635,
+     "num_truncated_few_shots": 0
+   }
+ }
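
For reference, a small sketch of reading this results file and printing the TruthfulQA multiple-choice scores it contains; it assumes the JSON has been downloaded locally under the same filename.

```python
# Sketch: load the results JSON locally and print the TruthfulQA scores.
# Assumes the file sits next to this script under its uploaded name.
import json

with open("results_2025-05-11T02-56-53.179102.json") as f:
    results = json.load(f)

scores = results["results"]["leaderboard|truthfulqa:mc|0"]
print(f"truthfulqa_mc1: {scores['truthfulqa_mc1']:.4f} ± {scores['truthfulqa_mc1_stderr']:.4f}")
print(f"truthfulqa_mc2: {scores['truthfulqa_mc2']:.4f} ± {scores['truthfulqa_mc2_stderr']:.4f}")
```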