elselse committed on
Commit
d6eecb1
·
verified ·
1 Parent(s): a9849ec

CIRCL/cwe-parent-vulnerability-classification-roberta-base

Browse files
Files changed (4) hide show
  1. README.md +43 -43
  2. config.json +52 -52
  3. emissions.csv +1 -1
  4. metrics.json +6 -6
README.md CHANGED
@@ -18,9 +18,9 @@ should probably proofread and complete it, then remove this comment. -->
18
 
19
  This model is a fine-tuned version of [roberta-base](https://huggingface.co/roberta-base) on an unknown dataset.
20
  It achieves the following results on the evaluation set:
21
- - Loss: 1.5419
22
- - Accuracy: 0.8068
23
- - F1 Macro: 0.3782
24
 
25
  ## Model description
26
 
@@ -51,46 +51,46 @@ The following hyperparameters were used during training:
51
 
52
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 Macro |
53
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|
54
- | 3.2546 | 1.0 | 25 | 3.1526 | 0.0682 | 0.0142 |
55
- | 3.1769 | 2.0 | 50 | 2.9132 | 0.0795 | 0.0185 |
56
- | 3.1105 | 3.0 | 75 | 2.8945 | 0.5 | 0.0752 |
57
- | 3.0319 | 4.0 | 100 | 2.7792 | 0.0114 | 0.0027 |
58
- | 3.0703 | 5.0 | 125 | 2.8108 | 0.0455 | 0.0130 |
59
- | 2.9543 | 6.0 | 150 | 2.8282 | 0.0682 | 0.0235 |
60
- | 2.9445 | 7.0 | 175 | 2.7967 | 0.125 | 0.0743 |
61
- | 2.8502 | 8.0 | 200 | 2.7834 | 0.1818 | 0.0913 |
62
- | 2.7131 | 9.0 | 225 | 2.6824 | 0.4205 | 0.1350 |
63
- | 2.6082 | 10.0 | 250 | 2.6462 | 0.5227 | 0.1447 |
64
- | 2.6341 | 11.0 | 275 | 2.6106 | 0.4545 | 0.1765 |
65
- | 2.3253 | 12.0 | 300 | 2.5876 | 0.5455 | 0.1611 |
66
- | 2.2511 | 13.0 | 325 | 2.6247 | 0.4773 | 0.1580 |
67
- | 2.2056 | 14.0 | 350 | 2.5191 | 0.6136 | 0.1808 |
68
- | 2.0981 | 15.0 | 375 | 2.3928 | 0.625 | 0.2183 |
69
- | 1.8694 | 16.0 | 400 | 2.3963 | 0.6818 | 0.2353 |
70
- | 1.7945 | 17.0 | 425 | 2.2359 | 0.6818 | 0.2290 |
71
- | 1.7152 | 18.0 | 450 | 2.2076 | 0.7159 | 0.2540 |
72
- | 1.6186 | 19.0 | 475 | 2.1035 | 0.7045 | 0.2388 |
73
- | 1.4477 | 20.0 | 500 | 2.0271 | 0.6932 | 0.2464 |
74
- | 1.4064 | 21.0 | 525 | 1.9818 | 0.7159 | 0.2478 |
75
- | 1.2211 | 22.0 | 550 | 1.8832 | 0.7159 | 0.2517 |
76
- | 1.2831 | 23.0 | 575 | 1.8892 | 0.7273 | 0.2712 |
77
- | 1.1426 | 24.0 | 600 | 1.7992 | 0.7273 | 0.2644 |
78
- | 1.054 | 25.0 | 625 | 1.8517 | 0.7386 | 0.2726 |
79
- | 1.0345 | 26.0 | 650 | 1.7283 | 0.7273 | 0.2644 |
80
- | 0.9516 | 27.0 | 675 | 1.7043 | 0.7045 | 0.2630 |
81
- | 0.8861 | 28.0 | 700 | 1.6532 | 0.7386 | 0.2735 |
82
- | 0.8477 | 29.0 | 725 | 1.6508 | 0.7614 | 0.2795 |
83
- | 0.8804 | 30.0 | 750 | 1.6057 | 0.7386 | 0.2612 |
84
- | 0.7854 | 31.0 | 775 | 1.5771 | 0.75 | 0.2902 |
85
- | 0.7311 | 32.0 | 800 | 1.5838 | 0.7614 | 0.2662 |
86
- | 0.7362 | 33.0 | 825 | 1.5649 | 0.7841 | 0.3463 |
87
- | 0.7031 | 34.0 | 850 | 1.5553 | 0.7841 | 0.3361 |
88
- | 0.7589 | 35.0 | 875 | 1.5546 | 0.7955 | 0.3545 |
89
- | 0.6877 | 36.0 | 900 | 1.5557 | 0.7841 | 0.3361 |
90
- | 0.6497 | 37.0 | 925 | 1.5419 | 0.8068 | 0.3782 |
91
- | 0.6565 | 38.0 | 950 | 1.5496 | 0.7955 | 0.3663 |
92
- | 0.6333 | 39.0 | 975 | 1.5531 | 0.7955 | 0.3458 |
93
- | 0.653 | 40.0 | 1000 | 1.5501 | 0.7955 | 0.3458 |
94
 
95
 
96
  ### Framework versions
 
18
 
19
  This model is a fine-tuned version of [roberta-base](https://huggingface.co/roberta-base) on an unknown dataset.
20
  It achieves the following results on the evaluation set:
21
+ - Loss: 1.2078
22
+ - Accuracy: 0.875
23
+ - F1 Macro: 0.6248
24
 
25
  ## Model description
26
 
 
51
 
52
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 Macro |
53
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|
54
+ | 3.2699 | 1.0 | 25 | 3.1492 | 0.0341 | 0.0055 |
55
+ | 3.1972 | 2.0 | 50 | 2.9909 | 0.0114 | 0.0064 |
56
+ | 3.1211 | 3.0 | 75 | 3.0017 | 0.0341 | 0.0140 |
57
+ | 3.0888 | 4.0 | 100 | 3.0223 | 0.2841 | 0.0463 |
58
+ | 2.9467 | 5.0 | 125 | 2.9608 | 0.0114 | 0.0018 |
59
+ | 2.9851 | 6.0 | 150 | 2.8743 | 0.1932 | 0.0641 |
60
+ | 2.9083 | 7.0 | 175 | 2.7687 | 0.375 | 0.0963 |
61
+ | 2.7652 | 8.0 | 200 | 2.7049 | 0.4318 | 0.1953 |
62
+ | 2.6893 | 9.0 | 225 | 2.5547 | 0.4886 | 0.1952 |
63
+ | 2.5636 | 10.0 | 250 | 2.4970 | 0.5682 | 0.3314 |
64
+ | 2.477 | 11.0 | 275 | 2.3499 | 0.6136 | 0.3790 |
65
+ | 2.2936 | 12.0 | 300 | 2.2659 | 0.6364 | 0.3949 |
66
+ | 2.1369 | 13.0 | 325 | 2.1758 | 0.625 | 0.4002 |
67
+ | 2.0615 | 14.0 | 350 | 2.1015 | 0.6477 | 0.4169 |
68
+ | 1.9548 | 15.0 | 375 | 1.9444 | 0.6932 | 0.3972 |
69
+ | 1.7943 | 16.0 | 400 | 1.8892 | 0.6818 | 0.4210 |
70
+ | 1.6619 | 17.0 | 425 | 1.8439 | 0.6818 | 0.4149 |
71
+ | 1.5391 | 18.0 | 450 | 1.7247 | 0.7159 | 0.4848 |
72
+ | 1.4415 | 19.0 | 475 | 1.6650 | 0.7273 | 0.4749 |
73
+ | 1.2834 | 20.0 | 500 | 1.5743 | 0.7727 | 0.5574 |
74
+ | 1.2245 | 21.0 | 525 | 1.5396 | 0.7614 | 0.5373 |
75
+ | 1.1629 | 22.0 | 550 | 1.5005 | 0.7614 | 0.5350 |
76
+ | 1.0894 | 23.0 | 575 | 1.4478 | 0.7614 | 0.5383 |
77
+ | 0.9755 | 24.0 | 600 | 1.4335 | 0.7841 | 0.5599 |
78
+ | 0.9271 | 25.0 | 625 | 1.4195 | 0.7841 | 0.5562 |
79
+ | 0.8761 | 26.0 | 650 | 1.3740 | 0.8182 | 0.6015 |
80
+ | 0.8312 | 27.0 | 675 | 1.3479 | 0.8295 | 0.6086 |
81
+ | 0.7523 | 28.0 | 700 | 1.3379 | 0.8295 | 0.5948 |
82
+ | 0.718 | 29.0 | 725 | 1.2991 | 0.8295 | 0.5948 |
83
+ | 0.6819 | 30.0 | 750 | 1.3059 | 0.8409 | 0.6047 |
84
+ | 0.6771 | 31.0 | 775 | 1.2650 | 0.8636 | 0.6167 |
85
+ | 0.6267 | 32.0 | 800 | 1.2905 | 0.8523 | 0.6252 |
86
+ | 0.6068 | 33.0 | 825 | 1.2559 | 0.875 | 0.6248 |
87
+ | 0.5811 | 34.0 | 850 | 1.2371 | 0.875 | 0.6248 |
88
+ | 0.5579 | 35.0 | 875 | 1.2231 | 0.875 | 0.6248 |
89
+ | 0.5385 | 36.0 | 900 | 1.2342 | 0.875 | 0.6248 |
90
+ | 0.5334 | 37.0 | 925 | 1.2255 | 0.875 | 0.6248 |
91
+ | 0.4868 | 38.0 | 950 | 1.2223 | 0.875 | 0.6248 |
92
+ | 0.5228 | 39.0 | 975 | 1.2078 | 0.875 | 0.6248 |
93
+ | 0.5325 | 40.0 | 1000 | 1.2101 | 0.875 | 0.6248 |
94
 
95
 
96
  ### Framework versions
config.json CHANGED
@@ -10,62 +10,62 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "LABEL_0",
14
- "1": "LABEL_1",
15
- "2": "LABEL_2",
16
- "3": "LABEL_3",
17
- "4": "LABEL_4",
18
- "5": "LABEL_5",
19
- "6": "LABEL_6",
20
- "7": "LABEL_7",
21
- "8": "LABEL_8",
22
- "9": "LABEL_9",
23
- "10": "LABEL_10",
24
- "11": "LABEL_11",
25
- "12": "LABEL_12",
26
- "13": "LABEL_13",
27
- "14": "LABEL_14",
28
- "15": "LABEL_15",
29
- "16": "LABEL_16",
30
- "17": "LABEL_17",
31
- "18": "LABEL_18",
32
- "19": "LABEL_19",
33
- "20": "LABEL_20",
34
- "21": "LABEL_21",
35
- "22": "LABEL_22",
36
- "23": "LABEL_23",
37
- "24": "LABEL_24",
38
- "25": "LABEL_25"
39
  },
40
  "initializer_range": 0.02,
41
  "intermediate_size": 3072,
42
  "label2id": {
43
- "LABEL_0": 0,
44
- "LABEL_1": 1,
45
- "LABEL_10": 10,
46
- "LABEL_11": 11,
47
- "LABEL_12": 12,
48
- "LABEL_13": 13,
49
- "LABEL_14": 14,
50
- "LABEL_15": 15,
51
- "LABEL_16": 16,
52
- "LABEL_17": 17,
53
- "LABEL_18": 18,
54
- "LABEL_19": 19,
55
- "LABEL_2": 2,
56
- "LABEL_20": 20,
57
- "LABEL_21": 21,
58
- "LABEL_22": 22,
59
- "LABEL_23": 23,
60
- "LABEL_24": 24,
61
- "LABEL_25": 25,
62
- "LABEL_3": 3,
63
- "LABEL_4": 4,
64
- "LABEL_5": 5,
65
- "LABEL_6": 6,
66
- "LABEL_7": 7,
67
- "LABEL_8": 8,
68
- "LABEL_9": 9
69
  },
70
  "layer_norm_eps": 1e-05,
71
  "max_position_embeddings": 514,
 
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "1025",
14
+ "1": "1071",
15
+ "2": "131",
16
+ "3": "138",
17
+ "4": "284",
18
+ "5": "285",
19
+ "6": "435",
20
+ "7": "436",
21
+ "8": "595",
22
+ "9": "657",
23
+ "10": "664",
24
+ "11": "682",
25
+ "12": "684",
26
+ "13": "691",
27
+ "14": "693",
28
+ "15": "697",
29
+ "16": "703",
30
+ "17": "706",
31
+ "18": "707",
32
+ "19": "710",
33
+ "20": "74",
34
+ "21": "754",
35
+ "22": "829",
36
+ "23": "862",
37
+ "24": "913",
38
+ "25": "94"
39
  },
40
  "initializer_range": 0.02,
41
  "intermediate_size": 3072,
42
  "label2id": {
43
+ "1025": 0,
44
+ "1071": 1,
45
+ "131": 2,
46
+ "138": 3,
47
+ "284": 4,
48
+ "285": 5,
49
+ "435": 6,
50
+ "436": 7,
51
+ "595": 8,
52
+ "657": 9,
53
+ "664": 10,
54
+ "682": 11,
55
+ "684": 12,
56
+ "691": 13,
57
+ "693": 14,
58
+ "697": 15,
59
+ "703": 16,
60
+ "706": 17,
61
+ "707": 18,
62
+ "710": 19,
63
+ "74": 20,
64
+ "754": 21,
65
+ "829": 22,
66
+ "862": 23,
67
+ "913": 24,
68
+ "94": 25
69
  },
70
  "layer_norm_eps": 1e-05,
71
  "max_position_embeddings": 514,
emissions.csv CHANGED
@@ -1,2 +1,2 @@
1
  timestamp,project_name,run_id,experiment_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue
2
- 2025-09-03T12:17:12,codecarbon,0159c288-82a1-46d0-9f6f-69a628c2f5b5,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,374.19489855505526,0.006155503634068279,1.64499934601931e-05,42.5,344.38833905205155,94.34468507766725,0.004413893506312161,0.044265694023621904,0.009797784709772923,0.058477372239707004,Luxembourg,LUX,luxembourg,,,Linux-6.8.0-71-generic-x86_64-with-glibc2.39,3.12.3,2.8.4,64,AMD EPYC 9124 16-Core Processor,2,2 x NVIDIA L40S,6.1294,49.6113,251.5858268737793,machine,N,1.0
 
1
  timestamp,project_name,run_id,experiment_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue
2
+ 2025-09-03T13:07:51,codecarbon,a4931482-bbba-40f5-bb09-0c95cf8cc22d,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,381.010146320099,0.006199580425386055,1.6271431312954028e-05,42.5,399.8939178594448,94.34468507766725,0.004493555260882164,0.04442788498671746,0.009974662137721463,0.05889610238532109,Luxembourg,LUX,luxembourg,,,Linux-6.8.0-71-generic-x86_64-with-glibc2.39,3.12.3,2.8.4,64,AMD EPYC 9124 16-Core Processor,2,2 x NVIDIA L40S,6.1294,49.6113,251.5858268737793,machine,N,1.0
metrics.json CHANGED
@@ -1,9 +1,9 @@
1
  {
2
- "eval_loss": 1.541908621788025,
3
- "eval_accuracy": 0.8068181818181818,
4
- "eval_f1_macro": 0.37820512820512825,
5
- "eval_runtime": 0.2819,
6
- "eval_samples_per_second": 312.153,
7
- "eval_steps_per_second": 10.642,
8
  "epoch": 40.0
9
  }
 
1
  {
2
+ "eval_loss": 1.2077823877334595,
3
+ "eval_accuracy": 0.875,
4
+ "eval_f1_macro": 0.6247628726287263,
5
+ "eval_runtime": 0.2845,
6
+ "eval_samples_per_second": 309.345,
7
+ "eval_steps_per_second": 10.546,
8
  "epoch": 40.0
9
  }