{
"train": {
"token_level": {
"O": {
"precision": 0.9992820624388121,
"recall": 0.9996517802733524,
"f1-score": 0.9994668871650365,
"support": 45948.0
},
"B-DomainMotif": {
"precision": 0.9736842105263158,
"recall": 0.9487179487179487,
"f1-score": 0.961038961038961,
"support": 195.0
},
"I-DomainMotif": {
"precision": 0.9962406015037594,
"recall": 0.9888059701492538,
"f1-score": 0.9925093632958801,
"support": 536.0
},
"B-FamilyName": {
"precision": 0.9926289926289926,
"recall": 0.9901960784313726,
"f1-score": 0.9914110429447853,
"support": 816.0
},
"I-FamilyName": {
"precision": 0.9940054495912807,
"recall": 0.9972662657189721,
"f1-score": 0.9956331877729258,
"support": 1829.0
},
"B-Gene": {
"precision": 0.9959641255605381,
"recall": 0.991960696739616,
"f1-score": 0.993958379950772,
"support": 2239.0
},
"I-Gene": {
"precision": 0.9986388384754991,
"recall": 0.9979596463386987,
"f1-score": 0.9982991268851343,
"support": 4411.0
},
"accuracy": 0.9987136884982313,
"macro avg": {
"precision": 0.9929206115321711,
"recall": 0.987794055195602,
"f1-score": 0.9903309927219279,
"support": 55974.0
},
"weighted avg": {
"precision": 0.9987109444979878,
"recall": 0.9987136884982313,
"f1-score": 0.9987113109741671,
"support": 55974.0
}
},
"span_level": {
"DomainMotif": {
"precision": 0.9067357512953368,
"recall": 0.8883248730964467,
"f1-score": 0.8974358974358975,
"support": 197
},
"FamilyName": {
"precision": 0.9755799755799756,
"recall": 0.9803680981595092,
"f1-score": 0.97796817625459,
"support": 815
},
"Gene": {
"precision": 0.9882671480144405,
"recall": 0.9763709317877842,
"f1-score": 0.9822830230993497,
"support": 2243
},
"macro avg": {
"precision": 0.9568609582965842,
"recall": 0.9483546343479133,
"f1-score": 0.9525623655966124,
"support": 3255
},
"weighted avg": {
"precision": 0.9801560172347932,
"recall": 0.9720430107526882,
"f1-score": 0.9760675134421517,
"support": 3255
}
}
},
"val": {
"token_level": {
"O": {
"precision": 0.982154530003922,
"recall": 0.982796964939822,
"f1-score": 0.9824756424507944,
"support": 15288.0
},
"B-DomainMotif": {
"precision": 0.7536231884057971,
"recall": 0.6419753086419753,
"f1-score": 0.6933333333333334,
"support": 81.0
},
"I-DomainMotif": {
"precision": 0.8457142857142858,
"recall": 0.714975845410628,
"f1-score": 0.774869109947644,
"support": 207.0
},
"B-FamilyName": {
"precision": 0.6816479400749064,
"recall": 0.6086956521739131,
"f1-score": 0.6431095406360424,
"support": 299.0
},
"I-FamilyName": {
"precision": 0.7571428571428571,
"recall": 0.6656200941915228,
"f1-score": 0.70843776106934,
"support": 637.0
},
"B-Gene": {
"precision": 0.8721071863580999,
"recall": 0.9384010484927916,
"f1-score": 0.9040404040404041,
"support": 763.0
},
"I-Gene": {
"precision": 0.8863487916394513,
"recall": 0.9384508990318119,
"f1-score": 0.9116560295599597,
"support": 1446.0
},
"accuracy": 0.9563591688478179,
"macro avg": {
"precision": 0.8255341113341885,
"recall": 0.784416544697495,
"f1-score": 0.8025602601482168,
"support": 18721.0
},
"weighted avg": {
"precision": 0.9553162576832412,
"recall": 0.9563591688478179,
"f1-score": 0.9555177384234166,
"support": 18721.0
}
},
"span_level": {
"DomainMotif": {
"precision": 0.6133333333333333,
"recall": 0.5679012345679012,
"f1-score": 0.5897435897435898,
"support": 81
},
"FamilyName": {
"precision": 0.5387323943661971,
"recall": 0.5134228187919463,
"f1-score": 0.5257731958762886,
"support": 298
},
"Gene": {
"precision": 0.8486682808716707,
"recall": 0.9127604166666666,
"f1-score": 0.8795483061480552,
"support": 768
},
"macro avg": {
"precision": 0.6669113361904003,
"recall": 0.6646948233421713,
"f1-score": 0.6650216972559778,
"support": 1147
},
"weighted avg": {
"precision": 0.7515252774460068,
"recall": 0.7846556233653008,
"f1-score": 0.7671689121726863,
"support": 1147
}
}
},
"test": {
"token_level": {
"O": {
"precision": 0.9869390488948426,
"recall": 0.9731650937657508,
"f1-score": 0.9800036754732172,
"support": 57537.0
},
"B-DomainMotif": {
"precision": 0.7365591397849462,
"recall": 0.7611111111111111,
"f1-score": 0.7486338797814208,
"support": 360.0
},
"I-DomainMotif": {
"precision": 0.8471337579617835,
"recall": 0.81511746680286,
"f1-score": 0.8308172826652785,
"support": 979.0
},
"B-FamilyName": {
"precision": 0.6186627479794269,
"recall": 0.6736,
"f1-score": 0.6449636154729989,
"support": 1250.0
},
"I-FamilyName": {
"precision": 0.6251338807568726,
"recall": 0.705763804917372,
"f1-score": 0.6630064369556986,
"support": 2481.0
},
"B-Gene": {
"precision": 0.8868312757201646,
"recall": 0.9337666357164964,
"f1-score": 0.9096939544700738,
"support": 3231.0
},
"I-Gene": {
"precision": 0.8947873361048929,
"recall": 0.9283344392833444,
"f1-score": 0.9112522390490149,
"support": 6028.0
},
"accuracy": 0.9499763448640526,
"macro avg": {
"precision": 0.7994353124575614,
"recall": 0.8272655073709906,
"f1-score": 0.8126244405525289,
"support": 71866.0
},
"weighted avg": {
"precision": 0.9526540061037759,
"recall": 0.9499763448640526,
"f1-score": 0.9511135021493018,
"support": 71866.0
}
},
"span_level": {
"DomainMotif": {
"precision": 0.6019900497512438,
"recall": 0.6703601108033241,
"f1-score": 0.6343381389252949,
"support": 361
},
"FamilyName": {
"precision": 0.4965083798882682,
"recall": 0.5688,
"f1-score": 0.5302013422818792,
"support": 1250
},
"Gene": {
"precision": 0.8559102674719585,
"recall": 0.9227906976744186,
"f1-score": 0.8880931065353624,
"support": 3225
},
"macro avg": {
"precision": 0.6514695657038235,
"recall": 0.7206502694925808,
"f1-score": 0.6842108625808455,
"support": 4836
},
"weighted avg": {
"precision": 0.7440580015338297,
"recall": 0.8124483043837882,
"f1-score": 0.7766435100456833,
"support": 4836
}
}
}
}