{
"llama": {
"bias": 4.837947675026953,
"position_coeff": -0.014912999235093592,
"token_coeff": 0.0017892057076102294,
"score": 0.00015952710686484295,
"model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"
}
}