PyTorch
llama
alignment-handbook
Generated from Trainer
{
    "epoch": 1.0,
    "eval_logits/chosen": -1.203917384147644,
    "eval_logits/rejected": -1.191305160522461,
    "eval_logps/chosen": -463.00738525390625,
    "eval_logps/rejected": -795.7894897460938,
    "eval_loss": 0.4001549482345581,
    "eval_rewards/accuracies": 0.8553571701049805,
    "eval_rewards/chosen": -2.2448642253875732,
    "eval_rewards/margins": 3.252180576324463,
    "eval_rewards/rejected": -5.497044563293457,
    "eval_runtime": 203.3522,
    "eval_samples": 4461,
    "eval_samples_per_second": 21.937,
    "eval_steps_per_second": 0.344,
    "total_flos": 0.0,
    "train_loss": 0.4717116741438516,
    "train_runtime": 20989.4384,
    "train_samples": 133368,
    "train_samples_per_second": 6.354,
    "train_steps_per_second": 0.199
}
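These figures read like the end-of-run train/eval summary a Trainer-based DPO job writes out, where the reward margin is the chosen reward minus the rejected reward (here -2.2449 - (-5.4970) ≈ 3.2522, matching the reported `eval_rewards/margins`). A minimal sketch of loading and sanity-checking the file, assuming it is saved as `all_results.json` next to this card (the file name is an assumption, not shown above):

```python
import json

# Hypothetical path; the actual file name is not given on this page.
with open("all_results.json") as f:
    results = json.load(f)

# In DPO-style logging the margin should equal chosen reward minus rejected reward.
margin = results["eval_rewards/chosen"] - results["eval_rewards/rejected"]
assert abs(margin - results["eval_rewards/margins"]) < 1e-4, "inconsistent reward margin"

print(f"eval reward margin: {margin:.4f}")
print(f"eval accuracy:      {results['eval_rewards/accuracies']:.3f}")
print(f"train loss:         {results['train_loss']:.4f}")
```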