{
    "epoch": 1.0,
    "eval_logits/chosen": -0.5580978393554688,
    "eval_logits/rejected": -0.579554557800293,
    "eval_logps/chosen": -453.0203552246094,
    "eval_logps/rejected": -622.4476928710938,
    "eval_loss": 0.42684775590896606,
    "eval_rewards/accuracies": 0.8410714268684387,
    "eval_rewards/chosen": -2.0037572383880615,
    "eval_rewards/margins": 1.612953782081604,
    "eval_rewards/rejected": -3.616710901260376,
    "eval_runtime": 203.0312,
    "eval_samples": 4461,
    "eval_samples_per_second": 21.972,
    "eval_steps_per_second": 0.345,
    "total_flos": 0.0,
    "train_loss": 0.49609584714538074,
    "train_runtime": 16148.8615,
    "train_samples": 133368,
    "train_samples_per_second": 8.259,
    "train_steps_per_second": 0.258
}
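
These are the aggregate training and evaluation metrics written out at the end of the run (the rewards/* keys are the chosen/rejected reward statistics logged by a DPO-style preference-tuning run, as in the alignment-handbook recipes). As a quick consistency check, the reported reward margin should equal the chosen reward minus the rejected reward, and throughput should equal samples divided by runtime. A minimal sketch, assuming the file above is saved locally as all_results.json (the filename is an assumption):

import json

# Load the metrics shown above; "all_results.json" is an assumed local filename.
with open("all_results.json") as f:
    metrics = json.load(f)

# The margins metric is reported as chosen reward minus rejected reward,
# so the three values should agree up to floating-point averaging error.
margin = metrics["eval_rewards/chosen"] - metrics["eval_rewards/rejected"]
assert abs(margin - metrics["eval_rewards/margins"]) < 1e-3

# Throughput should be consistent with samples divided by runtime.
throughput = metrics["train_samples"] / metrics["train_runtime"]
assert abs(throughput - metrics["train_samples_per_second"]) < 0.01

print(f"eval reward margin: {margin:.3f}  (accuracy {metrics['eval_rewards/accuracies']:.3f})")
print(f"train throughput: {throughput:.2f} samples/s over {metrics['epoch']:.0f} epoch")

With the values above this yields an evaluation reward margin of about 1.613 at roughly 84.1% reward accuracy, and a training throughput of about 8.26 samples per second over one epoch.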