[
  {
    "loss": 1.7883309326171875,
    "learning_rate": 4.4444444444444447e-05,
    "epoch": 0.2222222222222222,
    "total_flos": 4360067088384000,
    "step": 500
  },
  {
    "loss": 1.5973499755859375,
    "learning_rate": 3.888888888888889e-05,
    "epoch": 0.4444444444444444,
    "total_flos": 8720134176768000,
    "step": 1000
  },
  {
    "loss": 1.551248291015625,
    "learning_rate": 3.3333333333333335e-05,
    "epoch": 0.6666666666666666,
    "total_flos": 13080201265152000,
    "step": 1500
  },
  {
    "loss": 1.51922998046875,
    "learning_rate": 2.777777777777778e-05,
    "epoch": 0.8888888888888888,
    "total_flos": 17440268353536000,
    "step": 2000
  },
  {
    "loss": 1.4523193359375,
    "learning_rate": 2.2222222222222223e-05,
    "epoch": 1.1111111111111112,
    "total_flos": 21800335441920000,
    "step": 2500
  },
  {
    "loss": 1.389939453125,
    "learning_rate": 1.6666666666666667e-05,
    "epoch": 1.3333333333333333,
    "total_flos": 26160402530304000,
    "step": 3000
  },
  {
    "loss": 1.3925107421875,
    "learning_rate": 1.1111111111111112e-05,
    "epoch": 1.5555555555555556,
    "total_flos": 30520469618688000,
    "step": 3500
  },
  {
    "loss": 1.36883984375,
    "learning_rate": 5.555555555555556e-06,
    "epoch": 1.7777777777777777,
    "total_flos": 34880536707072000,
    "step": 4000
  },
  {
    "loss": 1.3849873046875,
    "learning_rate": 0.0,
    "epoch": 2.0,
    "total_flos": 39240603795456000,
    "step": 4500
  }
]