{
  "best_metric": 1.1455323696136475,
  "best_model_checkpoint": "./checkpoints/t5-large/checkpoint-13314",
  "epoch": 14.0,
  "global_step": 13314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.27,
      "learning_rate": 2.5e-06,
      "loss": 12.6774,
      "step": 256
    },
    {
      "epoch": 0.54,
      "learning_rate": 5e-06,
      "loss": 5.0181,
      "step": 512
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.930840717527556e-06,
      "loss": 1.8667,
      "step": 768
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.3393584489822388,
      "eval_runtime": 7.0532,
      "eval_samples_per_second": 70.89,
      "eval_steps_per_second": 8.932,
      "step": 951
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.8616814350551114e-06,
      "loss": 1.5018,
      "step": 1024
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.792522152582667e-06,
      "loss": 1.343,
      "step": 1280
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.723362870110223e-06,
      "loss": 1.334,
      "step": 1536
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.654203587637779e-06,
      "loss": 1.2965,
      "step": 1792
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.22269606590271,
      "eval_runtime": 7.1665,
      "eval_samples_per_second": 69.77,
      "eval_steps_per_second": 8.791,
      "step": 1902
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.585044305165334e-06,
      "loss": 1.2232,
      "step": 2048
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.51588502269289e-06,
      "loss": 1.2218,
      "step": 2304
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.446725740220445e-06,
      "loss": 1.2472,
      "step": 2560
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.377566457748001e-06,
      "loss": 1.2539,
      "step": 2816
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.1890126466751099,
      "eval_runtime": 7.0492,
      "eval_samples_per_second": 70.93,
      "eval_steps_per_second": 8.937,
      "step": 2853
    },
    {
      "epoch": 3.23,
      "learning_rate": 4.308407175275557e-06,
      "loss": 1.215,
      "step": 3072
    },
    {
      "epoch": 3.5,
      "learning_rate": 4.239247892803113e-06,
      "loss": 1.1736,
      "step": 3328
    },
    {
      "epoch": 3.77,
      "learning_rate": 4.170088610330668e-06,
      "loss": 1.1738,
      "step": 3584
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.1715768575668335,
      "eval_runtime": 6.9966,
      "eval_samples_per_second": 71.463,
      "eval_steps_per_second": 9.004,
      "step": 3804
    },
    {
      "epoch": 4.04,
      "learning_rate": 4.100929327858224e-06,
      "loss": 1.2254,
      "step": 3840
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.031770045385779e-06,
      "loss": 1.2022,
      "step": 4096
    },
    {
      "epoch": 4.58,
      "learning_rate": 3.962610762913335e-06,
      "loss": 1.1542,
      "step": 4352
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.893451480440891e-06,
      "loss": 1.1795,
      "step": 4608
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.162368655204773,
      "eval_runtime": 6.9995,
      "eval_samples_per_second": 71.433,
      "eval_steps_per_second": 9.001,
      "step": 4755
    },
    {
      "epoch": 5.11,
      "learning_rate": 3.824292197968447e-06,
      "loss": 1.1573,
      "step": 4864
    },
    {
      "epoch": 5.38,
      "learning_rate": 3.755132915496002e-06,
      "loss": 1.1332,
      "step": 5120
    },
    {
      "epoch": 5.65,
      "learning_rate": 3.685973633023558e-06,
      "loss": 1.1795,
      "step": 5376
    },
    {
      "epoch": 5.92,
      "learning_rate": 3.616814350551113e-06,
      "loss": 1.1308,
      "step": 5632
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.1566176414489746,
      "eval_runtime": 7.0064,
      "eval_samples_per_second": 71.364,
      "eval_steps_per_second": 8.992,
      "step": 5706
    },
    {
      "epoch": 6.19,
      "learning_rate": 3.547655068078669e-06,
      "loss": 1.1676,
      "step": 5888
    },
    {
      "epoch": 6.46,
      "learning_rate": 3.4784957856062245e-06,
      "loss": 1.1117,
      "step": 6144
    },
    {
      "epoch": 6.73,
      "learning_rate": 3.4093365031337805e-06,
      "loss": 1.1289,
      "step": 6400
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.340177220661336e-06,
      "loss": 1.1195,
      "step": 6656
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.1531755924224854,
      "eval_runtime": 6.9771,
      "eval_samples_per_second": 71.663,
      "eval_steps_per_second": 9.03,
      "step": 6657
    },
    {
      "epoch": 7.27,
      "learning_rate": 3.271017938188892e-06,
      "loss": 1.1091,
      "step": 6912
    },
    {
      "epoch": 7.54,
      "learning_rate": 3.201858655716447e-06,
      "loss": 1.1003,
      "step": 7168
    },
    {
      "epoch": 7.81,
      "learning_rate": 3.132699373244003e-06,
      "loss": 1.1075,
      "step": 7424
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.1493767499923706,
      "eval_runtime": 6.9872,
      "eval_samples_per_second": 71.56,
      "eval_steps_per_second": 9.017,
      "step": 7608
    },
    {
      "epoch": 8.08,
      "learning_rate": 3.0635400907715584e-06,
      "loss": 1.1445,
      "step": 7680
    },
    {
      "epoch": 8.34,
      "learning_rate": 2.9943808082991144e-06,
      "loss": 1.0677,
      "step": 7936
    },
    {
      "epoch": 8.61,
      "learning_rate": 2.92522152582667e-06,
      "loss": 1.1393,
      "step": 8192
    },
    {
      "epoch": 8.88,
      "learning_rate": 2.856062243354226e-06,
      "loss": 1.0989,
      "step": 8448
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.1490625143051147,
      "eval_runtime": 7.0593,
      "eval_samples_per_second": 70.828,
      "eval_steps_per_second": 8.924,
      "step": 8559
    },
    {
      "epoch": 9.15,
      "learning_rate": 2.786902960881781e-06,
      "loss": 1.0875,
      "step": 8704
    },
    {
      "epoch": 9.42,
      "learning_rate": 2.717743678409337e-06,
      "loss": 1.0832,
      "step": 8960
    },
    {
      "epoch": 9.69,
      "learning_rate": 2.6485843959368923e-06,
      "loss": 1.0572,
      "step": 9216
    },
    {
      "epoch": 9.96,
      "learning_rate": 2.5794251134644482e-06,
      "loss": 1.097,
      "step": 9472
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.1468651294708252,
      "eval_runtime": 7.1468,
      "eval_samples_per_second": 69.962,
      "eval_steps_per_second": 8.815,
      "step": 9510
    },
    {
      "epoch": 10.23,
      "learning_rate": 2.5102658309920038e-06,
      "loss": 1.0672,
      "step": 9728
    },
    {
      "epoch": 10.5,
      "learning_rate": 2.4411065485195593e-06,
      "loss": 1.0832,
      "step": 9984
    },
    {
      "epoch": 10.77,
      "learning_rate": 2.3719472660471148e-06,
      "loss": 1.0644,
      "step": 10240
    },
    {
      "epoch": 11.0,
      "eval_loss": 1.1458113193511963,
      "eval_runtime": 7.0737,
      "eval_samples_per_second": 70.684,
      "eval_steps_per_second": 8.906,
      "step": 10461
    },
    {
      "epoch": 11.04,
      "learning_rate": 2.3027879835746707e-06,
      "loss": 1.0828,
      "step": 10496
    },
    {
      "epoch": 11.31,
      "learning_rate": 2.233628701102226e-06,
      "loss": 1.0609,
      "step": 10752
    },
    {
      "epoch": 11.58,
      "learning_rate": 2.1644694186297817e-06,
      "loss": 1.0648,
      "step": 11008
    },
    {
      "epoch": 11.84,
      "learning_rate": 2.0953101361573376e-06,
      "loss": 1.0587,
      "step": 11264
    },
    {
      "epoch": 12.0,
      "eval_loss": 1.146124005317688,
      "eval_runtime": 7.0057,
      "eval_samples_per_second": 71.371,
      "eval_steps_per_second": 8.993,
      "step": 11412
    },
    {
      "epoch": 12.11,
      "learning_rate": 2.026150853684893e-06,
      "loss": 1.0746,
      "step": 11520
    },
    {
      "epoch": 12.38,
      "learning_rate": 1.9569915712124487e-06,
      "loss": 1.0449,
      "step": 11776
    },
    {
      "epoch": 12.65,
      "learning_rate": 1.8878322887400044e-06,
      "loss": 1.0563,
      "step": 12032
    },
    {
      "epoch": 12.92,
      "learning_rate": 1.81867300626756e-06,
      "loss": 1.0635,
      "step": 12288
    },
    {
      "epoch": 13.0,
      "eval_loss": 1.1461944580078125,
      "eval_runtime": 7.0215,
      "eval_samples_per_second": 71.209,
      "eval_steps_per_second": 8.972,
      "step": 12363
    },
    {
      "epoch": 13.19,
      "learning_rate": 1.7495137237951156e-06,
      "loss": 1.0407,
      "step": 12544
    },
    {
      "epoch": 13.46,
      "learning_rate": 1.6803544413226713e-06,
      "loss": 1.0555,
      "step": 12800
    },
    {
      "epoch": 13.73,
      "learning_rate": 1.611195158850227e-06,
      "loss": 1.0174,
      "step": 13056
    },
    {
      "epoch": 14.0,
      "learning_rate": 1.5420358763777825e-06,
      "loss": 1.0661,
      "step": 13312
    },
    {
      "epoch": 14.0,
      "eval_loss": 1.1455323696136475,
      "eval_runtime": 7.1221,
      "eval_samples_per_second": 70.204,
      "eval_steps_per_second": 8.846,
      "step": 13314
    }
  ],
  "max_steps": 19020,
  "num_train_epochs": 20,
  "total_flos": 2.3059242172416e+17,
  "trial_name": null,
  "trial_params": null
}