{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999235220638846,
  "eval_steps": 500,
  "global_step": 4903,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0020394116297448184,
      "grad_norm": 416.0261542171565,
      "learning_rate": 3.378378378378379e-07,
      "loss": 73.7906,
      "step": 10
    },
    {
      "epoch": 0.004078823259489637,
      "grad_norm": 176.44597487763102,
      "learning_rate": 6.756756756756758e-07,
      "loss": 59.368,
      "step": 20
    },
    {
      "epoch": 0.006118234889234456,
      "grad_norm": 86.02408883452614,
      "learning_rate": 1.0135135135135136e-06,
      "loss": 43.9054,
      "step": 30
    },
    {
      "epoch": 0.008157646518979274,
      "grad_norm": 48.78099953842141,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 37.344,
      "step": 40
    },
    {
      "epoch": 0.010197058148724092,
      "grad_norm": 59.00532328533055,
      "learning_rate": 1.6891891891891894e-06,
      "loss": 35.4945,
      "step": 50
    },
    {
      "epoch": 0.012236469778468912,
      "grad_norm": 42.96551818056488,
      "learning_rate": 2.0270270270270273e-06,
      "loss": 34.506,
      "step": 60
    },
    {
      "epoch": 0.01427588140821373,
      "grad_norm": 31.65537424852591,
      "learning_rate": 2.364864864864865e-06,
      "loss": 33.6203,
      "step": 70
    },
    {
      "epoch": 0.016315293037958548,
      "grad_norm": 54.92033486362346,
      "learning_rate": 2.702702702702703e-06,
      "loss": 33.1313,
      "step": 80
    },
    {
      "epoch": 0.018354704667703368,
      "grad_norm": 40.46176095549042,
      "learning_rate": 3.040540540540541e-06,
      "loss": 32.157,
      "step": 90
    },
    {
      "epoch": 0.020394116297448184,
      "grad_norm": 61.273325432745324,
      "learning_rate": 3.3783783783783788e-06,
      "loss": 31.0497,
      "step": 100
    },
    {
      "epoch": 0.022433527927193005,
      "grad_norm": 64.3759451606204,
      "learning_rate": 3.7162162162162162e-06,
      "loss": 29.0966,
      "step": 110
    },
    {
      "epoch": 0.024472939556937825,
      "grad_norm": 49.81979980897721,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 25.7664,
      "step": 120
    },
    {
      "epoch": 0.02651235118668264,
      "grad_norm": 53.97034325712405,
      "learning_rate": 4.391891891891892e-06,
      "loss": 22.6339,
      "step": 130
    },
    {
      "epoch": 0.02855176281642746,
      "grad_norm": 45.22248833674121,
      "learning_rate": 4.72972972972973e-06,
      "loss": 18.4785,
      "step": 140
    },
    {
      "epoch": 0.03059117444617228,
      "grad_norm": 41.55548956071476,
      "learning_rate": 4.999997817427676e-06,
      "loss": 14.245,
      "step": 150
    },
    {
      "epoch": 0.032630586075917095,
      "grad_norm": 41.40754232941594,
      "learning_rate": 4.999921427796456e-06,
      "loss": 10.8795,
      "step": 160
    },
    {
      "epoch": 0.03466999770566192,
      "grad_norm": 25.881466706955045,
      "learning_rate": 4.99973591335987e-06,
      "loss": 8.5891,
      "step": 170
    },
    {
      "epoch": 0.036709409335406736,
      "grad_norm": 25.894728638081446,
      "learning_rate": 4.999441282215864e-06,
      "loss": 7.8307,
      "step": 180
    },
    {
      "epoch": 0.03874882096515155,
      "grad_norm": 28.336080933989287,
      "learning_rate": 4.999037547225467e-06,
      "loss": 7.4253,
      "step": 190
    },
    {
      "epoch": 0.04078823259489637,
      "grad_norm": 29.417876548615816,
      "learning_rate": 4.998524726012237e-06,
      "loss": 7.3717,
      "step": 200
    },
    {
      "epoch": 0.04282764422464119,
      "grad_norm": 26.239293765079168,
      "learning_rate": 4.99790284096148e-06,
      "loss": 7.2478,
      "step": 210
    },
    {
      "epoch": 0.04486705585438601,
      "grad_norm": 14.542416285761892,
      "learning_rate": 4.997171919219285e-06,
      "loss": 6.9808,
      "step": 220
    },
    {
      "epoch": 0.046906467484130826,
      "grad_norm": 21.57349128905767,
      "learning_rate": 4.996331992691331e-06,
      "loss": 6.8236,
      "step": 230
    },
    {
      "epoch": 0.04894587911387565,
      "grad_norm": 16.18485452820067,
      "learning_rate": 4.9953830980414995e-06,
      "loss": 6.5693,
      "step": 240
    },
    {
      "epoch": 0.050985290743620466,
      "grad_norm": 17.24603864401605,
      "learning_rate": 4.994325276690269e-06,
      "loss": 6.4391,
      "step": 250
    },
    {
      "epoch": 0.05302470237336528,
      "grad_norm": 17.815416004459667,
      "learning_rate": 4.99315857481291e-06,
      "loss": 6.6979,
      "step": 260
    },
    {
      "epoch": 0.0550641140031101,
      "grad_norm": 18.11307456753595,
      "learning_rate": 4.991883043337469e-06,
      "loss": 7.0256,
      "step": 270
    },
    {
      "epoch": 0.05710352563285492,
      "grad_norm": 14.094011405950782,
      "learning_rate": 4.990498737942546e-06,
      "loss": 6.8038,
      "step": 280
    },
    {
      "epoch": 0.05914293726259974,
      "grad_norm": 17.08019015337673,
      "learning_rate": 4.9890057190548624e-06,
      "loss": 6.5316,
      "step": 290
    },
    {
      "epoch": 0.06118234889234456,
      "grad_norm": 14.037189749911553,
      "learning_rate": 4.987404051846626e-06,
      "loss": 6.6865,
      "step": 300
    },
    {
      "epoch": 0.06322176052208937,
      "grad_norm": 16.524361976050557,
      "learning_rate": 4.985693806232682e-06,
      "loss": 6.6122,
      "step": 310
    },
    {
      "epoch": 0.06526117215183419,
      "grad_norm": 14.482343818627136,
      "learning_rate": 4.983875056867465e-06,
      "loss": 6.7456,
      "step": 320
    },
    {
      "epoch": 0.06730058378157902,
      "grad_norm": 15.112376227157249,
      "learning_rate": 4.981947883141738e-06,
      "loss": 6.7078,
      "step": 330
    },
    {
      "epoch": 0.06933999541132384,
      "grad_norm": 10.698156963129385,
      "learning_rate": 4.979912369179129e-06,
      "loss": 6.2382,
      "step": 340
    },
    {
      "epoch": 0.07137940704106865,
      "grad_norm": 11.770671514851704,
      "learning_rate": 4.977768603832454e-06,
      "loss": 6.5136,
      "step": 350
    },
    {
      "epoch": 0.07341881867081347,
      "grad_norm": 10.786322429347136,
      "learning_rate": 4.975516680679847e-06,
      "loss": 6.5095,
      "step": 360
    },
    {
      "epoch": 0.07545823030055829,
      "grad_norm": 9.074114726707114,
      "learning_rate": 4.973156698020667e-06,
      "loss": 6.4467,
      "step": 370
    },
    {
      "epoch": 0.0774976419303031,
      "grad_norm": 12.61967333137717,
      "learning_rate": 4.970688758871211e-06,
      "loss": 6.3277,
      "step": 380
    },
    {
      "epoch": 0.07953705356004792,
      "grad_norm": 5.904025228887194,
      "learning_rate": 4.968112970960217e-06,
      "loss": 6.2881,
      "step": 390
    },
    {
      "epoch": 0.08157646518979274,
      "grad_norm": 15.099418681801705,
      "learning_rate": 4.96542944672416e-06,
      "loss": 6.405,
      "step": 400
    },
    {
      "epoch": 0.08361587681953757,
      "grad_norm": 11.908714596527965,
      "learning_rate": 4.962638303302345e-06,
      "loss": 6.4003,
      "step": 410
    },
    {
      "epoch": 0.08565528844928239,
      "grad_norm": 13.433001486568871,
      "learning_rate": 4.959739662531796e-06,
      "loss": 6.1106,
      "step": 420
    },
    {
      "epoch": 0.0876947000790272,
      "grad_norm": 11.497300438164624,
      "learning_rate": 4.956733650941931e-06,
      "loss": 5.9704,
      "step": 430
    },
    {
      "epoch": 0.08973411170877202,
      "grad_norm": 8.414104614793995,
      "learning_rate": 4.953620399749049e-06,
      "loss": 6.1982,
      "step": 440
    },
    {
      "epoch": 0.09177352333851684,
      "grad_norm": 9.918891815515376,
      "learning_rate": 4.950400044850591e-06,
      "loss": 6.2846,
      "step": 450
    },
    {
      "epoch": 0.09381293496826165,
      "grad_norm": 8.216763124460652,
      "learning_rate": 4.947072726819216e-06,
      "loss": 6.2039,
      "step": 460
    },
    {
      "epoch": 0.09585234659800647,
      "grad_norm": 11.442657226474541,
      "learning_rate": 4.943638590896663e-06,
      "loss": 6.148,
      "step": 470
    },
    {
      "epoch": 0.0978917582277513,
      "grad_norm": 10.7989783448724,
      "learning_rate": 4.940097786987408e-06,
      "loss": 6.067,
      "step": 480
    },
    {
      "epoch": 0.09993116985749612,
      "grad_norm": 7.66274546063033,
      "learning_rate": 4.936450469652123e-06,
      "loss": 6.0997,
      "step": 490
    },
    {
      "epoch": 0.10197058148724093,
      "grad_norm": 11.155574725614574,
      "learning_rate": 4.93269679810093e-06,
      "loss": 6.5548,
      "step": 500
    },
    {
      "epoch": 0.10400999311698575,
      "grad_norm": 6.583246362480005,
      "learning_rate": 4.928836936186451e-06,
      "loss": 6.0022,
      "step": 510
    },
    {
      "epoch": 0.10604940474673057,
      "grad_norm": 7.041533689745258,
      "learning_rate": 4.924871052396652e-06,
      "loss": 5.9554,
      "step": 520
    },
    {
      "epoch": 0.10808881637647538,
      "grad_norm": 9.007162906891262,
      "learning_rate": 4.920799319847492e-06,
      "loss": 5.8155,
      "step": 530
    },
    {
      "epoch": 0.1101282280062202,
      "grad_norm": 7.851973912778234,
      "learning_rate": 4.916621916275368e-06,
      "loss": 6.0246,
      "step": 540
    },
    {
      "epoch": 0.11216763963596503,
      "grad_norm": 11.497046086141061,
      "learning_rate": 4.91233902402935e-06,
      "loss": 6.3948,
      "step": 550
    },
    {
      "epoch": 0.11420705126570985,
      "grad_norm": 11.5824916010593,
      "learning_rate": 4.9079508300632285e-06,
      "loss": 6.1677,
      "step": 560
    },
    {
      "epoch": 0.11624646289545466,
      "grad_norm": 8.291991010048283,
      "learning_rate": 4.903457525927346e-06,
      "loss": 5.8365,
      "step": 570
    },
    {
      "epoch": 0.11828587452519948,
      "grad_norm": 10.643398932054943,
      "learning_rate": 4.898859307760244e-06,
      "loss": 5.8869,
      "step": 580
    },
    {
      "epoch": 0.1203252861549443,
      "grad_norm": 7.296026077222149,
      "learning_rate": 4.894156376280097e-06,
      "loss": 6.1545,
      "step": 590
    },
    {
      "epoch": 0.12236469778468911,
      "grad_norm": 8.126364280765701,
      "learning_rate": 4.889348936775949e-06,
      "loss": 6.0321,
      "step": 600
    },
    {
      "epoch": 0.12440410941443393,
      "grad_norm": 8.519085629561392,
      "learning_rate": 4.884437199098755e-06,
      "loss": 5.9508,
      "step": 610
    },
    {
      "epoch": 0.12644352104417875,
      "grad_norm": 7.392519482936644,
      "learning_rate": 4.879421377652221e-06,
      "loss": 5.8742,
      "step": 620
    },
    {
      "epoch": 0.12848293267392358,
      "grad_norm": 7.757565156734196,
      "learning_rate": 4.874301691383444e-06,
      "loss": 5.8579,
      "step": 630
    },
    {
      "epoch": 0.13052234430366838,
      "grad_norm": 5.593766231869827,
      "learning_rate": 4.8690783637733556e-06,
      "loss": 5.5822,
      "step": 640
    },
    {
      "epoch": 0.1325617559334132,
      "grad_norm": 8.876625978244254,
      "learning_rate": 4.863751622826963e-06,
      "loss": 5.9153,
      "step": 650
    },
    {
      "epoch": 0.13460116756315804,
      "grad_norm": 9.33367328824495,
      "learning_rate": 4.858321701063404e-06,
      "loss": 5.7982,
      "step": 660
    },
    {
      "epoch": 0.13664057919290284,
      "grad_norm": 6.988379291475624,
      "learning_rate": 4.852788835505789e-06,
      "loss": 5.4791,
      "step": 670
    },
    {
      "epoch": 0.13867999082264768,
      "grad_norm": 7.587497929148608,
      "learning_rate": 4.847153267670861e-06,
      "loss": 5.6539,
      "step": 680
    },
    {
      "epoch": 0.14071940245239248,
      "grad_norm": 4.534723143566571,
      "learning_rate": 4.841415243558446e-06,
      "loss": 5.4806,
      "step": 690
    },
    {
      "epoch": 0.1427588140821373,
      "grad_norm": 9.721969800057037,
      "learning_rate": 4.835575013640724e-06,
      "loss": 5.8345,
      "step": 700
    },
    {
      "epoch": 0.1447982257118821,
      "grad_norm": 10.52209667619718,
      "learning_rate": 4.8296328328512876e-06,
      "loss": 5.7927,
      "step": 710
    },
    {
      "epoch": 0.14683763734162694,
      "grad_norm": 8.761127944716362,
      "learning_rate": 4.823588960574019e-06,
      "loss": 5.6675,
      "step": 720
    },
    {
      "epoch": 0.14887704897137177,
      "grad_norm": 7.302359459744512,
      "learning_rate": 4.817443660631762e-06,
      "loss": 5.6315,
      "step": 730
    },
    {
      "epoch": 0.15091646060111658,
      "grad_norm": 6.275383790159637,
      "learning_rate": 4.811197201274813e-06,
      "loss": 5.4671,
      "step": 740
    },
    {
      "epoch": 0.1529558722308614,
      "grad_norm": 10.551322104997928,
      "learning_rate": 4.804849855169206e-06,
      "loss": 5.6134,
      "step": 750
    },
    {
      "epoch": 0.1549952838606062,
      "grad_norm": 7.584661880579045,
      "learning_rate": 4.798401899384813e-06,
      "loss": 5.9102,
      "step": 760
    },
    {
      "epoch": 0.15703469549035104,
      "grad_norm": 4.793753789486284,
      "learning_rate": 4.791853615383246e-06,
      "loss": 5.4862,
      "step": 770
    },
    {
      "epoch": 0.15907410712009584,
      "grad_norm": 10.242198429563965,
      "learning_rate": 4.785205289005576e-06,
      "loss": 5.4147,
      "step": 780
    },
    {
      "epoch": 0.16111351874984067,
      "grad_norm": 6.632351115695969,
      "learning_rate": 4.7784572104598555e-06,
      "loss": 5.6655,
      "step": 790
    },
    {
      "epoch": 0.16315293037958548,
      "grad_norm": 8.620608149299821,
      "learning_rate": 4.771609674308443e-06,
      "loss": 5.4152,
      "step": 800
    },
    {
      "epoch": 0.1651923420093303,
      "grad_norm": 7.554055633248649,
      "learning_rate": 4.764662979455153e-06,
      "loss": 5.4569,
      "step": 810
    },
    {
      "epoch": 0.16723175363907514,
      "grad_norm": 7.374395786991857,
      "learning_rate": 4.757617429132205e-06,
      "loss": 5.6693,
      "step": 820
    },
    {
      "epoch": 0.16927116526881994,
      "grad_norm": 7.778658999668707,
      "learning_rate": 4.7504733308869885e-06,
      "loss": 5.3745,
      "step": 830
    },
    {
      "epoch": 0.17131057689856477,
      "grad_norm": 7.802265783668195,
      "learning_rate": 4.743230996568636e-06,
      "loss": 5.6156,
      "step": 840
    },
    {
      "epoch": 0.17334998852830957,
      "grad_norm": 6.023468248280255,
      "learning_rate": 4.735890742314414e-06,
      "loss": 5.4607,
      "step": 850
    },
    {
      "epoch": 0.1753894001580544,
      "grad_norm": 10.949757092686623,
      "learning_rate": 4.728452888535917e-06,
      "loss": 5.5242,
      "step": 860
    },
    {
      "epoch": 0.1774288117877992,
      "grad_norm": 8.317892545044202,
      "learning_rate": 4.72091775990509e-06,
      "loss": 5.4653,
      "step": 870
    },
    {
      "epoch": 0.17946822341754404,
      "grad_norm": 6.757147922173718,
      "learning_rate": 4.713285685340047e-06,
      "loss": 5.8241,
      "step": 880
    },
    {
      "epoch": 0.18150763504728887,
      "grad_norm": 6.363739805456836,
      "learning_rate": 4.70555699799072e-06,
      "loss": 5.4058,
      "step": 890
    },
    {
      "epoch": 0.18354704667703367,
      "grad_norm": 10.24562868217905,
      "learning_rate": 4.697732035224313e-06,
      "loss": 5.4712,
      "step": 900
    },
    {
      "epoch": 0.1855864583067785,
      "grad_norm": 8.518144636408193,
      "learning_rate": 4.689811138610576e-06,
      "loss": 5.4263,
      "step": 910
    },
    {
      "epoch": 0.1876258699365233,
      "grad_norm": 7.395829764767208,
      "learning_rate": 4.681794653906897e-06,
      "loss": 5.2392,
      "step": 920
    },
    {
      "epoch": 0.18966528156626813,
      "grad_norm": 7.494096023177462,
      "learning_rate": 4.673682931043206e-06,
      "loss": 5.7766,
      "step": 930
    },
    {
      "epoch": 0.19170469319601294,
      "grad_norm": 5.4911466037050145,
      "learning_rate": 4.665476324106705e-06,
      "loss": 5.4307,
      "step": 940
    },
    {
      "epoch": 0.19374410482575777,
      "grad_norm": 7.523992837258927,
      "learning_rate": 4.657175191326405e-06,
      "loss": 5.2756,
      "step": 950
    },
    {
      "epoch": 0.1957835164555026,
      "grad_norm": 7.885343320180726,
      "learning_rate": 4.648779895057495e-06,
      "loss": 5.4366,
      "step": 960
    },
    {
      "epoch": 0.1978229280852474,
      "grad_norm": 4.885451371326598,
      "learning_rate": 4.64029080176552e-06,
      "loss": 5.3246,
      "step": 970
    },
    {
      "epoch": 0.19986233971499223,
      "grad_norm": 8.50397433436197,
      "learning_rate": 4.631708282010389e-06,
      "loss": 5.5952,
      "step": 980
    },
    {
      "epoch": 0.20190175134473703,
      "grad_norm": 7.0206835314562195,
      "learning_rate": 4.6230327104301935e-06,
      "loss": 5.2864,
      "step": 990
    },
    {
      "epoch": 0.20394116297448187,
      "grad_norm": 6.969378735928141,
      "learning_rate": 4.614264465724862e-06,
      "loss": 5.2238,
      "step": 1000
    },
    {
      "epoch": 0.20598057460422667,
      "grad_norm": 8.080868706627124,
      "learning_rate": 4.605403930639621e-06,
      "loss": 5.3855,
      "step": 1010
    },
    {
      "epoch": 0.2080199862339715,
      "grad_norm": 5.944525382159798,
      "learning_rate": 4.5964514919482935e-06,
      "loss": 5.4129,
      "step": 1020
    },
    {
      "epoch": 0.21005939786371633,
      "grad_norm": 5.520005113209886,
      "learning_rate": 4.587407540436414e-06,
      "loss": 5.4312,
      "step": 1030
    },
    {
      "epoch": 0.21209880949346113,
      "grad_norm": 4.896352945576618,
      "learning_rate": 4.578272470884169e-06,
      "loss": 5.312,
      "step": 1040
    },
    {
      "epoch": 0.21413822112320596,
      "grad_norm": 7.324460281537905,
      "learning_rate": 4.569046682049164e-06,
      "loss": 5.356,
      "step": 1050
    },
    {
      "epoch": 0.21617763275295077,
      "grad_norm": 5.713020075072778,
      "learning_rate": 4.559730576649023e-06,
      "loss": 5.2835,
      "step": 1060
    },
    {
      "epoch": 0.2182170443826956,
      "grad_norm": 4.649247125372443,
      "learning_rate": 4.5503245613438004e-06,
      "loss": 5.2421,
      "step": 1070
    },
    {
      "epoch": 0.2202564560124404,
      "grad_norm": 7.7755489393380905,
      "learning_rate": 4.540829046718238e-06,
      "loss": 5.4405,
      "step": 1080
    },
    {
      "epoch": 0.22229586764218523,
      "grad_norm": 8.143008411892646,
      "learning_rate": 4.531244447263835e-06,
      "loss": 4.9708,
      "step": 1090
    },
    {
      "epoch": 0.22433527927193006,
      "grad_norm": 10.474648698876116,
      "learning_rate": 4.521571181360762e-06,
      "loss": 5.2158,
      "step": 1100
    },
    {
      "epoch": 0.22637469090167486,
      "grad_norm": 5.6615222624327215,
      "learning_rate": 4.5118096712595925e-06,
      "loss": 5.2773,
      "step": 1110
    },
    {
      "epoch": 0.2284141025314197,
      "grad_norm": 5.546903591037121,
      "learning_rate": 4.501960343062875e-06,
      "loss": 5.2797,
      "step": 1120
    },
    {
      "epoch": 0.2304535141611645,
      "grad_norm": 6.483847976431606,
      "learning_rate": 4.492023626706531e-06,
      "loss": 5.2986,
      "step": 1130
    },
    {
      "epoch": 0.23249292579090933,
      "grad_norm": 5.661938295947555,
      "learning_rate": 4.481999955941088e-06,
      "loss": 5.0571,
      "step": 1140
    },
    {
      "epoch": 0.23453233742065413,
      "grad_norm": 7.072541427396651,
      "learning_rate": 4.4718897683127445e-06,
      "loss": 5.1104,
      "step": 1150
    },
    {
      "epoch": 0.23657174905039896,
      "grad_norm": 16.98201550221037,
      "learning_rate": 4.4616935051442764e-06,
      "loss": 5.3549,
      "step": 1160
    },
    {
      "epoch": 0.2386111606801438,
      "grad_norm": 5.531136789877289,
      "learning_rate": 4.451411611515764e-06,
      "loss": 4.9175,
      "step": 1170
    },
    {
      "epoch": 0.2406505723098886,
      "grad_norm": 6.473678682260914,
      "learning_rate": 4.4410445362451696e-06,
      "loss": 5.4938,
      "step": 1180
    },
    {
      "epoch": 0.24268998393963342,
      "grad_norm": 3.568688453698588,
      "learning_rate": 4.4305927318687445e-06,
      "loss": 5.1755,
      "step": 1190
    },
    {
      "epoch": 0.24472939556937823,
      "grad_norm": 9.957209893503183,
      "learning_rate": 4.420056654621276e-06,
      "loss": 5.0824,
      "step": 1200
    },
    {
      "epoch": 0.24676880719912306,
      "grad_norm": 6.550501507797596,
      "learning_rate": 4.409436764416167e-06,
      "loss": 5.0426,
      "step": 1210
    },
    {
      "epoch": 0.24880821882886786,
      "grad_norm": 6.096650424567716,
      "learning_rate": 4.398733524825372e-06,
      "loss": 5.1449,
      "step": 1220
    },
    {
      "epoch": 0.2508476304586127,
      "grad_norm": 5.73645263262766,
      "learning_rate": 4.3879474030591475e-06,
      "loss": 5.3313,
      "step": 1230
    },
    {
      "epoch": 0.2528870420883575,
      "grad_norm": 5.554280166145116,
      "learning_rate": 4.377078869945666e-06,
      "loss": 5.1019,
      "step": 1240
    },
    {
      "epoch": 0.25492645371810235,
      "grad_norm": 8.19010821517748,
      "learning_rate": 4.366128399910463e-06,
      "loss": 5.3396,
      "step": 1250
    },
    {
      "epoch": 0.25696586534784716,
      "grad_norm": 5.809513632072872,
      "learning_rate": 4.355096470955726e-06,
      "loss": 5.3416,
      "step": 1260
    },
    {
      "epoch": 0.25900527697759196,
      "grad_norm": 5.140517143148237,
      "learning_rate": 4.3439835646394314e-06,
      "loss": 5.232,
      "step": 1270
    },
    {
      "epoch": 0.26104468860733676,
      "grad_norm": 7.786748845574264,
      "learning_rate": 4.332790166054318e-06,
      "loss": 5.3942,
      "step": 1280
    },
    {
      "epoch": 0.2630841002370816,
      "grad_norm": 5.34911882358093,
      "learning_rate": 4.32151676380672e-06,
      "loss": 4.9009,
      "step": 1290
    },
    {
      "epoch": 0.2651235118668264,
      "grad_norm": 7.7837668085611025,
      "learning_rate": 4.310163849995234e-06,
      "loss": 5.2734,
      "step": 1300
    },
    {
      "epoch": 0.2671629234965712,
      "grad_norm": 5.496423754443762,
      "learning_rate": 4.2987319201892405e-06,
      "loss": 5.1612,
      "step": 1310
    },
    {
      "epoch": 0.2692023351263161,
      "grad_norm": 5.843994083100653,
      "learning_rate": 4.287221473407267e-06,
      "loss": 5.1954,
      "step": 1320
    },
    {
      "epoch": 0.2712417467560609,
      "grad_norm": 7.265974737367786,
      "learning_rate": 4.2756330120952125e-06,
      "loss": 5.262,
      "step": 1330
    },
    {
      "epoch": 0.2732811583858057,
      "grad_norm": 4.575773182227481,
      "learning_rate": 4.263967042104408e-06,
      "loss": 5.0813,
      "step": 1340
    },
    {
      "epoch": 0.2753205700155505,
      "grad_norm": 8.171128477846313,
      "learning_rate": 4.25222407266954e-06,
      "loss": 5.128,
      "step": 1350
    },
    {
      "epoch": 0.27735998164529535,
      "grad_norm": 7.220886668527826,
      "learning_rate": 4.240404616386422e-06,
      "loss": 5.1711,
      "step": 1360
    },
    {
      "epoch": 0.27939939327504015,
      "grad_norm": 4.10147133416572,
      "learning_rate": 4.228509189189614e-06,
      "loss": 4.9569,
      "step": 1370
    },
    {
      "epoch": 0.28143880490478496,
      "grad_norm": 7.066133692204384,
      "learning_rate": 4.216538310329908e-06,
      "loss": 5.0134,
      "step": 1380
    },
    {
      "epoch": 0.2834782165345298,
      "grad_norm": 5.21162609757401,
      "learning_rate": 4.204492502351656e-06,
      "loss": 5.1385,
      "step": 1390
    },
    {
      "epoch": 0.2855176281642746,
      "grad_norm": 12.133570883449654,
      "learning_rate": 4.192372291069965e-06,
      "loss": 5.3003,
      "step": 1400
    },
    {
      "epoch": 0.2875570397940194,
      "grad_norm": 5.447844429023172,
      "learning_rate": 4.180178205547741e-06,
      "loss": 5.1224,
      "step": 1410
    },
    {
      "epoch": 0.2895964514237642,
      "grad_norm": 5.255219528432195,
      "learning_rate": 4.167910778072598e-06,
      "loss": 5.2098,
      "step": 1420
    },
    {
      "epoch": 0.2916358630535091,
      "grad_norm": 8.974121936568055,
      "learning_rate": 4.15557054413362e-06,
      "loss": 5.043,
      "step": 1430
    },
    {
      "epoch": 0.2936752746832539,
      "grad_norm": 5.127949434548276,
      "learning_rate": 4.143158042397986e-06,
      "loss": 4.7639,
      "step": 1440
    },
    {
      "epoch": 0.2957146863129987,
      "grad_norm": 6.7317777225167355,
      "learning_rate": 4.13067381468746e-06,
      "loss": 4.851,
      "step": 1450
    },
    {
      "epoch": 0.29775409794274355,
      "grad_norm": 7.362780361944449,
      "learning_rate": 4.118118405954737e-06,
      "loss": 5.0614,
      "step": 1460
    },
    {
      "epoch": 0.29979350957248835,
      "grad_norm": 6.269272741616841,
      "learning_rate": 4.105492364259656e-06,
      "loss": 5.0865,
      "step": 1470
    },
    {
      "epoch": 0.30183292120223315,
      "grad_norm": 9.08220881321792,
      "learning_rate": 4.0927962407452746e-06,
      "loss": 5.2192,
      "step": 1480
    },
    {
      "epoch": 0.30387233283197795,
      "grad_norm": 4.7165201076742385,
      "learning_rate": 4.080030589613815e-06,
      "loss": 4.781,
      "step": 1490
    },
    {
      "epoch": 0.3059117444617228,
      "grad_norm": 8.211182184786706,
      "learning_rate": 4.067195968102468e-06,
      "loss": 5.2033,
      "step": 1500
    },
    {
      "epoch": 0.3079511560914676,
      "grad_norm": 6.509747362409568,
      "learning_rate": 4.054292936459071e-06,
      "loss": 5.1884,
      "step": 1510
    },
    {
      "epoch": 0.3099905677212124,
      "grad_norm": 5.544500373765245,
      "learning_rate": 4.041322057917653e-06,
      "loss": 4.9485,
      "step": 1520
    },
    {
      "epoch": 0.3120299793509573,
      "grad_norm": 4.807701021398306,
      "learning_rate": 4.0282838986738485e-06,
      "loss": 4.8131,
      "step": 1530
    },
    {
      "epoch": 0.3140693909807021,
      "grad_norm": 3.233409705549431,
      "learning_rate": 4.015179027860178e-06,
      "loss": 4.7307,
      "step": 1540
    },
    {
      "epoch": 0.3161088026104469,
      "grad_norm": 6.77236682107715,
      "learning_rate": 4.002008017521212e-06,
      "loss": 4.7627,
      "step": 1550
    },
    {
      "epoch": 0.3181482142401917,
      "grad_norm": 6.020669488961124,
      "learning_rate": 3.9887714425885975e-06,
      "loss": 4.9527,
      "step": 1560
    },
    {
      "epoch": 0.32018762586993654,
      "grad_norm": 4.923860327213745,
      "learning_rate": 3.975469880855958e-06,
      "loss": 4.7287,
      "step": 1570
    },
    {
      "epoch": 0.32222703749968135,
      "grad_norm": 5.631284127525618,
      "learning_rate": 3.962103912953674e-06,
      "loss": 4.9252,
      "step": 1580
    },
    {
      "epoch": 0.32426644912942615,
      "grad_norm": 5.814788040501255,
      "learning_rate": 3.9486741223235445e-06,
      "loss": 5.1897,
      "step": 1590
    },
    {
      "epoch": 0.32630586075917095,
      "grad_norm": 5.508231515820795,
      "learning_rate": 3.935181095193308e-06,
      "loss": 5.0457,
      "step": 1600
    },
    {
      "epoch": 0.3283452723889158,
      "grad_norm": 5.4508146817265954,
      "learning_rate": 3.921625420551059e-06,
      "loss": 4.8422,
      "step": 1610
    },
    {
      "epoch": 0.3303846840186606,
      "grad_norm": 4.715200633768152,
      "learning_rate": 3.908007690119537e-06,
      "loss": 4.9572,
      "step": 1620
    },
    {
      "epoch": 0.3324240956484054,
      "grad_norm": 5.635074094088275,
      "learning_rate": 3.894328498330298e-06,
      "loss": 4.9342,
      "step": 1630
    },
    {
      "epoch": 0.3344635072781503,
      "grad_norm": 6.246219395266704,
      "learning_rate": 3.880588442297766e-06,
      "loss": 4.9741,
      "step": 1640
    },
    {
      "epoch": 0.3365029189078951,
      "grad_norm": 8.098590882244267,
      "learning_rate": 3.866788121793167e-06,
      "loss": 4.8886,
      "step": 1650
    },
    {
      "epoch": 0.3385423305376399,
      "grad_norm": 12.62629986891231,
      "learning_rate": 3.852928139218348e-06,
      "loss": 4.8273,
      "step": 1660
    },
    {
      "epoch": 0.3405817421673847,
      "grad_norm": 4.193982563974684,
      "learning_rate": 3.839009099579486e-06,
      "loss": 4.933,
      "step": 1670
    },
    {
      "epoch": 0.34262115379712954,
      "grad_norm": 5.632695469956019,
      "learning_rate": 3.825031610460672e-06,
      "loss": 4.6028,
      "step": 1680
    },
    {
      "epoch": 0.34466056542687434,
      "grad_norm": 3.989355667747194,
      "learning_rate": 3.8109962819973944e-06,
      "loss": 4.8633,
      "step": 1690
    },
    {
      "epoch": 0.34669997705661915,
      "grad_norm": 7.846817101081181,
      "learning_rate": 3.7969037268499025e-06,
      "loss": 4.9931,
      "step": 1700
    },
    {
      "epoch": 0.348739388686364,
      "grad_norm": 5.987919721991004,
      "learning_rate": 3.7827545601764653e-06,
      "loss": 4.7492,
      "step": 1710
    },
    {
      "epoch": 0.3507788003161088,
      "grad_norm": 4.985392387181365,
      "learning_rate": 3.768549399606518e-06,
      "loss": 4.7011,
      "step": 1720
    },
    {
      "epoch": 0.3528182119458536,
      "grad_norm": 6.5690265181699585,
      "learning_rate": 3.7542888652137025e-06,
      "loss": 4.7899,
      "step": 1730
    },
    {
      "epoch": 0.3548576235755984,
      "grad_norm": 5.920232383701147,
      "learning_rate": 3.7399735794887983e-06,
      "loss": 4.7349,
      "step": 1740
    },
    {
      "epoch": 0.35689703520534327,
      "grad_norm": 6.9773660448694965,
      "learning_rate": 3.7256041673125513e-06,
      "loss": 4.8938,
      "step": 1750
    },
    {
      "epoch": 0.3589364468350881,
      "grad_norm": 5.12271828837655,
      "learning_rate": 3.711181255928399e-06,
      "loss": 4.8061,
      "step": 1760
    },
    {
      "epoch": 0.3609758584648329,
      "grad_norm": 4.509496100857058,
      "learning_rate": 3.6967054749150872e-06,
      "loss": 4.6748,
      "step": 1770
    },
    {
      "epoch": 0.36301527009457774,
      "grad_norm": 7.295595382239702,
      "learning_rate": 3.6821774561591893e-06,
      "loss": 4.6083,
      "step": 1780
    },
    {
      "epoch": 0.36505468172432254,
      "grad_norm": 4.599385314907489,
      "learning_rate": 3.667597833827525e-06,
      "loss": 4.883,
      "step": 1790
    },
    {
      "epoch": 0.36709409335406734,
      "grad_norm": 6.629516043807281,
      "learning_rate": 3.6529672443394736e-06,
      "loss": 4.9669,
      "step": 1800
    },
    {
      "epoch": 0.36913350498381214,
      "grad_norm": 4.577184516232719,
      "learning_rate": 3.6382863263392017e-06,
      "loss": 4.7151,
      "step": 1810
    },
    {
      "epoch": 0.371172916613557,
      "grad_norm": 4.027378150905267,
      "learning_rate": 3.623555720667777e-06,
      "loss": 4.8222,
      "step": 1820
    },
    {
      "epoch": 0.3732123282433018,
      "grad_norm": 5.024565452729882,
      "learning_rate": 3.608776070335199e-06,
      "loss": 4.8064,
      "step": 1830
    },
    {
      "epoch": 0.3752517398730466,
      "grad_norm": 4.066513690833921,
      "learning_rate": 3.5939480204923304e-06,
      "loss": 4.9129,
      "step": 1840
    },
    {
      "epoch": 0.37729115150279147,
      "grad_norm": 6.837117278953486,
      "learning_rate": 3.5790722184027366e-06,
      "loss": 4.9833,
      "step": 1850
    },
    {
      "epoch": 0.37933056313253627,
      "grad_norm": 6.6515332693332425,
      "learning_rate": 3.564149313414427e-06,
      "loss": 4.6832,
      "step": 1860
    },
    {
      "epoch": 0.38136997476228107,
      "grad_norm": 4.476454101392343,
      "learning_rate": 3.549179956931517e-06,
      "loss": 4.8533,
      "step": 1870
    },
    {
      "epoch": 0.3834093863920259,
      "grad_norm": 5.956840861878673,
      "learning_rate": 3.5341648023857862e-06,
      "loss": 4.6882,
      "step": 1880
    },
    {
      "epoch": 0.38544879802177073,
      "grad_norm": 4.536553049777035,
      "learning_rate": 3.5191045052081635e-06,
      "loss": 4.5991,
      "step": 1890
    },
    {
      "epoch": 0.38748820965151554,
      "grad_norm": 5.54267535596638,
      "learning_rate": 3.503999722800108e-06,
      "loss": 4.6078,
      "step": 1900
    },
    {
      "epoch": 0.38952762128126034,
      "grad_norm": 4.50789482208058,
      "learning_rate": 3.4888511145049185e-06,
      "loss": 4.5409,
      "step": 1910
    },
    {
      "epoch": 0.3915670329110052,
      "grad_norm": 5.279561131733627,
      "learning_rate": 3.473659341578951e-06,
      "loss": 4.5287,
      "step": 1920
    },
    {
      "epoch": 0.39360644454075,
      "grad_norm": 7.116875907931303,
      "learning_rate": 3.4584250671627525e-06,
      "loss": 5.1626,
      "step": 1930
    },
    {
      "epoch": 0.3956458561704948,
      "grad_norm": 4.65457006318472,
      "learning_rate": 3.443148956252115e-06,
      "loss": 4.6972,
      "step": 1940
    },
    {
      "epoch": 0.3976852678002396,
      "grad_norm": 6.4616508610483425,
      "learning_rate": 3.427831675669048e-06,
      "loss": 4.7647,
      "step": 1950
    },
    {
      "epoch": 0.39972467942998446,
      "grad_norm": 5.316379863410858,
      "learning_rate": 3.4124738940326695e-06,
      "loss": 4.7853,
      "step": 1960
    },
    {
      "epoch": 0.40176409105972927,
      "grad_norm": 4.914166576095454,
      "learning_rate": 3.397076281730023e-06,
      "loss": 5.0169,
      "step": 1970
    },
    {
      "epoch": 0.40380350268947407,
      "grad_norm": 8.28351097157618,
      "learning_rate": 3.3816395108868104e-06,
      "loss": 4.7176,
      "step": 1980
    },
    {
      "epoch": 0.40584291431921893,
      "grad_norm": 4.575225455721527,
      "learning_rate": 3.3661642553380556e-06,
      "loss": 4.8754,
      "step": 1990
    },
    {
      "epoch": 0.40788232594896373,
      "grad_norm": 5.003204369305476,
      "learning_rate": 3.3506511905986894e-06,
      "loss": 4.7545,
      "step": 2000
    },
    {
      "epoch": 0.40992173757870853,
      "grad_norm": 4.5398048037729595,
      "learning_rate": 3.335100993834061e-06,
      "loss": 4.6255,
      "step": 2010
    },
    {
      "epoch": 0.41196114920845334,
      "grad_norm": 6.197545243168517,
      "learning_rate": 3.319514343830383e-06,
      "loss": 4.6299,
      "step": 2020
    },
    {
      "epoch": 0.4140005608381982,
      "grad_norm": 7.889241131506948,
      "learning_rate": 3.303891920965098e-06,
      "loss": 4.9074,
      "step": 2030
    },
    {
      "epoch": 0.416039972467943,
      "grad_norm": 5.074720773644666,
      "learning_rate": 3.288234407177181e-06,
      "loss": 4.573,
      "step": 2040
    },
    {
      "epoch": 0.4180793840976878,
      "grad_norm": 6.112647596280491,
      "learning_rate": 3.272542485937369e-06,
      "loss": 4.8592,
      "step": 2050
    },
    {
      "epoch": 0.42011879572743266,
      "grad_norm": 6.4517690477181695,
      "learning_rate": 3.256816842218331e-06,
      "loss": 4.5932,
      "step": 2060
    },
    {
      "epoch": 0.42215820735717746,
      "grad_norm": 8.247239836120627,
      "learning_rate": 3.241058162464767e-06,
      "loss": 4.5315,
      "step": 2070
    },
    {
      "epoch": 0.42419761898692226,
      "grad_norm": 5.52060883189122,
      "learning_rate": 3.225267134563439e-06,
      "loss": 4.8001,
      "step": 2080
    },
    {
      "epoch": 0.42623703061666707,
      "grad_norm": 3.859291794827778,
      "learning_rate": 3.209444447813149e-06,
      "loss": 4.7745,
      "step": 2090
    },
    {
      "epoch": 0.4282764422464119,
      "grad_norm": 6.789072712547568,
      "learning_rate": 3.193590792894651e-06,
      "loss": 4.6465,
      "step": 2100
    },
    {
      "epoch": 0.43031585387615673,
      "grad_norm": 6.921492613916647,
      "learning_rate": 3.1777068618404954e-06,
      "loss": 4.8625,
      "step": 2110
    },
    {
      "epoch": 0.43235526550590153,
      "grad_norm": 4.185712566739971,
      "learning_rate": 3.1617933480048297e-06,
      "loss": 4.7646,
      "step": 2120
    },
    {
      "epoch": 0.4343946771356464,
      "grad_norm": 5.908064336894023,
      "learning_rate": 3.145850946033125e-06,
      "loss": 4.7062,
      "step": 2130
    },
    {
      "epoch": 0.4364340887653912,
      "grad_norm": 4.9512422103962495,
      "learning_rate": 3.1298803518318565e-06,
      "loss": 4.6632,
      "step": 2140
    },
    {
      "epoch": 0.438473500395136,
      "grad_norm": 4.462025583947847,
      "learning_rate": 3.11388226253813e-06,
      "loss": 4.8027,
      "step": 2150
    },
    {
      "epoch": 0.4405129120248808,
      "grad_norm": 21.54345806183561,
      "learning_rate": 3.097857376489244e-06,
      "loss": 4.7234,
      "step": 2160
    },
    {
      "epoch": 0.44255232365462566,
      "grad_norm": 4.390377807291699,
      "learning_rate": 3.081806393192213e-06,
      "loss": 4.4904,
      "step": 2170
    },
    {
      "epoch": 0.44459173528437046,
      "grad_norm": 6.955688616513592,
      "learning_rate": 3.0657300132932276e-06,
      "loss": 4.6193,
      "step": 2180
    },
    {
      "epoch": 0.44663114691411526,
      "grad_norm": 5.926834469512367,
      "learning_rate": 3.049628938547075e-06,
      "loss": 4.5311,
      "step": 2190
    },
    {
      "epoch": 0.4486705585438601,
      "grad_norm": 6.810382975795083,
      "learning_rate": 3.0335038717865036e-06,
      "loss": 4.5441,
      "step": 2200
    },
    {
      "epoch": 0.4507099701736049,
      "grad_norm": 6.105828666147869,
      "learning_rate": 3.017355516891543e-06,
      "loss": 4.7737,
      "step": 2210
    },
    {
      "epoch": 0.4527493818033497,
      "grad_norm": 4.1649273404858524,
      "learning_rate": 3.001184578758783e-06,
      "loss": 4.7812,
      "step": 2220
    },
    {
      "epoch": 0.45478879343309453,
      "grad_norm": 5.4531825579013,
      "learning_rate": 2.9849917632705983e-06,
      "loss": 4.5843,
      "step": 2230
    },
    {
      "epoch": 0.4568282050628394,
      "grad_norm": 3.3490621444252247,
      "learning_rate": 2.9687777772643395e-06,
      "loss": 4.7987,
      "step": 2240
    },
    {
      "epoch": 0.4588676166925842,
      "grad_norm": 4.6539570043652985,
      "learning_rate": 2.9525433285014775e-06,
      "loss": 4.7358,
      "step": 2250
    },
    {
      "epoch": 0.460907028322329,
      "grad_norm": 5.481796267159919,
      "learning_rate": 2.936289125636709e-06,
      "loss": 4.5467,
      "step": 2260
    },
    {
      "epoch": 0.46294643995207385,
      "grad_norm": 4.96946248531744,
      "learning_rate": 2.9200158781870234e-06,
      "loss": 4.5166,
      "step": 2270
    },
    {
      "epoch": 0.46498585158181865,
      "grad_norm": 5.728062919250581,
      "learning_rate": 2.9037242965007306e-06,
      "loss": 4.4551,
      "step": 2280
    },
    {
      "epoch": 0.46702526321156346,
      "grad_norm": 5.557886147170241,
      "learning_rate": 2.8874150917264526e-06,
      "loss": 4.5336,
      "step": 2290
    },
    {
      "epoch": 0.46906467484130826,
      "grad_norm": 4.31588701471457,
      "learning_rate": 2.8710889757820836e-06,
      "loss": 4.7721,
      "step": 2300
    },
    {
      "epoch": 0.4711040864710531,
      "grad_norm": 5.845242353576575,
      "learning_rate": 2.8547466613237103e-06,
      "loss": 4.539,
      "step": 2310
    },
    {
      "epoch": 0.4731434981007979,
      "grad_norm": 4.457642166089704,
      "learning_rate": 2.8383888617145082e-06,
      "loss": 4.6368,
      "step": 2320
    },
    {
      "epoch": 0.4751829097305427,
      "grad_norm": 16.549355482991313,
      "learning_rate": 2.822016290993598e-06,
      "loss": 4.5717,
      "step": 2330
    },
    {
      "epoch": 0.4772223213602876,
      "grad_norm": 3.401945066769072,
      "learning_rate": 2.805629663844878e-06,
      "loss": 4.5665,
      "step": 2340
    },
    {
      "epoch": 0.4792617329900324,
      "grad_norm": 6.848905879632381,
      "learning_rate": 2.7892296955658283e-06,
      "loss": 4.6795,
      "step": 2350
    },
    {
      "epoch": 0.4813011446197772,
      "grad_norm": 4.697700609937575,
      "learning_rate": 2.7728171020362877e-06,
      "loss": 4.6161,
      "step": 2360
    },
    {
      "epoch": 0.483340556249522,
      "grad_norm": 4.222974275891583,
      "learning_rate": 2.756392599687202e-06,
      "loss": 4.8794,
      "step": 2370
    },
    {
      "epoch": 0.48537996787926685,
      "grad_norm": 4.801423370516862,
      "learning_rate": 2.739956905469353e-06,
      "loss": 4.7057,
      "step": 2380
    },
    {
      "epoch": 0.48741937950901165,
      "grad_norm": 7.011187588456055,
      "learning_rate": 2.7235107368220627e-06,
      "loss": 4.7853,
      "step": 2390
    },
    {
      "epoch": 0.48945879113875645,
      "grad_norm": 5.51967028487539,
      "learning_rate": 2.707054811641874e-06,
      "loss": 4.4753,
      "step": 2400
    },
    {
      "epoch": 0.4914982027685013,
      "grad_norm": 5.620489808473178,
      "learning_rate": 2.690589848251216e-06,
      "loss": 4.5693,
      "step": 2410
    },
    {
      "epoch": 0.4935376143982461,
      "grad_norm": 3.3770101266170562,
      "learning_rate": 2.674116565367048e-06,
      "loss": 4.1814,
      "step": 2420
    },
    {
      "epoch": 0.4955770260279909,
      "grad_norm": 4.579061030784262,
      "learning_rate": 2.6576356820694845e-06,
      "loss": 4.5227,
      "step": 2430
    },
    {
      "epoch": 0.4976164376577357,
      "grad_norm": 5.8851234142566415,
      "learning_rate": 2.641147917770409e-06,
      "loss": 4.5298,
      "step": 2440
    },
    {
      "epoch": 0.4996558492874806,
      "grad_norm": 9.014156661371938,
      "learning_rate": 2.62465399218207e-06,
      "loss": 4.7321,
      "step": 2450
    },
    {
      "epoch": 0.5016952609172254,
      "grad_norm": 4.875094287742309,
      "learning_rate": 2.608154625285662e-06,
      "loss": 4.499,
      "step": 2460
    },
    {
      "epoch": 0.5037346725469702,
      "grad_norm": 4.551111540309978,
      "learning_rate": 2.5916505372999023e-06,
      "loss": 4.5375,
      "step": 2470
    },
    {
      "epoch": 0.505774084176715,
      "grad_norm": 6.132709561525347,
      "learning_rate": 2.575142448649588e-06,
      "loss": 4.6213,
      "step": 2480
    },
    {
      "epoch": 0.5078134958064598,
      "grad_norm": 4.48174003397933,
      "learning_rate": 2.5586310799341525e-06,
      "loss": 4.4213,
      "step": 2490
    },
    {
      "epoch": 0.5098529074362047,
      "grad_norm": 5.701907718717752,
      "learning_rate": 2.542117151896205e-06,
      "loss": 4.5751,
      "step": 2500
    },
    {
      "epoch": 0.5118923190659495,
      "grad_norm": 5.285721986438155,
      "learning_rate": 2.525601385390075e-06,
      "loss": 4.5735,
      "step": 2510
    },
    {
      "epoch": 0.5139317306956943,
      "grad_norm": 4.831357059223784,
      "learning_rate": 2.5090845013503432e-06,
      "loss": 4.4947,
      "step": 2520
    },
    {
      "epoch": 0.5159711423254391,
      "grad_norm": 5.890813547398467,
      "learning_rate": 2.4925672207603728e-06,
      "loss": 4.6477,
      "step": 2530
    },
    {
      "epoch": 0.5180105539551839,
      "grad_norm": 5.13336486025524,
      "learning_rate": 2.4760502646208327e-06,
      "loss": 4.3634,
      "step": 2540
    },
    {
      "epoch": 0.5200499655849288,
      "grad_norm": 5.76181531319271,
      "learning_rate": 2.4595343539182357e-06,
      "loss": 4.4792,
      "step": 2550
    },
    {
      "epoch": 0.5220893772146735,
      "grad_norm": 4.720700356635706,
      "learning_rate": 2.4430202095934547e-06,
      "loss": 4.8011,
      "step": 2560
    },
    {
      "epoch": 0.5241287888444184,
      "grad_norm": 3.6552009210206764,
      "learning_rate": 2.4265085525102595e-06,
      "loss": 4.5557,
      "step": 2570
    },
    {
      "epoch": 0.5261682004741632,
      "grad_norm": 4.164312880021689,
      "learning_rate": 2.41000010342385e-06,
      "loss": 4.4727,
      "step": 2580
    },
    {
      "epoch": 0.528207612103908,
      "grad_norm": 3.9949595505069144,
      "learning_rate": 2.3934955829493913e-06,
      "loss": 4.615,
      "step": 2590
    },
    {
      "epoch": 0.5302470237336528,
      "grad_norm": 9.599795198507397,
      "learning_rate": 2.37699571153056e-06,
      "loss": 4.6411,
      "step": 2600
    },
    {
      "epoch": 0.5322864353633977,
      "grad_norm": 4.881244701910976,
      "learning_rate": 2.360501209408094e-06,
      "loss": 4.511,
      "step": 2610
    },
    {
      "epoch": 0.5343258469931425,
      "grad_norm": 5.613652005799349,
      "learning_rate": 2.3440127965883565e-06,
      "loss": 4.5933,
      "step": 2620
    },
    {
      "epoch": 0.5363652586228873,
      "grad_norm": 6.539569907879984,
      "learning_rate": 2.327531192811905e-06,
      "loss": 4.5861,
      "step": 2630
    },
    {
      "epoch": 0.5384046702526322,
      "grad_norm": 3.2807070439491173,
      "learning_rate": 2.311057117522072e-06,
      "loss": 4.3642,
      "step": 2640
    },
    {
      "epoch": 0.5404440818823769,
      "grad_norm": 10.987859768581247,
      "learning_rate": 2.2945912898335627e-06,
      "loss": 4.7251,
      "step": 2650
    },
    {
      "epoch": 0.5424834935121218,
      "grad_norm": 4.33875805182618,
      "learning_rate": 2.2781344285010617e-06,
      "loss": 4.4663,
      "step": 2660
    },
    {
      "epoch": 0.5445229051418665,
      "grad_norm": 3.668387406387121,
      "learning_rate": 2.2616872518878645e-06,
      "loss": 4.4141,
      "step": 2670
    },
    {
      "epoch": 0.5465623167716114,
      "grad_norm": 4.892756009503129,
      "learning_rate": 2.2452504779345126e-06,
      "loss": 4.7187,
      "step": 2680
    },
    {
      "epoch": 0.5486017284013562,
      "grad_norm": 4.369590818952964,
      "learning_rate": 2.228824824127459e-06,
      "loss": 4.4326,
      "step": 2690
    },
    {
      "epoch": 0.550641140031101,
      "grad_norm": 4.9893333587642354,
      "learning_rate": 2.2124110074677485e-06,
      "loss": 4.6509,
      "step": 2700
    },
    {
      "epoch": 0.5526805516608458,
      "grad_norm": 4.75169760739204,
      "learning_rate": 2.196009744439719e-06,
      "loss": 4.751,
      "step": 2710
    },
    {
      "epoch": 0.5547199632905907,
      "grad_norm": 6.1761937193487695,
      "learning_rate": 2.179621750979725e-06,
      "loss": 4.3822,
      "step": 2720
    },
    {
      "epoch": 0.5567593749203354,
      "grad_norm": 4.31169666950459,
      "learning_rate": 2.1632477424448893e-06,
      "loss": 4.6249,
      "step": 2730
    },
    {
      "epoch": 0.5587987865500803,
      "grad_norm": 8.068080427315808,
      "learning_rate": 2.146888433581873e-06,
      "loss": 4.4225,
      "step": 2740
    },
    {
      "epoch": 0.5608381981798252,
      "grad_norm": 5.9890454137128595,
      "learning_rate": 2.130544538495678e-06,
      "loss": 4.5456,
      "step": 2750
    },
    {
      "epoch": 0.5628776098095699,
      "grad_norm": 4.86884228126595,
      "learning_rate": 2.114216770618473e-06,
      "loss": 4.508,
      "step": 2760
    },
    {
      "epoch": 0.5649170214393148,
      "grad_norm": 4.516754247676273,
      "learning_rate": 2.097905842678457e-06,
      "loss": 4.5319,
      "step": 2770
    },
    {
      "epoch": 0.5669564330690596,
      "grad_norm": 4.619583468395096,
      "learning_rate": 2.0816124666687416e-06,
      "loss": 4.6548,
      "step": 2780
    },
    {
      "epoch": 0.5689958446988044,
      "grad_norm": 3.5811643080060693,
      "learning_rate": 2.065337353816272e-06,
      "loss": 4.2559,
      "step": 2790
    },
    {
      "epoch": 0.5710352563285492,
      "grad_norm": 4.052378199644621,
      "learning_rate": 2.049081214550788e-06,
      "loss": 4.5639,
      "step": 2800
    },
    {
      "epoch": 0.573074667958294,
      "grad_norm": 4.872742245565623,
      "learning_rate": 2.032844758473804e-06,
      "loss": 4.7058,
      "step": 2810
    },
    {
      "epoch": 0.5751140795880388,
      "grad_norm": 5.3735486404682185,
      "learning_rate": 2.016628694327638e-06,
      "loss": 4.5673,
      "step": 2820
    },
    {
      "epoch": 0.5771534912177837,
      "grad_norm": 5.811383736126772,
      "learning_rate": 2.000433729964475e-06,
      "loss": 4.514,
      "step": 2830
    },
    {
      "epoch": 0.5791929028475284,
      "grad_norm": 3.9005330620881002,
      "learning_rate": 1.984260572315467e-06,
      "loss": 4.3003,
      "step": 2840
    },
    {
      "epoch": 0.5812323144772733,
      "grad_norm": 4.981061673020621,
      "learning_rate": 1.968109927359874e-06,
      "loss": 4.591,
      "step": 2850
    },
    {
      "epoch": 0.5832717261070182,
      "grad_norm": 5.28160349345245,
      "learning_rate": 1.9519825000942474e-06,
      "loss": 4.5335,
      "step": 2860
    },
    {
      "epoch": 0.5853111377367629,
      "grad_norm": 4.193551615155147,
      "learning_rate": 1.935878994501659e-06,
      "loss": 4.2131,
      "step": 2870
    },
    {
      "epoch": 0.5873505493665078,
      "grad_norm": 15.810563918590717,
      "learning_rate": 1.919800113520964e-06,
      "loss": 4.4921,
      "step": 2880
    },
    {
      "epoch": 0.5893899609962526,
      "grad_norm": 5.781901199191389,
      "learning_rate": 1.9037465590161272e-06,
      "loss": 4.5006,
      "step": 2890
    },
    {
      "epoch": 0.5914293726259974,
      "grad_norm": 4.451645024528842,
      "learning_rate": 1.8877190317455756e-06,
      "loss": 4.3096,
      "step": 2900
    },
    {
      "epoch": 0.5934687842557422,
      "grad_norm": 6.015033853666374,
      "learning_rate": 1.8717182313316155e-06,
      "loss": 4.2639,
      "step": 2910
    },
    {
      "epoch": 0.5955081958854871,
      "grad_norm": 3.4963193002348705,
      "learning_rate": 1.8557448562298896e-06,
      "loss": 4.4011,
      "step": 2920
    },
    {
      "epoch": 0.5975476075152318,
      "grad_norm": 4.695537212839546,
      "learning_rate": 1.8397996036988935e-06,
      "loss": 4.6197,
      "step": 2930
    },
    {
      "epoch": 0.5995870191449767,
      "grad_norm": 3.3273564795922437,
      "learning_rate": 1.823883169769533e-06,
      "loss": 4.2336,
      "step": 2940
    },
    {
      "epoch": 0.6016264307747214,
      "grad_norm": 6.019335006969918,
      "learning_rate": 1.8079962492147465e-06,
      "loss": 4.519,
      "step": 2950
    },
    {
      "epoch": 0.6036658424044663,
      "grad_norm": 4.803166364148703,
      "learning_rate": 1.7921395355191735e-06,
      "loss": 4.0433,
      "step": 2960
    },
    {
      "epoch": 0.6057052540342112,
      "grad_norm": 3.4575731489719077,
      "learning_rate": 1.7763137208488878e-06,
      "loss": 4.3438,
      "step": 2970
    },
    {
      "epoch": 0.6077446656639559,
      "grad_norm": 5.841179295451367,
      "learning_rate": 1.7605194960211791e-06,
      "loss": 4.54,
      "step": 2980
    },
    {
      "epoch": 0.6097840772937008,
      "grad_norm": 4.010802292312647,
      "learning_rate": 1.7447575504743996e-06,
      "loss": 4.5196,
      "step": 2990
    },
    {
      "epoch": 0.6118234889234456,
      "grad_norm": 4.079807608234882,
      "learning_rate": 1.7290285722378697e-06,
      "loss": 4.4801,
      "step": 3000
    },
    {
      "epoch": 0.6138629005531904,
      "grad_norm": 4.703521982032006,
      "learning_rate": 1.7133332479018419e-06,
      "loss": 4.4832,
      "step": 3010
    },
    {
      "epoch": 0.6159023121829352,
      "grad_norm": 9.409558474450884,
      "learning_rate": 1.6976722625875362e-06,
      "loss": 4.0591,
      "step": 3020
    },
    {
      "epoch": 0.6179417238126801,
      "grad_norm": 5.118006818534075,
      "learning_rate": 1.6820462999172266e-06,
      "loss": 4.5095,
      "step": 3030
    },
    {
      "epoch": 0.6199811354424248,
      "grad_norm": 2.6789318264322097,
      "learning_rate": 1.6664560419844051e-06,
      "loss": 4.3834,
      "step": 3040
    },
    {
      "epoch": 0.6220205470721697,
      "grad_norm": 5.328537617550238,
      "learning_rate": 1.650902169324004e-06,
      "loss": 4.554,
      "step": 3050
    },
    {
      "epoch": 0.6240599587019146,
      "grad_norm": 3.9326508972377416,
      "learning_rate": 1.6353853608826942e-06,
      "loss": 4.3164,
      "step": 3060
    },
    {
      "epoch": 0.6260993703316593,
      "grad_norm": 4.470136786890529,
      "learning_rate": 1.6199062939892426e-06,
      "loss": 4.4296,
      "step": 3070
    },
    {
      "epoch": 0.6281387819614042,
      "grad_norm": 5.384698695320496,
      "learning_rate": 1.6044656443249512e-06,
      "loss": 4.4133,
      "step": 3080
    },
    {
      "epoch": 0.6301781935911489,
      "grad_norm": 3.245101255628368,
      "learning_rate": 1.5890640858941578e-06,
      "loss": 4.3772,
      "step": 3090
    },
    {
      "epoch": 0.6322176052208938,
      "grad_norm": 4.177433422795092,
      "learning_rate": 1.5737022909948187e-06,
      "loss": 4.5023,
      "step": 3100
    },
    {
      "epoch": 0.6342570168506386,
      "grad_norm": 4.942129871851291,
      "learning_rate": 1.55838093018916e-06,
      "loss": 4.3273,
      "step": 3110
    },
    {
      "epoch": 0.6362964284803834,
      "grad_norm": 4.032557715744289,
      "learning_rate": 1.5431006722744086e-06,
      "loss": 4.182,
      "step": 3120
    },
    {
      "epoch": 0.6383358401101282,
      "grad_norm": 8.249226380324744,
      "learning_rate": 1.5278621842535937e-06,
      "loss": 4.5497,
      "step": 3130
    },
    {
      "epoch": 0.6403752517398731,
      "grad_norm": 3.6779613986557465,
      "learning_rate": 1.51266613130644e-06,
      "loss": 4.4185,
      "step": 3140
    },
    {
      "epoch": 0.6424146633696178,
      "grad_norm": 5.380447409096335,
      "learning_rate": 1.4975131767603215e-06,
      "loss": 4.5836,
      "step": 3150
    },
    {
      "epoch": 0.6444540749993627,
      "grad_norm": 5.1455123232357876,
      "learning_rate": 1.4824039820613134e-06,
      "loss": 4.4765,
      "step": 3160
    },
    {
      "epoch": 0.6464934866291075,
      "grad_norm": 3.9507143956971023,
      "learning_rate": 1.4673392067453158e-06,
      "loss": 4.0599,
      "step": 3170
    },
    {
      "epoch": 0.6485328982588523,
      "grad_norm": 4.797317932658691,
      "learning_rate": 1.4523195084092665e-06,
      "loss": 4.5293,
      "step": 3180
    },
    {
      "epoch": 0.6505723098885972,
      "grad_norm": 3.5538316460061687,
      "learning_rate": 1.437345542682434e-06,
      "loss": 4.2219,
      "step": 3190
    },
    {
      "epoch": 0.6526117215183419,
      "grad_norm": 7.363861097074114,
      "learning_rate": 1.4224179631978002e-06,
      "loss": 4.5262,
      "step": 3200
    },
    {
      "epoch": 0.6546511331480868,
      "grad_norm": 5.098795556120382,
      "learning_rate": 1.407537421563525e-06,
      "loss": 4.4479,
      "step": 3210
    },
    {
      "epoch": 0.6566905447778316,
      "grad_norm": 3.3971768297731684,
      "learning_rate": 1.3927045673345085e-06,
      "loss": 4.4144,
      "step": 3220
    },
    {
      "epoch": 0.6587299564075764,
      "grad_norm": 4.591105852592274,
      "learning_rate": 1.3779200479840322e-06,
      "loss": 4.0876,
      "step": 3230
    },
    {
      "epoch": 0.6607693680373212,
      "grad_norm": 4.587346979025779,
      "learning_rate": 1.363184508875498e-06,
      "loss": 4.4312,
      "step": 3240
    },
    {
      "epoch": 0.6628087796670661,
      "grad_norm": 3.86759041881434,
      "learning_rate": 1.3484985932342554e-06,
      "loss": 4.4305,
      "step": 3250
    },
    {
      "epoch": 0.6648481912968108,
      "grad_norm": 4.625795822737769,
      "learning_rate": 1.3338629421195272e-06,
      "loss": 4.2427,
      "step": 3260
    },
    {
      "epoch": 0.6668876029265557,
      "grad_norm": 4.05468266187965,
      "learning_rate": 1.3192781943964255e-06,
      "loss": 4.0492,
      "step": 3270
    },
    {
      "epoch": 0.6689270145563005,
      "grad_norm": 12.057541220778676,
      "learning_rate": 1.304744986708059e-06,
      "loss": 4.3695,
      "step": 3280
    },
    {
      "epoch": 0.6709664261860453,
      "grad_norm": 3.955501871805983,
      "learning_rate": 1.2902639534477517e-06,
      "loss": 4.2644,
      "step": 3290
    },
    {
      "epoch": 0.6730058378157902,
      "grad_norm": 6.015589365963082,
      "learning_rate": 1.275835726731345e-06,
      "loss": 4.6075,
      "step": 3300
    },
    {
      "epoch": 0.675045249445535,
      "grad_norm": 3.6104804846277974,
      "learning_rate": 1.2614609363696054e-06,
      "loss": 4.2682,
      "step": 3310
    },
    {
      "epoch": 0.6770846610752798,
      "grad_norm": 3.2686468990077926,
      "learning_rate": 1.247140209840735e-06,
      "loss": 4.1998,
      "step": 3320
    },
    {
      "epoch": 0.6791240727050246,
      "grad_norm": 6.0351949078430716,
      "learning_rate": 1.2328741722629773e-06,
      "loss": 4.3661,
      "step": 3330
    },
    {
      "epoch": 0.6811634843347694,
      "grad_norm": 2.7443897616119495,
      "learning_rate": 1.2186634463673339e-06,
      "loss": 4.0667,
      "step": 3340
    },
    {
      "epoch": 0.6832028959645142,
      "grad_norm": 4.166081955260256,
      "learning_rate": 1.2045086524703808e-06,
      "loss": 4.2851,
      "step": 3350
    },
    {
      "epoch": 0.6852423075942591,
      "grad_norm": 3.8466934846642977,
      "learning_rate": 1.1904104084471872e-06,
      "loss": 4.2769,
      "step": 3360
    },
    {
      "epoch": 0.6872817192240038,
      "grad_norm": 4.318540374677886,
      "learning_rate": 1.1763693297043501e-06,
      "loss": 4.06,
      "step": 3370
    },
    {
      "epoch": 0.6893211308537487,
      "grad_norm": 12.509219369860856,
      "learning_rate": 1.162386029153125e-06,
      "loss": 4.3515,
      "step": 3380
    },
    {
      "epoch": 0.6913605424834935,
      "grad_norm": 20.50428059192819,
      "learning_rate": 1.1484611171826768e-06,
      "loss": 4.2457,
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.6933999541132383, | |
| "grad_norm": 4.712481159806876, | |
| "learning_rate": 1.134595201633433e-06, | |
| "loss": 4.344, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.6954393657429832, | |
| "grad_norm": 4.559090159189356, | |
| "learning_rate": 1.1207888877705503e-06, | |
| "loss": 4.261, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.697478777372728, | |
| "grad_norm": 3.076117640729486, | |
| "learning_rate": 1.1070427782574964e-06, | |
| "loss": 4.3783, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.6995181890024728, | |
| "grad_norm": 10.814924419040487, | |
| "learning_rate": 1.0933574731297373e-06, | |
| "loss": 4.5092, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.7015576006322176, | |
| "grad_norm": 3.5390470251362354, | |
| "learning_rate": 1.0797335697685523e-06, | |
| "loss": 4.3021, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.7035970122619625, | |
| "grad_norm": 4.333079885813776, | |
| "learning_rate": 1.066171662874955e-06, | |
| "loss": 4.2686, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.7056364238917072, | |
| "grad_norm": 8.713694803463515, | |
| "learning_rate": 1.0526723444437287e-06, | |
| "loss": 4.2791, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.7076758355214521, | |
| "grad_norm": 4.354574129939474, | |
| "learning_rate": 1.0392362037375928e-06, | |
| "loss": 3.9504, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.7097152471511968, | |
| "grad_norm": 6.694211003803312, | |
| "learning_rate": 1.0258638272614763e-06, | |
| "loss": 4.1915, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.7117546587809417, | |
| "grad_norm": 3.241654472591947, | |
| "learning_rate": 1.012555798736918e-06, | |
| "loss": 4.1974, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.7137940704106865, | |
| "grad_norm": 5.514721835392517, | |
| "learning_rate": 9.993126990765825e-07, | |
| "loss": 4.2386, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.7158334820404313, | |
| "grad_norm": 5.583834432765822, | |
| "learning_rate": 9.861351063589084e-07, | |
| "loss": 4.3129, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.7178728936701761, | |
| "grad_norm": 2.949575907156756, | |
| "learning_rate": 9.730235958028707e-07, | |
| "loss": 4.1852, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.719912305299921, | |
| "grad_norm": 6.121302032242972, | |
| "learning_rate": 9.599787397428712e-07, | |
| "loss": 4.2227, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.7219517169296658, | |
| "grad_norm": 2.9030577045949673, | |
| "learning_rate": 9.47001107603758e-07, | |
| "loss": 4.3546, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.7239911285594106, | |
| "grad_norm": 3.9002064313827383, | |
| "learning_rate": 9.34091265875969e-07, | |
| "loss": 4.5714, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.7260305401891555, | |
| "grad_norm": 3.9962371475302647, | |
| "learning_rate": 9.212497780907989e-07, | |
| "loss": 4.3046, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.7280699518189002, | |
| "grad_norm": 4.008686311903414, | |
| "learning_rate": 9.084772047958107e-07, | |
| "loss": 4.4721, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.7301093634486451, | |
| "grad_norm": 5.036721845326526, | |
| "learning_rate": 8.957741035303547e-07, | |
| "loss": 4.3119, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.7321487750783899, | |
| "grad_norm": 3.6228822528546694, | |
| "learning_rate": 8.83141028801241e-07, | |
| "loss": 3.977, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.7341881867081347, | |
| "grad_norm": 4.485524936400477, | |
| "learning_rate": 8.705785320585281e-07, | |
| "loss": 4.4212, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.7362275983378795, | |
| "grad_norm": 5.387805154097571, | |
| "learning_rate": 8.580871616714561e-07, | |
| "loss": 4.3625, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.7382670099676243, | |
| "grad_norm": 3.846937945913234, | |
| "learning_rate": 8.456674629045081e-07, | |
| "loss": 4.1486, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.7403064215973691, | |
| "grad_norm": 6.153503941340491, | |
| "learning_rate": 8.333199778936052e-07, | |
| "loss": 4.6233, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.742345833227114, | |
| "grad_norm": 3.444349995280495, | |
| "learning_rate": 8.210452456224471e-07, | |
| "loss": 4.2848, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.7443852448568588, | |
| "grad_norm": 5.676013436667852, | |
| "learning_rate": 8.08843801898982e-07, | |
| "loss": 4.3391, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.7464246564866036, | |
| "grad_norm": 4.470026226905971, | |
| "learning_rate": 7.967161793320175e-07, | |
| "loss": 4.2121, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.7484640681163485, | |
| "grad_norm": 4.040281249461269, | |
| "learning_rate": 7.846629073079734e-07, | |
| "loss": 4.1892, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.7505034797460932, | |
| "grad_norm": 6.190674341778419, | |
| "learning_rate": 7.726845119677698e-07, | |
| "loss": 4.5201, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.7525428913758381, | |
| "grad_norm": 2.909828341550929, | |
| "learning_rate": 7.607815161838647e-07, | |
| "loss": 4.2093, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.7545823030055829, | |
| "grad_norm": 6.443973285289415, | |
| "learning_rate": 7.489544395374276e-07, | |
| "loss": 4.4615, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.7566217146353277, | |
| "grad_norm": 4.2246542700997995, | |
| "learning_rate": 7.372037982956581e-07, | |
| "loss": 4.2434, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.7586611262650725, | |
| "grad_norm": 4.279904431120203, | |
| "learning_rate": 7.255301053892538e-07, | |
| "loss": 4.3651, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.7607005378948174, | |
| "grad_norm": 4.186725897756824, | |
| "learning_rate": 7.13933870390014e-07, | |
| "loss": 4.119, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.7627399495245621, | |
| "grad_norm": 3.298108219376201, | |
| "learning_rate": 7.024155994886056e-07, | |
| "loss": 4.3578, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.764779361154307, | |
| "grad_norm": 7.412387730617449, | |
| "learning_rate": 6.909757954724567e-07, | |
| "loss": 4.3607, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.7668187727840517, | |
| "grad_norm": 5.690719384335585, | |
| "learning_rate": 6.796149577038172e-07, | |
| "loss": 4.4603, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.7688581844137966, | |
| "grad_norm": 3.3341556363071145, | |
| "learning_rate": 6.683335820979577e-07, | |
| "loss": 4.2913, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.7708975960435415, | |
| "grad_norm": 8.374678448066359, | |
| "learning_rate": 6.57132161101521e-07, | |
| "loss": 4.2341, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.7729370076732862, | |
| "grad_norm": 4.567460131418673, | |
| "learning_rate": 6.460111836710292e-07, | |
| "loss": 4.2593, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.7749764193030311, | |
| "grad_norm": 4.921509276106569, | |
| "learning_rate": 6.349711352515397e-07, | |
| "loss": 4.2571, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.7770158309327759, | |
| "grad_norm": 5.485393631951355, | |
| "learning_rate": 6.240124977554496e-07, | |
| "loss": 4.2477, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.7790552425625207, | |
| "grad_norm": 4.0589121948883315, | |
| "learning_rate": 6.1313574954147e-07, | |
| "loss": 4.2543, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.7810946541922655, | |
| "grad_norm": 6.063871941982522, | |
| "learning_rate": 6.023413653937335e-07, | |
| "loss": 4.1011, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.7831340658220104, | |
| "grad_norm": 4.6886199538087, | |
| "learning_rate": 5.916298165010778e-07, | |
| "loss": 4.2018, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.7851734774517551, | |
| "grad_norm": 6.255729817180625, | |
| "learning_rate": 5.810015704364722e-07, | |
| "loss": 4.0752, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.7872128890815, | |
| "grad_norm": 7.741347653825498, | |
| "learning_rate": 5.704570911366117e-07, | |
| "loss": 4.4319, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.7892523007112449, | |
| "grad_norm": 6.658946418910774, | |
| "learning_rate": 5.599968388816635e-07, | |
| "loss": 4.623, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.7912917123409896, | |
| "grad_norm": 5.600756053871576, | |
| "learning_rate": 5.496212702751736e-07, | |
| "loss": 4.541, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.7933311239707345, | |
| "grad_norm": 9.936589010366395, | |
| "learning_rate": 5.393308382241383e-07, | |
| "loss": 4.1214, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.7953705356004792, | |
| "grad_norm": 6.814478103726471, | |
| "learning_rate": 5.291259919192337e-07, | |
| "loss": 4.3134, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.7974099472302241, | |
| "grad_norm": 3.908443331703017, | |
| "learning_rate": 5.190071768152067e-07, | |
| "loss": 4.4869, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.7994493588599689, | |
| "grad_norm": 3.716131760782437, | |
| "learning_rate": 5.089748346114309e-07, | |
| "loss": 4.4105, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.8014887704897137, | |
| "grad_norm": 6.8101963045551415, | |
| "learning_rate": 4.990294032326252e-07, | |
| "loss": 4.0734, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.8035281821194585, | |
| "grad_norm": 3.7721519878629373, | |
| "learning_rate": 4.891713168097404e-07, | |
| "loss": 4.2706, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.8055675937492034, | |
| "grad_norm": 7.455829480816375, | |
| "learning_rate": 4.794010056610044e-07, | |
| "loss": 4.5555, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.8076070053789481, | |
| "grad_norm": 5.575426927891118, | |
| "learning_rate": 4.6971889627314305e-07, | |
| "loss": 4.1672, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.809646417008693, | |
| "grad_norm": 8.292437401244339, | |
| "learning_rate": 4.601254112827608e-07, | |
| "loss": 4.3117, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.8116858286384379, | |
| "grad_norm": 5.710115393344384, | |
| "learning_rate": 4.5062096945789004e-07, | |
| "loss": 4.4062, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.8137252402681826, | |
| "grad_norm": 3.046501449591303, | |
| "learning_rate": 4.412059856797182e-07, | |
| "loss": 4.2776, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.8157646518979275, | |
| "grad_norm": 6.346563023941847, | |
| "learning_rate": 4.318808709244693e-07, | |
| "loss": 4.1605, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.8178040635276723, | |
| "grad_norm": 7.581367680226025, | |
| "learning_rate": 4.2264603224546977e-07, | |
| "loss": 4.32, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.8198434751574171, | |
| "grad_norm": 3.438971658725587, | |
| "learning_rate": 4.135018727553791e-07, | |
| "loss": 4.355, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.8218828867871619, | |
| "grad_norm": 5.36352458560893, | |
| "learning_rate": 4.0444879160859096e-07, | |
| "loss": 4.1265, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.8239222984169067, | |
| "grad_norm": 4.14351500900665, | |
| "learning_rate": 3.954871839838134e-07, | |
| "loss": 4.0768, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.8259617100466515, | |
| "grad_norm": 12.933552582436281, | |
| "learning_rate": 3.866174410668161e-07, | |
| "loss": 4.3251, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.8280011216763964, | |
| "grad_norm": 3.3139167027662144, | |
| "learning_rate": 3.7783995003335486e-07, | |
| "loss": 4.2153, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.8300405333061411, | |
| "grad_norm": 18.71624346031171, | |
| "learning_rate": 3.6915509403227216e-07, | |
| "loss": 4.2698, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.832079944935886, | |
| "grad_norm": 10.79391233473762, | |
| "learning_rate": 3.6056325216877104e-07, | |
| "loss": 4.2338, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.8341193565656309, | |
| "grad_norm": 4.261424850688782, | |
| "learning_rate": 3.520647994878676e-07, | |
| "loss": 4.0913, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.8361587681953756, | |
| "grad_norm": 5.630887303127973, | |
| "learning_rate": 3.436601069580181e-07, | |
| "loss": 4.2007, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.8381981798251205, | |
| "grad_norm": 4.374740825282518, | |
| "learning_rate": 3.353495414549282e-07, | |
| "loss": 4.3166, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.8402375914548653, | |
| "grad_norm": 5.070493007615233, | |
| "learning_rate": 3.271334657455366e-07, | |
| "loss": 4.3273, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.8422770030846101, | |
| "grad_norm": 3.9659319877143755, | |
| "learning_rate": 3.1901223847217943e-07, | |
| "loss": 4.0599, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.8443164147143549, | |
| "grad_norm": 3.302210490940212, | |
| "learning_rate": 3.1098621413693684e-07, | |
| "loss": 4.1971, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.8463558263440998, | |
| "grad_norm": 8.124128614183714, | |
| "learning_rate": 3.030557430861572e-07, | |
| "loss": 4.4926, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.8483952379738445, | |
| "grad_norm": 3.630713845863831, | |
| "learning_rate": 2.9522117149516443e-07, | |
| "loss": 4.4875, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.8504346496035894, | |
| "grad_norm": 4.977767116293893, | |
| "learning_rate": 2.874828413531475e-07, | |
| "loss": 4.27, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.8524740612333341, | |
| "grad_norm": 4.821953875486025, | |
| "learning_rate": 2.798410904482296e-07, | |
| "loss": 4.2437, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.854513472863079, | |
| "grad_norm": 3.528796787732878, | |
| "learning_rate": 2.7229625235272785e-07, | |
| "loss": 4.2598, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.8565528844928239, | |
| "grad_norm": 8.200601299622635, | |
| "learning_rate": 2.6484865640858724e-07, | |
| "loss": 4.4022, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.8585922961225686, | |
| "grad_norm": 4.935057938188518, | |
| "learning_rate": 2.5749862771300954e-07, | |
| "loss": 4.2679, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.8606317077523135, | |
| "grad_norm": 4.4371812067689325, | |
| "learning_rate": 2.502464871042584e-07, | |
| "loss": 3.9411, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.8626711193820583, | |
| "grad_norm": 6.133431718705425, | |
| "learning_rate": 2.430925511476556e-07, | |
| "loss": 4.212, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.8647105310118031, | |
| "grad_norm": 3.7989128876998337, | |
| "learning_rate": 2.360371321217636e-07, | |
| "loss": 4.3091, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.8667499426415479, | |
| "grad_norm": 7.197995519271034, | |
| "learning_rate": 2.2908053800475284e-07, | |
| "loss": 4.4547, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.8687893542712928, | |
| "grad_norm": 6.338189710574811, | |
| "learning_rate": 2.2222307246095892e-07, | |
| "loss": 4.1689, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.8708287659010375, | |
| "grad_norm": 5.284651087725345, | |
| "learning_rate": 2.1546503482762742e-07, | |
| "loss": 4.1259, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.8728681775307824, | |
| "grad_norm": 5.362100007675278, | |
| "learning_rate": 2.088067201018451e-07, | |
| "loss": 4.2418, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.8749075891605271, | |
| "grad_norm": 2.9996892461239066, | |
| "learning_rate": 2.022484189276669e-07, | |
| "loss": 4.3194, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.876947000790272, | |
| "grad_norm": 7.9814644784987925, | |
| "learning_rate": 1.9579041758342522e-07, | |
| "loss": 4.062, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.8789864124200168, | |
| "grad_norm": 3.855185899855706, | |
| "learning_rate": 1.894329979692361e-07, | |
| "loss": 4.1905, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.8810258240497616, | |
| "grad_norm": 3.8633348931471305, | |
| "learning_rate": 1.8317643759469233e-07, | |
| "loss": 4.5633, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.8830652356795065, | |
| "grad_norm": 5.787458872245402, | |
| "learning_rate": 1.77021009566751e-07, | |
| "loss": 4.1177, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.8851046473092513, | |
| "grad_norm": 3.208157539224473, | |
| "learning_rate": 1.7096698257781124e-07, | |
| "loss": 4.2558, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.8871440589389961, | |
| "grad_norm": 6.020899915866313, | |
| "learning_rate": 1.6501462089398485e-07, | |
| "loss": 4.3454, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.8891834705687409, | |
| "grad_norm": 4.643730489010232, | |
| "learning_rate": 1.591641843435618e-07, | |
| "loss": 4.0158, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.8912228821984858, | |
| "grad_norm": 5.134502691716626, | |
| "learning_rate": 1.534159283056691e-07, | |
| "loss": 4.2297, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.8932622938282305, | |
| "grad_norm": 6.77345470394396, | |
| "learning_rate": 1.4777010369912054e-07, | |
| "loss": 4.271, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.8953017054579754, | |
| "grad_norm": 3.9105901688770763, | |
| "learning_rate": 1.4222695697146682e-07, | |
| "loss": 4.1278, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.8973411170877202, | |
| "grad_norm": 5.788934405987601, | |
| "learning_rate": 1.3678673008823584e-07, | |
| "loss": 4.2833, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.899380528717465, | |
| "grad_norm": 6.550185731329167, | |
| "learning_rate": 1.3144966052237002e-07, | |
| "loss": 4.2289, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.9014199403472098, | |
| "grad_norm": 3.933076654499303, | |
| "learning_rate": 1.2621598124386376e-07, | |
| "loss": 3.961, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.9034593519769546, | |
| "grad_norm": 7.28993855071651, | |
| "learning_rate": 1.2108592070958936e-07, | |
| "loss": 4.1796, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.9054987636066995, | |
| "grad_norm": 2.8163528727824465, | |
| "learning_rate": 1.1605970285332835e-07, | |
| "loss": 4.2515, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.9075381752364443, | |
| "grad_norm": 5.909100454954851, | |
| "learning_rate": 1.1113754707599345e-07, | |
| "loss": 4.0799, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.9095775868661891, | |
| "grad_norm": 3.9679302320486065, | |
| "learning_rate": 1.0631966823605456e-07, | |
| "loss": 4.1994, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.9116169984959339, | |
| "grad_norm": 3.8889816643991453, | |
| "learning_rate": 1.0160627664015793e-07, | |
| "loss": 4.273, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.9136564101256788, | |
| "grad_norm": 5.2979751703547, | |
| "learning_rate": 9.699757803394549e-08, | |
| "loss": 4.0648, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.9156958217554235, | |
| "grad_norm": 3.6151393643840817, | |
| "learning_rate": 9.249377359307532e-08, | |
| "loss": 4.06, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.9177352333851684, | |
| "grad_norm": 4.111366955877977, | |
| "learning_rate": 8.809505991443979e-08, | |
| "loss": 4.1282, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.9197746450149132, | |
| "grad_norm": 3.978413951555099, | |
| "learning_rate": 8.380162900758299e-08, | |
| "loss": 4.2936, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.921814056644658, | |
| "grad_norm": 3.641231421347921, | |
| "learning_rate": 7.96136682863205e-08, | |
| "loss": 4.3491, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.9238534682744028, | |
| "grad_norm": 5.665911738375297, | |
| "learning_rate": 7.553136056055621e-08, | |
| "loss": 4.3914, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.9258928799041477, | |
| "grad_norm": 2.896437566008968, | |
| "learning_rate": 7.155488402830618e-08, | |
| "loss": 4.2446, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.9279322915338925, | |
| "grad_norm": 4.397366238946691, | |
| "learning_rate": 6.7684412267916e-08, | |
| "loss": 4.1295, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.9299717031636373, | |
| "grad_norm": 3.7962281859910574, | |
| "learning_rate": 6.392011423048711e-08, | |
| "loss": 4.1476, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.9320111147933821, | |
| "grad_norm": 5.396299510851452, | |
| "learning_rate": 6.026215423249992e-08, | |
| "loss": 4.1864, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.9340505264231269, | |
| "grad_norm": 6.24798187472314, | |
| "learning_rate": 5.671069194864154e-08, | |
| "loss": 4.6053, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.9360899380528718, | |
| "grad_norm": 4.347169377156265, | |
| "learning_rate": 5.32658824048371e-08, | |
| "loss": 4.4272, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.9381293496826165, | |
| "grad_norm": 7.01565530019786, | |
| "learning_rate": 4.99278759714808e-08, | |
| "loss": 4.3681, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.9401687613123614, | |
| "grad_norm": 9.337991796223331, | |
| "learning_rate": 4.669681835687279e-08, | |
| "loss": 4.2015, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.9422081729421062, | |
| "grad_norm": 6.742563868831824, | |
| "learning_rate": 4.357285060085953e-08, | |
| "loss": 3.974, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.944247584571851, | |
| "grad_norm": 6.533686057687156, | |
| "learning_rate": 4.0556109068675685e-08, | |
| "loss": 4.3575, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.9462869962015958, | |
| "grad_norm": 4.02192579282106, | |
| "learning_rate": 3.764672544499331e-08, | |
| "loss": 4.0806, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.9483264078313407, | |
| "grad_norm": 6.441975788133546, | |
| "learning_rate": 3.48448267281723e-08, | |
| "loss": 4.0688, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.9503658194610854, | |
| "grad_norm": 3.9399725566315515, | |
| "learning_rate": 3.215053522471756e-08, | |
| "loss": 4.3011, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.9524052310908303, | |
| "grad_norm": 4.847038369316615, | |
| "learning_rate": 2.9563968543939726e-08, | |
| "loss": 4.3392, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.9544446427205752, | |
| "grad_norm": 6.073962610386516, | |
| "learning_rate": 2.708523959282172e-08, | |
| "loss": 4.2602, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.9564840543503199, | |
| "grad_norm": 3.436252568358376, | |
| "learning_rate": 2.471445657108995e-08, | |
| "loss": 4.4303, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.9585234659800648, | |
| "grad_norm": 5.1023824914571545, | |
| "learning_rate": 2.245172296649084e-08, | |
| "loss": 4.1511, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.9605628776098095, | |
| "grad_norm": 4.674912647498775, | |
| "learning_rate": 2.0297137550274458e-08, | |
| "loss": 4.3211, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.9626022892395544, | |
| "grad_norm": 3.990797348629162, | |
| "learning_rate": 1.8250794372882687e-08, | |
| "loss": 4.2356, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.9646417008692992, | |
| "grad_norm": 4.3953191044322555, | |
| "learning_rate": 1.631278275984305e-08, | |
| "loss": 4.4084, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.966681112499044, | |
| "grad_norm": 4.412715515426169, | |
| "learning_rate": 1.4483187307870461e-08, | |
| "loss": 4.035, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.9687205241287888, | |
| "grad_norm": 5.844550729133818, | |
| "learning_rate": 1.276208788117378e-08, | |
| "loss": 4.1833, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.9707599357585337, | |
| "grad_norm": 4.814300797469836, | |
| "learning_rate": 1.114955960797054e-08, | |
| "loss": 4.1522, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.9727993473882784, | |
| "grad_norm": 3.808346781062702, | |
| "learning_rate": 9.645672877206524e-09, | |
| "loss": 4.3424, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.9748387590180233, | |
| "grad_norm": 4.616232239908184, | |
| "learning_rate": 8.250493335483222e-09, | |
| "loss": 4.2023, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.9768781706477682, | |
| "grad_norm": 3.5474207218106226, | |
| "learning_rate": 6.96408188419373e-09, | |
| "loss": 4.2108, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.9789175822775129, | |
| "grad_norm": 5.824257814537629, | |
| "learning_rate": 5.78649467686182e-09, | |
| "loss": 4.4236, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.9809569939072578, | |
| "grad_norm": 5.7167849452363955, | |
| "learning_rate": 4.7177831166936305e-09, | |
| "loss": 4.4846, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.9829964055370026, | |
| "grad_norm": 4.277887071585096, | |
| "learning_rate": 3.757993854331399e-09, | |
| "loss": 4.1656, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.9850358171667474, | |
| "grad_norm": 7.68100927972657, | |
| "learning_rate": 2.907168785818426e-09, | |
| "loss": 4.3236, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.9870752287964922, | |
| "grad_norm": 6.350245782652061, | |
| "learning_rate": 2.1653450507699846e-09, | |
| "loss": 4.0609, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.989114640426237, | |
| "grad_norm": 5.287321922805873, | |
| "learning_rate": 1.5325550307523918e-09, | |
| "loss": 4.5286, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.9911540520559818, | |
| "grad_norm": 3.7490178618124177, | |
| "learning_rate": 1.0088263478685855e-09, | |
| "loss": 4.3112, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.9931934636857267, | |
| "grad_norm": 3.5121517243670644, | |
| "learning_rate": 5.941818635532559e-10, | |
| "loss": 4.1425, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.9952328753154714, | |
| "grad_norm": 6.450314985115036, | |
| "learning_rate": 2.886396775747535e-10, | |
| "loss": 4.1702, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.9972722869452163, | |
| "grad_norm": 3.28593100847704, | |
| "learning_rate": 9.221312724516651e-11, | |
| "loss": 4.1792, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.9993116985749612, | |
| "grad_norm": 6.915473930840807, | |
| "learning_rate": 4.910786837453163e-12, | |
| "loss": 4.6966, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.9999235220638846, | |
| "step": 4903, | |
| "total_flos": 8.858734643076137e+17, | |
| "train_loss": 5.744887723889663, | |
| "train_runtime": 109223.0893, | |
| "train_samples_per_second": 11.493, | |
| "train_steps_per_second": 0.045 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4903, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.858734643076137e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |