{
  "best_metric": 1.5390384197235107,
  "best_model_checkpoint": "facial_age_image_detection/checkpoint-15680",
  "epoch": 49.0,
  "eval_steps": 500,
  "global_step": 15680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.22556684910086006,
      "eval_loss": 2.945213556289673,
      "eval_runtime": 51.2577,
      "eval_samples_per_second": 99.809,
      "eval_steps_per_second": 3.121,
      "step": 320
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.915360501567398e-06,
      "loss": 2.913,
      "step": 500
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.2580140734949179,
      "eval_loss": 2.7111563682556152,
      "eval_runtime": 50.9659,
      "eval_samples_per_second": 100.381,
      "eval_steps_per_second": 3.139,
      "step": 640
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.2922204847537138,
      "eval_loss": 2.557264804840088,
      "eval_runtime": 51.3801,
      "eval_samples_per_second": 99.572,
      "eval_steps_per_second": 3.114,
      "step": 960
    },
    {
      "epoch": 3.12,
      "learning_rate": 2.821316614420063e-06,
      "loss": 2.4834,
      "step": 1000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.3027756059421423,
      "eval_loss": 2.448474645614624,
      "eval_runtime": 51.2195,
      "eval_samples_per_second": 99.884,
      "eval_steps_per_second": 3.124,
      "step": 1280
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 2.246,
      "step": 1500
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.31841282251759184,
      "eval_loss": 2.3704957962036133,
      "eval_runtime": 51.7745,
      "eval_samples_per_second": 98.813,
      "eval_steps_per_second": 3.09,
      "step": 1600
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.3348318999218139,
      "eval_loss": 2.298917055130005,
      "eval_runtime": 51.1066,
      "eval_samples_per_second": 100.104,
      "eval_steps_per_second": 3.131,
      "step": 1920
    },
    {
      "epoch": 6.25,
      "learning_rate": 2.633228840125392e-06,
      "loss": 2.1013,
      "step": 2000
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.3336591086786552,
      "eval_loss": 2.2518346309661865,
      "eval_runtime": 51.0284,
      "eval_samples_per_second": 100.258,
      "eval_steps_per_second": 3.136,
      "step": 2240
    },
    {
      "epoch": 7.81,
      "learning_rate": 2.5391849529780565e-06,
      "loss": 1.992,
      "step": 2500
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.3506645817044566,
      "eval_loss": 2.195549249649048,
      "eval_runtime": 50.666,
      "eval_samples_per_second": 100.975,
      "eval_steps_per_second": 3.158,
      "step": 2560
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.3596559812353401,
      "eval_loss": 2.1464827060699463,
      "eval_runtime": 51.2548,
      "eval_samples_per_second": 99.815,
      "eval_steps_per_second": 3.122,
      "step": 2880
    },
    {
      "epoch": 9.38,
      "learning_rate": 2.445141065830721e-06,
      "loss": 1.9057,
      "step": 3000
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.3844800625488663,
      "eval_loss": 2.097637176513672,
      "eval_runtime": 51.7915,
      "eval_samples_per_second": 98.781,
      "eval_steps_per_second": 3.089,
      "step": 3200
    },
    {
      "epoch": 10.94,
      "learning_rate": 2.3510971786833857e-06,
      "loss": 1.8272,
      "step": 3500
    },
    {
      "epoch": 11.0,
      "eval_accuracy": 0.3784206411258796,
      "eval_loss": 2.0678927898406982,
      "eval_runtime": 52.5229,
      "eval_samples_per_second": 97.405,
      "eval_steps_per_second": 3.046,
      "step": 3520
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.388975762314308,
      "eval_loss": 2.0365805625915527,
      "eval_runtime": 52.5303,
      "eval_samples_per_second": 97.391,
      "eval_steps_per_second": 3.046,
      "step": 3840
    },
    {
      "epoch": 12.5,
      "learning_rate": 2.2570532915360503e-06,
      "loss": 1.7607,
      "step": 4000
    },
    {
      "epoch": 13.0,
      "eval_accuracy": 0.4012900703674746,
      "eval_loss": 1.9932725429534912,
      "eval_runtime": 51.4196,
      "eval_samples_per_second": 99.495,
      "eval_steps_per_second": 3.112,
      "step": 4160
    },
    {
      "epoch": 14.0,
      "eval_accuracy": 0.39190774042220483,
      "eval_loss": 1.9702576398849487,
      "eval_runtime": 51.6324,
      "eval_samples_per_second": 99.085,
      "eval_steps_per_second": 3.099,
      "step": 4480
    },
    {
      "epoch": 14.06,
      "learning_rate": 2.163009404388715e-06,
      "loss": 1.7033,
      "step": 4500
    },
    {
      "epoch": 15.0,
      "eval_accuracy": 0.39874902267396406,
      "eval_loss": 1.9390950202941895,
      "eval_runtime": 51.5713,
      "eval_samples_per_second": 99.203,
      "eval_steps_per_second": 3.103,
      "step": 4800
    },
    {
      "epoch": 15.62,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 1.6414,
      "step": 5000
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.40187646598905397,
      "eval_loss": 1.914416790008545,
      "eval_runtime": 51.2253,
      "eval_samples_per_second": 99.872,
      "eval_steps_per_second": 3.123,
      "step": 5120
    },
    {
      "epoch": 17.0,
      "eval_accuracy": 0.405199374511337,
      "eval_loss": 1.8871612548828125,
      "eval_runtime": 51.1314,
      "eval_samples_per_second": 100.056,
      "eval_steps_per_second": 3.129,
      "step": 5440
    },
    {
      "epoch": 17.19,
      "learning_rate": 1.9749216300940438e-06,
      "loss": 1.5939,
      "step": 5500
    },
    {
      "epoch": 18.0,
      "eval_accuracy": 0.40852228303362004,
      "eval_loss": 1.8684996366500854,
      "eval_runtime": 52.2991,
      "eval_samples_per_second": 97.822,
      "eval_steps_per_second": 3.059,
      "step": 5760
    },
    {
      "epoch": 18.75,
      "learning_rate": 1.8808777429467086e-06,
      "loss": 1.5461,
      "step": 6000
    },
    {
      "epoch": 19.0,
      "eval_accuracy": 0.4161454261141517,
      "eval_loss": 1.842322587966919,
      "eval_runtime": 51.5547,
      "eval_samples_per_second": 99.234,
      "eval_steps_per_second": 3.104,
      "step": 6080
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.41575449569976547,
      "eval_loss": 1.8240511417388916,
      "eval_runtime": 51.0599,
      "eval_samples_per_second": 100.196,
      "eval_steps_per_second": 3.134,
      "step": 6400
    },
    {
      "epoch": 20.31,
      "learning_rate": 1.786833855799373e-06,
      "loss": 1.5016,
      "step": 6500
    },
    {
      "epoch": 21.0,
      "eval_accuracy": 0.4245504300234558,
      "eval_loss": 1.7987163066864014,
      "eval_runtime": 52.1072,
      "eval_samples_per_second": 98.182,
      "eval_steps_per_second": 3.071,
      "step": 6720
    },
    {
      "epoch": 21.88,
      "learning_rate": 1.6927899686520374e-06,
      "loss": 1.4636,
      "step": 7000
    },
    {
      "epoch": 22.0,
      "eval_accuracy": 0.42865519937451135,
      "eval_loss": 1.777077078819275,
      "eval_runtime": 51.2408,
      "eval_samples_per_second": 99.842,
      "eval_steps_per_second": 3.123,
      "step": 7040
    },
    {
      "epoch": 23.0,
      "eval_accuracy": 0.4292415949960907,
      "eval_loss": 1.7641410827636719,
      "eval_runtime": 51.4723,
      "eval_samples_per_second": 99.393,
      "eval_steps_per_second": 3.108,
      "step": 7360
    },
    {
      "epoch": 23.44,
      "learning_rate": 1.5987460815047023e-06,
      "loss": 1.4204,
      "step": 7500
    },
    {
      "epoch": 24.0,
      "eval_accuracy": 0.4462470680218921,
      "eval_loss": 1.7357829809188843,
      "eval_runtime": 51.0544,
      "eval_samples_per_second": 100.207,
      "eval_steps_per_second": 3.134,
      "step": 7680
    },
    {
      "epoch": 25.0,
      "learning_rate": 1.5047021943573667e-06,
      "loss": 1.3845,
      "step": 8000
    },
    {
      "epoch": 25.0,
      "eval_accuracy": 0.4472243940578577,
      "eval_loss": 1.7216110229492188,
      "eval_runtime": 51.2122,
      "eval_samples_per_second": 99.898,
      "eval_steps_per_second": 3.124,
      "step": 8000
    },
    {
      "epoch": 26.0,
      "eval_accuracy": 0.45054730258014075,
      "eval_loss": 1.7076504230499268,
      "eval_runtime": 51.4157,
      "eval_samples_per_second": 99.503,
      "eval_steps_per_second": 3.112,
      "step": 8320
    },
    {
      "epoch": 26.56,
      "learning_rate": 1.4106583072100315e-06,
      "loss": 1.3521,
      "step": 8500
    },
    {
      "epoch": 27.0,
      "eval_accuracy": 0.45347928068803756,
      "eval_loss": 1.6927095651626587,
      "eval_runtime": 51.5458,
      "eval_samples_per_second": 99.252,
      "eval_steps_per_second": 3.104,
      "step": 8640
    },
    {
      "epoch": 28.0,
      "eval_accuracy": 0.45856137607505865,
      "eval_loss": 1.6739999055862427,
      "eval_runtime": 51.4056,
      "eval_samples_per_second": 99.522,
      "eval_steps_per_second": 3.112,
      "step": 8960
    },
    {
      "epoch": 28.12,
      "learning_rate": 1.316614420062696e-06,
      "loss": 1.322,
      "step": 9000
    },
    {
      "epoch": 29.0,
      "eval_accuracy": 0.45269741985926504,
      "eval_loss": 1.6700100898742676,
      "eval_runtime": 51.1233,
      "eval_samples_per_second": 100.072,
      "eval_steps_per_second": 3.13,
      "step": 9280
    },
    {
      "epoch": 29.69,
      "learning_rate": 1.2225705329153605e-06,
      "loss": 1.2923,
      "step": 9500
    },
    {
      "epoch": 30.0,
      "eval_accuracy": 0.46325254104769353,
      "eval_loss": 1.648740291595459,
      "eval_runtime": 52.5113,
      "eval_samples_per_second": 97.427,
      "eval_steps_per_second": 3.047,
      "step": 9600
    },
    {
      "epoch": 31.0,
      "eval_accuracy": 0.4611024237685692,
      "eval_loss": 1.6401363611221313,
      "eval_runtime": 52.7667,
      "eval_samples_per_second": 96.955,
      "eval_steps_per_second": 3.032,
      "step": 9920
    },
    {
      "epoch": 31.25,
      "learning_rate": 1.1285266457680252e-06,
      "loss": 1.2658,
      "step": 10000
    },
    {
      "epoch": 32.0,
      "eval_accuracy": 0.47146207974980453,
      "eval_loss": 1.6254924535751343,
      "eval_runtime": 52.1102,
      "eval_samples_per_second": 98.177,
      "eval_steps_per_second": 3.07,
      "step": 10240
    },
    {
      "epoch": 32.81,
      "learning_rate": 1.0344827586206898e-06,
      "loss": 1.245,
      "step": 10500
    },
    {
      "epoch": 33.0,
      "eval_accuracy": 0.4683346364347146,
      "eval_loss": 1.6232922077178955,
      "eval_runtime": 50.6089,
      "eval_samples_per_second": 101.089,
      "eval_steps_per_second": 3.161,
      "step": 10560
    },
    {
      "epoch": 34.0,
      "eval_accuracy": 0.47439405785770133,
      "eval_loss": 1.6077964305877686,
      "eval_runtime": 53.7216,
      "eval_samples_per_second": 95.232,
      "eval_steps_per_second": 2.978,
      "step": 10880
    },
    {
      "epoch": 34.38,
      "learning_rate": 9.404388714733543e-07,
      "loss": 1.2201,
      "step": 11000
    },
    {
      "epoch": 35.0,
      "eval_accuracy": 0.47537138389366695,
      "eval_loss": 1.5978459119796753,
      "eval_runtime": 51.1056,
      "eval_samples_per_second": 100.107,
      "eval_steps_per_second": 3.131,
      "step": 11200
    },
    {
      "epoch": 35.94,
      "learning_rate": 8.463949843260187e-07,
      "loss": 1.1993,
      "step": 11500
    },
    {
      "epoch": 36.0,
      "eval_accuracy": 0.47556684910086006,
      "eval_loss": 1.5951019525527954,
      "eval_runtime": 51.3095,
      "eval_samples_per_second": 99.709,
      "eval_steps_per_second": 3.118,
      "step": 11520
    },
    {
      "epoch": 37.0,
      "eval_accuracy": 0.4734167318217357,
      "eval_loss": 1.58991539478302,
      "eval_runtime": 50.7545,
      "eval_samples_per_second": 100.799,
      "eval_steps_per_second": 3.152,
      "step": 11840
    },
    {
      "epoch": 37.5,
      "learning_rate": 7.523510971786833e-07,
      "loss": 1.1829,
      "step": 12000
    },
    {
      "epoch": 38.0,
      "eval_accuracy": 0.4794761532447224,
      "eval_loss": 1.5782525539398193,
      "eval_runtime": 51.9408,
      "eval_samples_per_second": 98.497,
      "eval_steps_per_second": 3.08,
      "step": 12160
    },
    {
      "epoch": 39.0,
      "eval_accuracy": 0.48964034401876466,
      "eval_loss": 1.5660046339035034,
      "eval_runtime": 51.4816,
      "eval_samples_per_second": 99.375,
      "eval_steps_per_second": 3.108,
      "step": 12480
    },
    {
      "epoch": 39.06,
      "learning_rate": 6.58307210031348e-07,
      "loss": 1.1693,
      "step": 12500
    },
    {
      "epoch": 40.0,
      "eval_accuracy": 0.4822126661454261,
      "eval_loss": 1.564785122871399,
      "eval_runtime": 51.6845,
      "eval_samples_per_second": 98.985,
      "eval_steps_per_second": 3.096,
      "step": 12800
    },
    {
      "epoch": 40.62,
      "learning_rate": 5.642633228840126e-07,
      "loss": 1.1526,
      "step": 13000
    },
    {
      "epoch": 41.0,
      "eval_accuracy": 0.48377638780297105,
      "eval_loss": 1.5616761445999146,
      "eval_runtime": 51.6796,
      "eval_samples_per_second": 98.995,
      "eval_steps_per_second": 3.096,
      "step": 13120
    },
    {
      "epoch": 42.0,
      "eval_accuracy": 0.4878811571540266,
      "eval_loss": 1.555199384689331,
      "eval_runtime": 51.0688,
      "eval_samples_per_second": 100.178,
      "eval_steps_per_second": 3.133,
      "step": 13440
    },
    {
      "epoch": 42.19,
      "learning_rate": 4.7021943573667715e-07,
      "loss": 1.1422,
      "step": 13500
    },
    {
      "epoch": 43.0,
      "eval_accuracy": 0.4923768569194683,
      "eval_loss": 1.5509822368621826,
      "eval_runtime": 51.2886,
      "eval_samples_per_second": 99.749,
      "eval_steps_per_second": 3.12,
      "step": 13760
    },
    {
      "epoch": 43.75,
      "learning_rate": 3.7617554858934167e-07,
      "loss": 1.1272,
      "step": 14000
    },
    {
      "epoch": 44.0,
      "eval_accuracy": 0.48631743549648165,
      "eval_loss": 1.5502736568450928,
      "eval_runtime": 50.8248,
      "eval_samples_per_second": 100.659,
      "eval_steps_per_second": 3.148,
      "step": 14080
    },
    {
      "epoch": 45.0,
      "eval_accuracy": 0.4880766223612197,
      "eval_loss": 1.5476186275482178,
      "eval_runtime": 50.5558,
      "eval_samples_per_second": 101.195,
      "eval_steps_per_second": 3.165,
      "step": 14400
    },
    {
      "epoch": 45.31,
      "learning_rate": 2.821316614420063e-07,
      "loss": 1.1256,
      "step": 14500
    },
    {
      "epoch": 46.0,
      "eval_accuracy": 0.48670836591086786,
      "eval_loss": 1.544900894165039,
      "eval_runtime": 50.2484,
      "eval_samples_per_second": 101.814,
      "eval_steps_per_second": 3.184,
      "step": 14720
    },
    {
      "epoch": 46.88,
      "learning_rate": 1.8808777429467083e-07,
      "loss": 1.1104,
      "step": 15000
    },
    {
      "epoch": 47.0,
      "eval_accuracy": 0.4904222048475371,
      "eval_loss": 1.5414071083068848,
      "eval_runtime": 50.6029,
      "eval_samples_per_second": 101.101,
      "eval_steps_per_second": 3.162,
      "step": 15040
    },
    {
      "epoch": 48.0,
      "eval_accuracy": 0.4880766223612197,
      "eval_loss": 1.542637586593628,
      "eval_runtime": 50.2557,
      "eval_samples_per_second": 101.799,
      "eval_steps_per_second": 3.184,
      "step": 15360
    },
    {
      "epoch": 48.44,
      "learning_rate": 9.404388714733542e-08,
      "loss": 1.1101,
      "step": 15500
    },
    {
      "epoch": 49.0,
      "eval_accuracy": 0.49276778733385457,
      "eval_loss": 1.5390384197235107,
      "eval_runtime": 50.5326,
      "eval_samples_per_second": 101.242,
      "eval_steps_per_second": 3.166,
      "step": 15680
    }
  ],
  "logging_steps": 500,
  "max_steps": 16000,
  "num_train_epochs": 50,
  "save_steps": 500,
  "total_flos": 7.770344157480554e+19,
  "trial_name": null,
  "trial_params": null
}