{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.2738853503184713,
  "eval_steps": 10000000,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 10.01154032587468,
      "learning_rate": 6.369426751592357e-09,
      "loss": 2.08,
      "step": 10
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 10.43878352794894,
      "learning_rate": 1.2738853503184714e-08,
      "loss": 2.1159,
      "step": 20
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 10.0764269789937,
      "learning_rate": 1.910828025477707e-08,
      "loss": 2.1204,
      "step": 30
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 10.043355598415213,
      "learning_rate": 2.5477707006369427e-08,
      "loss": 2.0954,
      "step": 40
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 10.244967447296624,
      "learning_rate": 3.184713375796178e-08,
      "loss": 2.0804,
      "step": 50
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 10.14527585194019,
      "learning_rate": 3.821656050955414e-08,
      "loss": 2.1049,
      "step": 60
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 9.883474116215512,
      "learning_rate": 4.458598726114649e-08,
      "loss": 2.1009,
      "step": 70
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 9.270706746710415,
      "learning_rate": 5.0955414012738854e-08,
      "loss": 2.1041,
      "step": 80
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 9.06192405647939,
      "learning_rate": 5.732484076433121e-08,
      "loss": 2.0626,
      "step": 90
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 8.896865751544333,
      "learning_rate": 6.369426751592356e-08,
      "loss": 2.0593,
      "step": 100
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 8.638541440982276,
      "learning_rate": 7.006369426751591e-08,
      "loss": 2.0573,
      "step": 110
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 7.505407640336526,
      "learning_rate": 7.643312101910828e-08,
      "loss": 2.0353,
      "step": 120
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 6.964152070747885,
      "learning_rate": 8.280254777070063e-08,
      "loss": 2.0113,
      "step": 130
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 6.658516340765218,
      "learning_rate": 8.917197452229298e-08,
      "loss": 1.9885,
      "step": 140
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 6.318692326956116,
      "learning_rate": 9.554140127388536e-08,
      "loss": 1.9513,
      "step": 150
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 4.4333175994326215,
      "learning_rate": 1.0191082802547771e-07,
      "loss": 1.9259,
      "step": 160
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 3.6473065214887908,
      "learning_rate": 1.0828025477707006e-07,
      "loss": 1.8455,
      "step": 170
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 3.273364001891814,
      "learning_rate": 1.1464968152866242e-07,
      "loss": 1.8639,
      "step": 180
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 3.1950224297351943,
      "learning_rate": 1.2101910828025477e-07,
      "loss": 1.8662,
      "step": 190
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 2.89300939868462,
      "learning_rate": 1.2738853503184713e-07,
      "loss": 1.8149,
      "step": 200
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 2.670339707879458,
      "learning_rate": 1.3375796178343948e-07,
      "loss": 1.8152,
      "step": 210
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 2.5570002442574284,
      "learning_rate": 1.4012738853503183e-07,
      "loss": 1.832,
      "step": 220
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 2.5062438806523546,
      "learning_rate": 1.464968152866242e-07,
      "loss": 1.7855,
      "step": 230
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 2.4136494593464013,
      "learning_rate": 1.5286624203821656e-07,
      "loss": 1.7954,
      "step": 240
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 2.220307851357771,
      "learning_rate": 1.592356687898089e-07,
      "loss": 1.7762,
      "step": 250
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 2.1415882059419262,
      "learning_rate": 1.6560509554140126e-07,
      "loss": 1.7047,
      "step": 260
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 2.164045881946986,
      "learning_rate": 1.719745222929936e-07,
      "loss": 1.7433,
      "step": 270
    },
    {
      "epoch": 0.35668789808917195,
      "grad_norm": 2.1910194750747483,
      "learning_rate": 1.7834394904458596e-07,
      "loss": 1.7479,
      "step": 280
    },
    {
      "epoch": 0.36942675159235666,
      "grad_norm": 2.129814244357235,
      "learning_rate": 1.847133757961783e-07,
      "loss": 1.7411,
      "step": 290
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 2.0980565171537586,
      "learning_rate": 1.9108280254777072e-07,
      "loss": 1.6909,
      "step": 300
    },
    {
      "epoch": 0.39490445859872614,
      "grad_norm": 2.0099440986076336,
      "learning_rate": 1.9745222929936307e-07,
      "loss": 1.7261,
      "step": 310
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 2.008397161319659,
      "learning_rate": 2.0382165605095542e-07,
      "loss": 1.736,
      "step": 320
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 2.203880972039718,
      "learning_rate": 2.1019108280254777e-07,
      "loss": 1.7602,
      "step": 330
    },
    {
      "epoch": 0.43312101910828027,
      "grad_norm": 2.002701604696275,
      "learning_rate": 2.1656050955414012e-07,
      "loss": 1.7154,
      "step": 340
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 2.004891577949888,
      "learning_rate": 2.2292993630573247e-07,
      "loss": 1.7248,
      "step": 350
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 2.0724004487981325,
      "learning_rate": 2.2929936305732485e-07,
      "loss": 1.724,
      "step": 360
    },
    {
      "epoch": 0.4713375796178344,
      "grad_norm": 2.170001293532303,
      "learning_rate": 2.356687898089172e-07,
      "loss": 1.7465,
      "step": 370
    },
    {
      "epoch": 0.4840764331210191,
      "grad_norm": 2.151389641835933,
      "learning_rate": 2.4203821656050955e-07,
      "loss": 1.6897,
      "step": 380
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 2.043765579355119,
      "learning_rate": 2.484076433121019e-07,
      "loss": 1.7148,
      "step": 390
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 2.001502163707854,
      "learning_rate": 2.5477707006369425e-07,
      "loss": 1.6816,
      "step": 400
    },
    {
      "epoch": 0.5222929936305732,
      "grad_norm": 2.070687131301156,
      "learning_rate": 2.611464968152866e-07,
      "loss": 1.7384,
      "step": 410
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 2.078535697723987,
      "learning_rate": 2.6751592356687895e-07,
      "loss": 1.7157,
      "step": 420
    },
    {
      "epoch": 0.5477707006369427,
      "grad_norm": 2.0504732027468306,
      "learning_rate": 2.738853503184713e-07,
      "loss": 1.7236,
      "step": 430
    },
    {
      "epoch": 0.5605095541401274,
      "grad_norm": 1.9243473688738968,
      "learning_rate": 2.8025477707006366e-07,
      "loss": 1.6981,
      "step": 440
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 2.0137540217518746,
      "learning_rate": 2.86624203821656e-07,
      "loss": 1.7092,
      "step": 450
    },
    {
      "epoch": 0.5859872611464968,
      "grad_norm": 1.970528584468044,
      "learning_rate": 2.929936305732484e-07,
      "loss": 1.7061,
      "step": 460
    },
    {
      "epoch": 0.5987261146496815,
      "grad_norm": 2.020510080590342,
      "learning_rate": 2.9936305732484076e-07,
      "loss": 1.7118,
      "step": 470
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 1.9807559958332168,
      "learning_rate": 3.057324840764331e-07,
      "loss": 1.7166,
      "step": 480
    },
    {
      "epoch": 0.6242038216560509,
      "grad_norm": 2.0130000132347012,
      "learning_rate": 3.1210191082802546e-07,
      "loss": 1.7187,
      "step": 490
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 1.9740450831562035,
      "learning_rate": 3.184713375796178e-07,
      "loss": 1.6633,
      "step": 500
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 2.018669800087766,
      "learning_rate": 3.2484076433121017e-07,
      "loss": 1.7267,
      "step": 510
    },
    {
      "epoch": 0.6624203821656051,
      "grad_norm": 2.0177342281703745,
      "learning_rate": 3.312101910828025e-07,
      "loss": 1.6811,
      "step": 520
    },
    {
      "epoch": 0.6751592356687898,
      "grad_norm": 2.1295969636013052,
      "learning_rate": 3.3757961783439487e-07,
      "loss": 1.7181,
      "step": 530
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 1.935378883836435,
      "learning_rate": 3.439490445859872e-07,
      "loss": 1.666,
      "step": 540
    },
    {
      "epoch": 0.7006369426751592,
      "grad_norm": 1.94882774029977,
      "learning_rate": 3.5031847133757957e-07,
      "loss": 1.7072,
      "step": 550
    },
    {
      "epoch": 0.7133757961783439,
      "grad_norm": 1.9785813421260308,
      "learning_rate": 3.566878980891719e-07,
      "loss": 1.6836,
      "step": 560
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 2.04000926459647,
      "learning_rate": 3.6305732484076427e-07,
      "loss": 1.656,
      "step": 570
    },
    {
      "epoch": 0.7388535031847133,
      "grad_norm": 2.0145241555781097,
      "learning_rate": 3.694267515923566e-07,
      "loss": 1.6792,
      "step": 580
    },
    {
      "epoch": 0.7515923566878981,
      "grad_norm": 2.0346257993007164,
      "learning_rate": 3.757961783439491e-07,
      "loss": 1.6507,
      "step": 590
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 2.000023840774598,
      "learning_rate": 3.8216560509554143e-07,
      "loss": 1.6941,
      "step": 600
    },
    {
      "epoch": 0.7770700636942676,
      "grad_norm": 2.2356336224424402,
      "learning_rate": 3.885350318471338e-07,
      "loss": 1.6793,
      "step": 610
    },
    {
      "epoch": 0.7898089171974523,
      "grad_norm": 1.9933208318481108,
      "learning_rate": 3.9490445859872613e-07,
      "loss": 1.7416,
      "step": 620
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 2.0861974568734825,
      "learning_rate": 4.012738853503185e-07,
      "loss": 1.6593,
      "step": 630
    },
    {
      "epoch": 0.8152866242038217,
      "grad_norm": 2.0389226912624046,
      "learning_rate": 4.0764331210191083e-07,
      "loss": 1.6907,
      "step": 640
    },
    {
      "epoch": 0.8280254777070064,
      "grad_norm": 2.2759617725500285,
      "learning_rate": 4.140127388535032e-07,
      "loss": 1.7017,
      "step": 650
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 1.926440466187724,
      "learning_rate": 4.2038216560509554e-07,
      "loss": 1.6706,
      "step": 660
    },
    {
      "epoch": 0.8535031847133758,
      "grad_norm": 2.036852827159696,
      "learning_rate": 4.267515923566879e-07,
      "loss": 1.6981,
      "step": 670
    },
    {
      "epoch": 0.8662420382165605,
      "grad_norm": 2.1171214884510268,
      "learning_rate": 4.3312101910828024e-07,
      "loss": 1.7097,
      "step": 680
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 2.1682391266061107,
      "learning_rate": 4.394904458598726e-07,
      "loss": 1.7079,
      "step": 690
    },
    {
      "epoch": 0.89171974522293,
      "grad_norm": 2.157524011981069,
      "learning_rate": 4.4585987261146494e-07,
      "loss": 1.6683,
      "step": 700
    },
    {
      "epoch": 0.9044585987261147,
      "grad_norm": 2.09423156365316,
      "learning_rate": 4.522292993630573e-07,
      "loss": 1.6753,
      "step": 710
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 1.9476157573306783,
      "learning_rate": 4.585987261146497e-07,
      "loss": 1.68,
      "step": 720
    },
    {
      "epoch": 0.9299363057324841,
      "grad_norm": 2.0665498851684316,
      "learning_rate": 4.6496815286624205e-07,
      "loss": 1.6991,
      "step": 730
    },
    {
      "epoch": 0.9426751592356688,
      "grad_norm": 2.1185088600751305,
      "learning_rate": 4.713375796178344e-07,
      "loss": 1.668,
      "step": 740
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 1.9624466435924417,
      "learning_rate": 4.777070063694267e-07,
      "loss": 1.647,
      "step": 750
    },
    {
      "epoch": 0.9681528662420382,
      "grad_norm": 1.9360271581161757,
      "learning_rate": 4.840764331210191e-07,
      "loss": 1.695,
      "step": 760
    },
    {
      "epoch": 0.9808917197452229,
      "grad_norm": 1.995133724334253,
      "learning_rate": 4.904458598726115e-07,
      "loss": 1.6757,
      "step": 770
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 2.089378861662468,
      "learning_rate": 4.968152866242038e-07,
      "loss": 1.6874,
      "step": 780
    },
    {
      "epoch": 1.0063694267515924,
      "grad_norm": 1.8801993726800705,
      "learning_rate": 5.031847133757962e-07,
      "loss": 1.6811,
      "step": 790
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 1.9395870207009438,
      "learning_rate": 5.095541401273885e-07,
      "loss": 1.6692,
      "step": 800
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 1.9951105637278557,
      "learning_rate": 5.159235668789809e-07,
      "loss": 1.6759,
      "step": 810
    },
    {
      "epoch": 1.0445859872611465,
      "grad_norm": 2.2736979342772288,
      "learning_rate": 5.222929936305732e-07,
      "loss": 1.6526,
      "step": 820
    },
    {
      "epoch": 1.0573248407643312,
      "grad_norm": 2.08017275518694,
      "learning_rate": 5.286624203821656e-07,
      "loss": 1.6308,
      "step": 830
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 1.8093432330198045,
      "learning_rate": 5.350318471337579e-07,
      "loss": 1.65,
      "step": 840
    },
    {
      "epoch": 1.0828025477707006,
      "grad_norm": 2.08057444682157,
      "learning_rate": 5.414012738853503e-07,
      "loss": 1.712,
      "step": 850
    },
    {
      "epoch": 1.0955414012738853,
      "grad_norm": 2.588505198418171,
      "learning_rate": 5.477707006369426e-07,
      "loss": 1.6563,
      "step": 860
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 2.602856488321636,
      "learning_rate": 5.54140127388535e-07,
      "loss": 1.7162,
      "step": 870
    },
    {
      "epoch": 1.1210191082802548,
      "grad_norm": 1.9495454270107675,
      "learning_rate": 5.605095541401273e-07,
      "loss": 1.6859,
      "step": 880
    },
    {
      "epoch": 1.1337579617834395,
      "grad_norm": 2.1057807183083956,
      "learning_rate": 5.668789808917197e-07,
      "loss": 1.6598,
      "step": 890
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 2.5461060920841754,
      "learning_rate": 5.73248407643312e-07,
      "loss": 1.6844,
      "step": 900
    },
    {
      "epoch": 1.1592356687898089,
      "grad_norm": 2.0050220530121257,
      "learning_rate": 5.796178343949044e-07,
      "loss": 1.6777,
      "step": 910
    },
    {
      "epoch": 1.1719745222929936,
      "grad_norm": 1.9094455034129314,
      "learning_rate": 5.859872611464968e-07,
      "loss": 1.6919,
      "step": 920
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 2.187464439680315,
      "learning_rate": 5.923566878980892e-07,
      "loss": 1.65,
      "step": 930
    },
    {
      "epoch": 1.197452229299363,
      "grad_norm": 2.1385664914236204,
      "learning_rate": 5.987261146496815e-07,
      "loss": 1.6744,
      "step": 940
    },
    {
      "epoch": 1.2101910828025477,
      "grad_norm": 2.0228730577655365,
      "learning_rate": 6.050955414012739e-07,
      "loss": 1.6778,
      "step": 950
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 1.933448273850287,
      "learning_rate": 6.114649681528662e-07,
      "loss": 1.6585,
      "step": 960
    },
    {
      "epoch": 1.2356687898089171,
      "grad_norm": 2.2374188952022624,
      "learning_rate": 6.178343949044586e-07,
      "loss": 1.6639,
      "step": 970
    },
    {
      "epoch": 1.2484076433121019,
      "grad_norm": 2.380167429039919,
      "learning_rate": 6.242038216560509e-07,
      "loss": 1.6448,
      "step": 980
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 2.161629296789518,
      "learning_rate": 6.305732484076433e-07,
      "loss": 1.7149,
      "step": 990
    },
    {
      "epoch": 1.2738853503184713,
      "grad_norm": 2.111853135785596,
      "learning_rate": 6.369426751592356e-07,
      "loss": 1.6252,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 15700,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 60278062055424.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}