{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 2690,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03717472118959108,
      "grad_norm": 4.877419471740723,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.8539,
      "step": 10
    },
    {
      "epoch": 0.07434944237918216,
      "grad_norm": 5.054717540740967,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.7058,
      "step": 20
    },
    {
      "epoch": 0.11152416356877323,
      "grad_norm": 2.2075462341308594,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.4233,
      "step": 30
    },
    {
      "epoch": 0.14869888475836432,
      "grad_norm": 2.1013667583465576,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.3267,
      "step": 40
    },
    {
      "epoch": 0.18587360594795538,
      "grad_norm": 2.8093650341033936,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2644,
      "step": 50
    },
    {
      "epoch": 0.22304832713754646,
      "grad_norm": 3.7132174968719482,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.2234,
      "step": 60
    },
    {
      "epoch": 0.26022304832713755,
      "grad_norm": 31.805850982666016,
      "learning_rate": 5.185185185185185e-05,
      "loss": 0.2112,
      "step": 70
    },
    {
      "epoch": 0.29739776951672864,
      "grad_norm": 2.9102275371551514,
      "learning_rate": 5.925925925925926e-05,
      "loss": 0.1907,
      "step": 80
    },
    {
      "epoch": 0.3345724907063197,
      "grad_norm": 2.2478322982788086,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.1921,
      "step": 90
    },
    {
      "epoch": 0.37174721189591076,
      "grad_norm": 1.355301856994629,
      "learning_rate": 7.407407407407407e-05,
      "loss": 0.1638,
      "step": 100
    },
    {
      "epoch": 0.40892193308550184,
      "grad_norm": 1.8597526550292969,
      "learning_rate": 8.148148148148148e-05,
      "loss": 0.1569,
      "step": 110
    },
    {
      "epoch": 0.44609665427509293,
      "grad_norm": 1.6024093627929688,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.1676,
      "step": 120
    },
    {
      "epoch": 0.483271375464684,
      "grad_norm": 1.8739852905273438,
      "learning_rate": 9.62962962962963e-05,
      "loss": 0.1504,
      "step": 130
    },
    {
      "epoch": 0.5204460966542751,
      "grad_norm": 1.0192468166351318,
      "learning_rate": 9.999905507663936e-05,
      "loss": 0.136,
      "step": 140
    },
    {
      "epoch": 0.5576208178438662,
      "grad_norm": 1.430972933769226,
      "learning_rate": 9.9991495904044e-05,
      "loss": 0.1305,
      "step": 150
    },
    {
      "epoch": 0.5947955390334573,
      "grad_norm": 1.3280889987945557,
      "learning_rate": 9.997637870169672e-05,
      "loss": 0.1248,
      "step": 160
    },
    {
      "epoch": 0.6319702602230484,
      "grad_norm": 1.390118956565857,
      "learning_rate": 9.995370575511151e-05,
      "loss": 0.1359,
      "step": 170
    },
    {
      "epoch": 0.6691449814126395,
      "grad_norm": 2.348705530166626,
      "learning_rate": 9.99234804921275e-05,
      "loss": 0.1252,
      "step": 180
    },
    {
      "epoch": 0.7063197026022305,
      "grad_norm": 0.7154766917228699,
      "learning_rate": 9.988570748239062e-05,
      "loss": 0.1143,
      "step": 190
    },
    {
      "epoch": 0.7434944237918215,
      "grad_norm": 0.9727993011474609,
      "learning_rate": 9.984039243666283e-05,
      "loss": 0.1045,
      "step": 200
    },
    {
      "epoch": 0.7806691449814126,
      "grad_norm": 1.0658169984817505,
      "learning_rate": 9.978754220595861e-05,
      "loss": 0.114,
      "step": 210
    },
    {
      "epoch": 0.8178438661710037,
      "grad_norm": 0.9481009840965271,
      "learning_rate": 9.97271647805093e-05,
      "loss": 0.1051,
      "step": 220
    },
    {
      "epoch": 0.8550185873605948,
      "grad_norm": 0.8618531227111816,
      "learning_rate": 9.965926928855499e-05,
      "loss": 0.0996,
      "step": 230
    },
    {
      "epoch": 0.8921933085501859,
      "grad_norm": 0.6774977445602417,
      "learning_rate": 9.95838659949645e-05,
      "loss": 0.102,
      "step": 240
    },
    {
      "epoch": 0.929368029739777,
      "grad_norm": 0.7148956656455994,
      "learning_rate": 9.950096629968352e-05,
      "loss": 0.1072,
      "step": 250
    },
    {
      "epoch": 0.966542750929368,
      "grad_norm": 0.9607800245285034,
      "learning_rate": 9.941058273601096e-05,
      "loss": 0.1025,
      "step": 260
    },
    {
      "epoch": 1.003717472118959,
      "grad_norm": 0.6112203001976013,
      "learning_rate": 9.931272896870426e-05,
      "loss": 0.0927,
      "step": 270
    },
    {
      "epoch": 1.0408921933085502,
      "grad_norm": 0.9129474759101868,
      "learning_rate": 9.920741979191331e-05,
      "loss": 0.0905,
      "step": 280
    },
    {
      "epoch": 1.0780669144981412,
      "grad_norm": 0.7284883856773376,
      "learning_rate": 9.909467112694384e-05,
      "loss": 0.0924,
      "step": 290
    },
    {
      "epoch": 1.1152416356877324,
      "grad_norm": 0.8254210948944092,
      "learning_rate": 9.897450001985039e-05,
      "loss": 0.0901,
      "step": 300
    },
    {
      "epoch": 1.1524163568773234,
      "grad_norm": 0.9527810215950012,
      "learning_rate": 9.88469246388591e-05,
      "loss": 0.0862,
      "step": 310
    },
    {
      "epoch": 1.1895910780669146,
      "grad_norm": 0.9555880427360535,
      "learning_rate": 9.871196427162092e-05,
      "loss": 0.0951,
      "step": 320
    },
    {
      "epoch": 1.2267657992565055,
      "grad_norm": 0.511620044708252,
      "learning_rate": 9.85696393222957e-05,
      "loss": 0.0939,
      "step": 330
    },
    {
      "epoch": 1.2639405204460967,
      "grad_norm": 1.2294408082962036,
      "learning_rate": 9.84199713084672e-05,
      "loss": 0.0862,
      "step": 340
    },
    {
      "epoch": 1.3011152416356877,
      "grad_norm": 1.0503334999084473,
      "learning_rate": 9.826298285789002e-05,
      "loss": 0.0914,
      "step": 350
    },
    {
      "epoch": 1.3382899628252787,
      "grad_norm": 0.6006482839584351,
      "learning_rate": 9.809869770506856e-05,
      "loss": 0.0897,
      "step": 360
    },
    {
      "epoch": 1.3754646840148699,
      "grad_norm": 1.2222704887390137,
      "learning_rate": 9.792714068766872e-05,
      "loss": 0.0919,
      "step": 370
    },
    {
      "epoch": 1.412639405204461,
      "grad_norm": 0.9911707043647766,
      "learning_rate": 9.774833774276278e-05,
      "loss": 0.0993,
      "step": 380
    },
    {
      "epoch": 1.449814126394052,
      "grad_norm": 0.6083410978317261,
      "learning_rate": 9.7562315902908e-05,
      "loss": 0.0854,
      "step": 390
    },
    {
      "epoch": 1.486988847583643,
      "grad_norm": 0.9849829077720642,
      "learning_rate": 9.736910329205978e-05,
      "loss": 0.0826,
      "step": 400
    },
    {
      "epoch": 1.5241635687732342,
      "grad_norm": 0.4323747158050537,
      "learning_rate": 9.716872912131964e-05,
      "loss": 0.0866,
      "step": 410
    },
    {
      "epoch": 1.5613382899628254,
      "grad_norm": 0.8285312056541443,
      "learning_rate": 9.696122368451886e-05,
      "loss": 0.082,
      "step": 420
    },
    {
      "epoch": 1.5985130111524164,
      "grad_norm": 1.4286469221115112,
      "learning_rate": 9.674661835363858e-05,
      "loss": 0.0818,
      "step": 430
    },
    {
      "epoch": 1.6356877323420074,
      "grad_norm": 0.5847893953323364,
      "learning_rate": 9.652494557406666e-05,
      "loss": 0.0735,
      "step": 440
    },
    {
      "epoch": 1.6728624535315983,
      "grad_norm": 0.7277955412864685,
      "learning_rate": 9.62962388596925e-05,
      "loss": 0.0752,
      "step": 450
    },
    {
      "epoch": 1.7100371747211895,
      "grad_norm": 0.5724025368690491,
      "learning_rate": 9.606053278784009e-05,
      "loss": 0.0689,
      "step": 460
    },
    {
      "epoch": 1.7472118959107807,
      "grad_norm": 0.49029338359832764,
      "learning_rate": 9.581786299404045e-05,
      "loss": 0.0726,
      "step": 470
    },
    {
      "epoch": 1.7843866171003717,
      "grad_norm": 0.5488936305046082,
      "learning_rate": 9.556826616664407e-05,
      "loss": 0.0748,
      "step": 480
    },
    {
      "epoch": 1.8215613382899627,
      "grad_norm": 0.7589528560638428,
      "learning_rate": 9.531178004127403e-05,
      "loss": 0.0838,
      "step": 490
    },
    {
      "epoch": 1.858736059479554,
      "grad_norm": 0.4553124010562897,
      "learning_rate": 9.504844339512095e-05,
      "loss": 0.0714,
      "step": 500
    },
    {
      "epoch": 1.895910780669145,
      "grad_norm": 0.4341762959957123,
      "learning_rate": 9.477829604108044e-05,
      "loss": 0.0759,
      "step": 510
    },
    {
      "epoch": 1.933085501858736,
      "grad_norm": 0.4884495139122009,
      "learning_rate": 9.450137882173384e-05,
      "loss": 0.0708,
      "step": 520
    },
    {
      "epoch": 1.970260223048327,
      "grad_norm": 0.8140550851821899,
      "learning_rate": 9.421773360317347e-05,
      "loss": 0.0705,
      "step": 530
    },
    {
      "epoch": 2.007434944237918,
      "grad_norm": 0.5736023187637329,
      "learning_rate": 9.392740326867304e-05,
      "loss": 0.0658,
      "step": 540
    },
    {
      "epoch": 2.0446096654275094,
      "grad_norm": 0.6355158686637878,
      "learning_rate": 9.363043171220423e-05,
      "loss": 0.082,
      "step": 550
    },
    {
      "epoch": 2.0817843866171004,
      "grad_norm": 0.6205140948295593,
      "learning_rate": 9.332686383180055e-05,
      "loss": 0.0695,
      "step": 560
    },
    {
      "epoch": 2.1189591078066914,
      "grad_norm": 0.8051413297653198,
      "learning_rate": 9.301674552276942e-05,
      "loss": 0.0683,
      "step": 570
    },
    {
      "epoch": 2.1561338289962824,
      "grad_norm": 0.5303524732589722,
      "learning_rate": 9.270012367075336e-05,
      "loss": 0.0656,
      "step": 580
    },
    {
      "epoch": 2.193308550185874,
      "grad_norm": 0.6141722798347473,
      "learning_rate": 9.237704614464156e-05,
      "loss": 0.0717,
      "step": 590
    },
    {
      "epoch": 2.2304832713754648,
      "grad_norm": 0.645879328250885,
      "learning_rate": 9.204756178933274e-05,
      "loss": 0.0742,
      "step": 600
    },
    {
      "epoch": 2.2676579925650557,
      "grad_norm": 0.49008268117904663,
      "learning_rate": 9.17117204183505e-05,
      "loss": 0.0681,
      "step": 610
    },
    {
      "epoch": 2.3048327137546467,
      "grad_norm": 0.6824775338172913,
      "learning_rate": 9.136957280631212e-05,
      "loss": 0.0651,
      "step": 620
    },
    {
      "epoch": 2.3420074349442377,
      "grad_norm": 0.47334712743759155,
      "learning_rate": 9.102117068125226e-05,
      "loss": 0.0778,
      "step": 630
    },
    {
      "epoch": 2.379182156133829,
      "grad_norm": 0.6936899423599243,
      "learning_rate": 9.06665667168023e-05,
      "loss": 0.0682,
      "step": 640
    },
    {
      "epoch": 2.41635687732342,
      "grad_norm": 0.5970759391784668,
      "learning_rate": 9.030581452422679e-05,
      "loss": 0.0632,
      "step": 650
    },
    {
      "epoch": 2.453531598513011,
      "grad_norm": 0.7608362436294556,
      "learning_rate": 8.993896864431826e-05,
      "loss": 0.0624,
      "step": 660
    },
    {
      "epoch": 2.4907063197026025,
      "grad_norm": 0.5167849659919739,
      "learning_rate": 8.956608453915125e-05,
      "loss": 0.0624,
      "step": 670
    },
    {
      "epoch": 2.5278810408921935,
      "grad_norm": 0.48926475644111633,
      "learning_rate": 8.918721858369738e-05,
      "loss": 0.0606,
      "step": 680
    },
    {
      "epoch": 2.5650557620817844,
      "grad_norm": 0.7994228005409241,
      "learning_rate": 8.880242805730208e-05,
      "loss": 0.0627,
      "step": 690
    },
    {
      "epoch": 2.6022304832713754,
      "grad_norm": 0.5655274391174316,
      "learning_rate": 8.841177113502482e-05,
      "loss": 0.0756,
      "step": 700
    },
    {
      "epoch": 2.6394052044609664,
      "grad_norm": 0.5700337290763855,
      "learning_rate": 8.801530687884378e-05,
      "loss": 0.0664,
      "step": 710
    },
    {
      "epoch": 2.6765799256505574,
      "grad_norm": 0.920259952545166,
      "learning_rate": 8.761309522872657e-05,
      "loss": 0.0756,
      "step": 720
    },
    {
      "epoch": 2.7137546468401488,
      "grad_norm": 0.6414476633071899,
      "learning_rate": 8.720519699356804e-05,
      "loss": 0.0636,
      "step": 730
    },
    {
      "epoch": 2.7509293680297398,
      "grad_norm": 0.48423320055007935,
      "learning_rate": 8.679167384199684e-05,
      "loss": 0.0596,
      "step": 740
    },
    {
      "epoch": 2.7881040892193307,
      "grad_norm": 0.8158369660377502,
      "learning_rate": 8.637258829305199e-05,
      "loss": 0.0568,
      "step": 750
    },
    {
      "epoch": 2.825278810408922,
      "grad_norm": 0.41953688859939575,
      "learning_rate": 8.594800370673083e-05,
      "loss": 0.0615,
      "step": 760
    },
    {
      "epoch": 2.862453531598513,
      "grad_norm": 0.5318211317062378,
      "learning_rate": 8.551798427440986e-05,
      "loss": 0.0633,
      "step": 770
    },
    {
      "epoch": 2.899628252788104,
      "grad_norm": 0.49588924646377563,
      "learning_rate": 8.50825950091399e-05,
      "loss": 0.0563,
      "step": 780
    },
    {
      "epoch": 2.936802973977695,
      "grad_norm": 0.5286570191383362,
      "learning_rate": 8.464190173581699e-05,
      "loss": 0.0663,
      "step": 790
    },
    {
      "epoch": 2.973977695167286,
      "grad_norm": 0.5693170428276062,
      "learning_rate": 8.419597108123054e-05,
      "loss": 0.0727,
      "step": 800
    },
    {
      "epoch": 3.0111524163568775,
      "grad_norm": 0.4871514141559601,
      "learning_rate": 8.374487046399036e-05,
      "loss": 0.0678,
      "step": 810
    },
    {
      "epoch": 3.0483271375464684,
      "grad_norm": 0.4753323197364807,
      "learning_rate": 8.328866808433378e-05,
      "loss": 0.0554,
      "step": 820
    },
    {
      "epoch": 3.0855018587360594,
      "grad_norm": 0.6938982009887695,
      "learning_rate": 8.282743291381481e-05,
      "loss": 0.0597,
      "step": 830
    },
    {
      "epoch": 3.1226765799256504,
      "grad_norm": 0.4666888415813446,
      "learning_rate": 8.236123468487648e-05,
      "loss": 0.0625,
      "step": 840
    },
    {
      "epoch": 3.159851301115242,
      "grad_norm": 0.6246622800827026,
      "learning_rate": 8.189014388030833e-05,
      "loss": 0.0687,
      "step": 850
    },
    {
      "epoch": 3.197026022304833,
      "grad_norm": 0.46097248792648315,
      "learning_rate": 8.141423172259038e-05,
      "loss": 0.0601,
      "step": 860
    },
    {
      "epoch": 3.2342007434944238,
      "grad_norm": 0.698952317237854,
      "learning_rate": 8.093357016312517e-05,
      "loss": 0.0572,
      "step": 870
    },
    {
      "epoch": 3.2713754646840147,
      "grad_norm": 0.39901071786880493,
      "learning_rate": 8.044823187135984e-05,
      "loss": 0.0577,
      "step": 880
    },
    {
      "epoch": 3.3085501858736057,
      "grad_norm": 0.4128256142139435,
      "learning_rate": 7.995829022379936e-05,
      "loss": 0.0669,
      "step": 890
    },
    {
      "epoch": 3.345724907063197,
      "grad_norm": 0.3928859829902649,
      "learning_rate": 7.94638192929131e-05,
      "loss": 0.0514,
      "step": 900
    },
    {
      "epoch": 3.382899628252788,
      "grad_norm": 0.42870765924453735,
      "learning_rate": 7.896489383593606e-05,
      "loss": 0.0591,
      "step": 910
    },
    {
      "epoch": 3.420074349442379,
      "grad_norm": 0.5093951225280762,
      "learning_rate": 7.84615892835666e-05,
      "loss": 0.0599,
      "step": 920
    },
    {
      "epoch": 3.45724907063197,
      "grad_norm": 0.9360804557800293,
      "learning_rate": 7.795398172856233e-05,
      "loss": 0.0541,
      "step": 930
    },
    {
      "epoch": 3.4944237918215615,
      "grad_norm": 0.6116214394569397,
      "learning_rate": 7.744214791423596e-05,
      "loss": 0.0572,
      "step": 940
    },
    {
      "epoch": 3.5315985130111525,
      "grad_norm": 0.6456897854804993,
      "learning_rate": 7.692616522285278e-05,
      "loss": 0.0653,
      "step": 950
    },
    {
      "epoch": 3.5687732342007434,
      "grad_norm": 0.5947780013084412,
      "learning_rate": 7.640611166393141e-05,
      "loss": 0.0627,
      "step": 960
    },
    {
      "epoch": 3.6059479553903344,
      "grad_norm": 1.3824375867843628,
      "learning_rate": 7.588206586245002e-05,
      "loss": 0.061,
      "step": 970
    },
    {
      "epoch": 3.6431226765799254,
      "grad_norm": 0.5360904932022095,
      "learning_rate": 7.535410704695906e-05,
      "loss": 0.049,
      "step": 980
    },
    {
      "epoch": 3.680297397769517,
      "grad_norm": 0.47667422890663147,
      "learning_rate": 7.482231503760325e-05,
      "loss": 0.0674,
      "step": 990
    },
    {
      "epoch": 3.717472118959108,
      "grad_norm": 0.9161400198936462,
      "learning_rate": 7.428677023405366e-05,
      "loss": 0.0567,
      "step": 1000
    },
    {
      "epoch": 3.7546468401486988,
      "grad_norm": 0.5789083242416382,
      "learning_rate": 7.374755360335253e-05,
      "loss": 0.0676,
      "step": 1010
    },
    {
      "epoch": 3.79182156133829,
      "grad_norm": 1.225460171699524,
      "learning_rate": 7.320474666767201e-05,
      "loss": 0.0568,
      "step": 1020
    },
    {
      "epoch": 3.828996282527881,
      "grad_norm": 0.6838318705558777,
      "learning_rate": 7.265843149198931e-05,
      "loss": 0.0577,
      "step": 1030
    },
    {
      "epoch": 3.866171003717472,
      "grad_norm": 0.6001195311546326,
      "learning_rate": 7.210869067167942e-05,
      "loss": 0.0575,
      "step": 1040
    },
    {
      "epoch": 3.903345724907063,
      "grad_norm": 0.3830925524234772,
      "learning_rate": 7.155560732002791e-05,
      "loss": 0.0536,
      "step": 1050
    },
    {
      "epoch": 3.940520446096654,
      "grad_norm": 0.41186103224754333,
      "learning_rate": 7.099926505566537e-05,
      "loss": 0.0584,
      "step": 1060
    },
    {
      "epoch": 3.9776951672862455,
      "grad_norm": 0.42513903975486755,
      "learning_rate": 7.043974798992532e-05,
      "loss": 0.0611,
      "step": 1070
    },
    {
      "epoch": 4.014869888475836,
      "grad_norm": 0.3477434515953064,
      "learning_rate": 6.98771407141278e-05,
      "loss": 0.0481,
      "step": 1080
    },
    {
      "epoch": 4.052044609665428,
      "grad_norm": 0.5160051584243774,
      "learning_rate": 6.931152828679033e-05,
      "loss": 0.0535,
      "step": 1090
    },
    {
      "epoch": 4.089219330855019,
      "grad_norm": 0.6074197888374329,
      "learning_rate": 6.874299622076816e-05,
      "loss": 0.0505,
      "step": 1100
    },
    {
      "epoch": 4.12639405204461,
      "grad_norm": 0.6713132262229919,
      "learning_rate": 6.817163047032598e-05,
      "loss": 0.0533,
      "step": 1110
    },
    {
      "epoch": 4.163568773234201,
      "grad_norm": 0.37607887387275696,
      "learning_rate": 6.759751741814271e-05,
      "loss": 0.0537,
      "step": 1120
    },
    {
      "epoch": 4.200743494423792,
      "grad_norm": 0.6844078302383423,
      "learning_rate": 6.702074386225175e-05,
      "loss": 0.061,
      "step": 1130
    },
    {
      "epoch": 4.237918215613383,
      "grad_norm": 0.46288058161735535,
      "learning_rate": 6.644139700291817e-05,
      "loss": 0.0432,
      "step": 1140
    },
    {
      "epoch": 4.275092936802974,
      "grad_norm": 0.35319188237190247,
      "learning_rate": 6.585956442945532e-05,
      "loss": 0.0499,
      "step": 1150
    },
    {
      "epoch": 4.312267657992565,
      "grad_norm": 0.47214651107788086,
      "learning_rate": 6.52753341069825e-05,
      "loss": 0.0537,
      "step": 1160
    },
    {
      "epoch": 4.349442379182156,
      "grad_norm": 0.3913021385669708,
      "learning_rate": 6.468879436312584e-05,
      "loss": 0.0549,
      "step": 1170
    },
    {
      "epoch": 4.386617100371748,
      "grad_norm": 0.5337435603141785,
      "learning_rate": 6.410003387466433e-05,
      "loss": 0.0497,
      "step": 1180
    },
    {
      "epoch": 4.4237918215613385,
      "grad_norm": 0.5121909976005554,
      "learning_rate": 6.35091416541232e-05,
      "loss": 0.0497,
      "step": 1190
    },
    {
      "epoch": 4.4609665427509295,
      "grad_norm": 0.5067112445831299,
      "learning_rate": 6.29162070363163e-05,
      "loss": 0.0504,
      "step": 1200
    },
    {
      "epoch": 4.4981412639405205,
      "grad_norm": 0.48435378074645996,
      "learning_rate": 6.232131966484006e-05,
      "loss": 0.0504,
      "step": 1210
    },
    {
      "epoch": 4.5353159851301115,
      "grad_norm": 0.4863722324371338,
      "learning_rate": 6.172456947852049e-05,
      "loss": 0.0494,
      "step": 1220
    },
    {
      "epoch": 4.5724907063197024,
      "grad_norm": 0.4769513010978699,
      "learning_rate": 6.112604669781572e-05,
      "loss": 0.0616,
      "step": 1230
    },
    {
      "epoch": 4.609665427509293,
      "grad_norm": 0.4153217077255249,
      "learning_rate": 6.052584181117589e-05,
      "loss": 0.0514,
      "step": 1240
    },
    {
      "epoch": 4.646840148698884,
      "grad_norm": 0.35905030369758606,
      "learning_rate": 5.9924045561362474e-05,
      "loss": 0.0482,
      "step": 1250
    },
    {
      "epoch": 4.684014869888475,
      "grad_norm": 0.35207000374794006,
      "learning_rate": 5.9320748931729344e-05,
      "loss": 0.0652,
      "step": 1260
    },
    {
      "epoch": 4.721189591078067,
      "grad_norm": 0.461542546749115,
      "learning_rate": 5.87160431324672e-05,
      "loss": 0.0461,
      "step": 1270
    },
    {
      "epoch": 4.758364312267658,
      "grad_norm": 0.47476083040237427,
      "learning_rate": 5.8110019586813946e-05,
      "loss": 0.0526,
      "step": 1280
    },
    {
      "epoch": 4.795539033457249,
      "grad_norm": 0.5205811262130737,
      "learning_rate": 5.7502769917232635e-05,
      "loss": 0.0474,
      "step": 1290
    },
    {
      "epoch": 4.83271375464684,
      "grad_norm": 0.36620450019836426,
      "learning_rate": 5.689438593155956e-05,
      "loss": 0.0405,
      "step": 1300
    },
    {
      "epoch": 4.869888475836431,
      "grad_norm": 0.4866809546947479,
      "learning_rate": 5.6284959609124e-05,
      "loss": 0.0407,
      "step": 1310
    },
    {
      "epoch": 4.907063197026022,
      "grad_norm": 0.4499811828136444,
      "learning_rate": 5.567458308684232e-05,
      "loss": 0.0555,
      "step": 1320
    },
    {
      "epoch": 4.944237918215613,
      "grad_norm": 0.49062612652778625,
      "learning_rate": 5.506334864528808e-05,
      "loss": 0.0502,
      "step": 1330
    },
    {
      "epoch": 4.981412639405205,
      "grad_norm": 0.28027549386024475,
      "learning_rate": 5.445134869474049e-05,
      "loss": 0.0538,
      "step": 1340
    },
    {
      "epoch": 5.018587360594796,
      "grad_norm": 0.4135720431804657,
      "learning_rate": 5.3838675761213244e-05,
      "loss": 0.0541,
      "step": 1350
    },
    {
      "epoch": 5.055762081784387,
      "grad_norm": 0.9072251319885254,
      "learning_rate": 5.3225422472465824e-05,
      "loss": 0.0553,
      "step": 1360
    },
    {
      "epoch": 5.092936802973978,
      "grad_norm": 0.3564969599246979,
      "learning_rate": 5.261168154399952e-05,
      "loss": 0.0507,
      "step": 1370
    },
    {
      "epoch": 5.130111524163569,
      "grad_norm": 0.47934380173683167,
      "learning_rate": 5.199754576504006e-05,
      "loss": 0.0529,
      "step": 1380
    },
    {
      "epoch": 5.16728624535316,
      "grad_norm": 0.5730791687965393,
      "learning_rate": 5.138310798450912e-05,
      "loss": 0.0508,
      "step": 1390
    },
    {
      "epoch": 5.204460966542751,
      "grad_norm": 0.4801822006702423,
      "learning_rate": 5.076846109698693e-05,
      "loss": 0.0484,
      "step": 1400
    },
    {
      "epoch": 5.241635687732342,
      "grad_norm": 0.5102419853210449,
      "learning_rate": 5.01536980286678e-05,
      "loss": 0.0479,
      "step": 1410
    },
    {
      "epoch": 5.278810408921933,
      "grad_norm": 0.4119831323623657,
      "learning_rate": 4.9538911723310976e-05,
      "loss": 0.0513,
      "step": 1420
    },
    {
      "epoch": 5.315985130111525,
      "grad_norm": 0.5217913389205933,
      "learning_rate": 4.892419512818889e-05,
      "loss": 0.046,
      "step": 1430
    },
    {
      "epoch": 5.353159851301116,
      "grad_norm": 0.5952945351600647,
      "learning_rate": 4.830964118003468e-05,
      "loss": 0.0461,
      "step": 1440
    },
    {
      "epoch": 5.390334572490707,
      "grad_norm": 0.412113755941391,
      "learning_rate": 4.769534279099152e-05,
      "loss": 0.0492,
      "step": 1450
    },
    {
      "epoch": 5.4275092936802976,
      "grad_norm": 0.4845413565635681,
      "learning_rate": 4.708139283456551e-05,
      "loss": 0.049,
      "step": 1460
    },
    {
      "epoch": 5.4646840148698885,
      "grad_norm": 0.33267199993133545,
      "learning_rate": 4.646788413158455e-05,
      "loss": 0.0466,
      "step": 1470
    },
    {
      "epoch": 5.5018587360594795,
      "grad_norm": 0.35786089301109314,
      "learning_rate": 4.585490943616504e-05,
      "loss": 0.0439,
      "step": 1480
    },
    {
      "epoch": 5.5390334572490705,
      "grad_norm": 0.6732335686683655,
      "learning_rate": 4.524256142168874e-05,
      "loss": 0.0529,
      "step": 1490
    },
    {
      "epoch": 5.5762081784386615,
      "grad_norm": 0.5137932896614075,
      "learning_rate": 4.463093266679185e-05,
      "loss": 0.0415,
      "step": 1500
    },
    {
      "epoch": 5.613382899628252,
      "grad_norm": 0.3910720646381378,
      "learning_rate": 4.402011564136836e-05,
      "loss": 0.0523,
      "step": 1510
    },
    {
      "epoch": 5.650557620817844,
      "grad_norm": 0.5063146948814392,
      "learning_rate": 4.341020269258987e-05,
      "loss": 0.0564,
      "step": 1520
    },
    {
      "epoch": 5.687732342007435,
      "grad_norm": 0.5695778131484985,
      "learning_rate": 4.2801286030943985e-05,
      "loss": 0.0551,
      "step": 1530
    },
    {
      "epoch": 5.724907063197026,
      "grad_norm": 0.5160698294639587,
      "learning_rate": 4.219345771629333e-05,
      "loss": 0.0468,
      "step": 1540
    },
    {
      "epoch": 5.762081784386617,
      "grad_norm": 0.38890841603279114,
      "learning_rate": 4.1586809643957345e-05,
      "loss": 0.042,
      "step": 1550
    },
    {
      "epoch": 5.799256505576208,
      "grad_norm": 0.6440842747688293,
      "learning_rate": 4.098143353081902e-05,
      "loss": 0.0448,
      "step": 1560
    },
    {
      "epoch": 5.836431226765799,
      "grad_norm": 0.37773260474205017,
      "learning_rate": 4.0377420901458506e-05,
      "loss": 0.0487,
      "step": 1570
    },
    {
      "epoch": 5.87360594795539,
      "grad_norm": 0.44040870666503906,
      "learning_rate": 3.977486307431589e-05,
      "loss": 0.0433,
      "step": 1580
    },
    {
      "epoch": 5.910780669144981,
      "grad_norm": 0.36806249618530273,
      "learning_rate": 3.9173851147885075e-05,
      "loss": 0.0398,
      "step": 1590
    },
    {
      "epoch": 5.947955390334572,
      "grad_norm": 0.49731066823005676,
      "learning_rate": 3.857447598694094e-05,
      "loss": 0.0396,
      "step": 1600
    },
    {
      "epoch": 5.985130111524164,
      "grad_norm": 0.5180796384811401,
      "learning_rate": 3.797682820880184e-05,
      "loss": 0.0461,
      "step": 1610
    },
    {
      "epoch": 6.022304832713755,
      "grad_norm": 0.34899625182151794,
      "learning_rate": 3.7380998169629476e-05,
      "loss": 0.0433,
      "step": 1620
    },
    {
      "epoch": 6.059479553903346,
      "grad_norm": 0.32863014936447144,
      "learning_rate": 3.678707595076834e-05,
      "loss": 0.0396,
      "step": 1630
    },
    {
      "epoch": 6.096654275092937,
      "grad_norm": 0.5975285768508911,
      "learning_rate": 3.619515134512656e-05,
      "loss": 0.0418,
      "step": 1640
    },
    {
      "epoch": 6.133828996282528,
      "grad_norm": 0.6041769981384277,
      "learning_rate": 3.560531384360055e-05,
      "loss": 0.0373,
      "step": 1650
    },
    {
      "epoch": 6.171003717472119,
      "grad_norm": 0.5462443828582764,
      "learning_rate": 3.501765262154513e-05,
      "loss": 0.0455,
      "step": 1660
    },
    {
      "epoch": 6.20817843866171,
      "grad_norm": 0.4584652781486511,
      "learning_rate": 3.4432256525291465e-05,
      "loss": 0.0424,
      "step": 1670
    },
    {
      "epoch": 6.245353159851301,
      "grad_norm": 1.1305296421051025,
      "learning_rate": 3.3849214058714704e-05,
      "loss": 0.0463,
      "step": 1680
    },
    {
      "epoch": 6.282527881040892,
      "grad_norm": 0.4728774428367615,
      "learning_rate": 3.326861336985341e-05,
      "loss": 0.0422,
      "step": 1690
    },
    {
      "epoch": 6.319702602230484,
      "grad_norm": 0.347769558429718,
      "learning_rate": 3.269054223758279e-05,
      "loss": 0.042,
      "step": 1700
    },
    {
      "epoch": 6.356877323420075,
      "grad_norm": 0.4270273447036743,
      "learning_rate": 3.2115088058343725e-05,
      "loss": 0.046,
      "step": 1710
    },
    {
      "epoch": 6.394052044609666,
      "grad_norm": 0.46025773882865906,
      "learning_rate": 3.1542337832929644e-05,
      "loss": 0.0452,
      "step": 1720
    },
    {
      "epoch": 6.431226765799257,
      "grad_norm": 0.5107239484786987,
      "learning_rate": 3.0972378153333145e-05,
      "loss": 0.0416,
      "step": 1730
    },
    {
      "epoch": 6.4684014869888475,
      "grad_norm": 0.309860497713089,
      "learning_rate": 3.0405295189654537e-05,
      "loss": 0.0394,
      "step": 1740
    },
    {
      "epoch": 6.5055762081784385,
      "grad_norm": 0.33350470662117004,
      "learning_rate": 2.9841174677074035e-05,
      "loss": 0.043,
      "step": 1750
    },
    {
      "epoch": 6.5427509293680295,
      "grad_norm": 0.42869654297828674,
      "learning_rate": 2.9280101902889824e-05,
      "loss": 0.0446,
      "step": 1760
    },
    {
      "epoch": 6.5799256505576205,
      "grad_norm": 0.2736971974372864,
      "learning_rate": 2.8722161693623772e-05,
      "loss": 0.0393,
      "step": 1770
    },
    {
      "epoch": 6.617100371747211,
      "grad_norm": 0.4107191562652588,
      "learning_rate": 2.8167438402196805e-05,
      "loss": 0.0412,
      "step": 1780
    },
    {
      "epoch": 6.654275092936803,
      "grad_norm": 0.5272426009178162,
      "learning_rate": 2.761601589517595e-05,
      "loss": 0.0502,
      "step": 1790
    },
    {
      "epoch": 6.691449814126394,
      "grad_norm": 0.48449358344078064,
      "learning_rate": 2.706797754009476e-05,
      "loss": 0.0416,
      "step": 1800
    },
    {
      "epoch": 6.728624535315985,
      "grad_norm": 0.46288979053497314,
      "learning_rate": 2.6523406192849386e-05,
      "loss": 0.0431,
      "step": 1810
    },
    {
      "epoch": 6.765799256505576,
      "grad_norm": 0.37995341420173645,
      "learning_rate": 2.5982384185171906e-05,
      "loss": 0.0365,
      "step": 1820
    },
    {
      "epoch": 6.802973977695167,
      "grad_norm": 0.3601038157939911,
      "learning_rate": 2.544499331218274e-05,
      "loss": 0.0523,
      "step": 1830
    },
    {
      "epoch": 6.840148698884758,
      "grad_norm": 0.3422638177871704,
      "learning_rate": 2.4911314820024568e-05,
      "loss": 0.0398,
      "step": 1840
    },
    {
      "epoch": 6.877323420074349,
      "grad_norm": 0.4652217626571655,
      "learning_rate": 2.438142939357882e-05,
      "loss": 0.0384,
      "step": 1850
    },
    {
      "epoch": 6.91449814126394,
      "grad_norm": 0.4344288408756256,
      "learning_rate": 2.38554171442674e-05,
      "loss": 0.0358,
      "step": 1860
    },
    {
      "epoch": 6.951672862453531,
      "grad_norm": 0.550430953502655,
      "learning_rate": 2.3333357597940793e-05,
      "loss": 0.045,
      "step": 1870
    },
    {
      "epoch": 6.988847583643123,
      "grad_norm": 0.5860105156898499,
      "learning_rate": 2.281532968285494e-05,
      "loss": 0.0425,
      "step": 1880
    },
    {
      "epoch": 7.026022304832714,
      "grad_norm": 0.3104412257671356,
      "learning_rate": 2.230141171773836e-05,
      "loss": 0.0377,
      "step": 1890
    },
    {
      "epoch": 7.063197026022305,
      "grad_norm": 0.629639744758606,
      "learning_rate": 2.179168139995134e-05,
      "loss": 0.0398,
      "step": 1900
    },
    {
      "epoch": 7.100371747211896,
      "grad_norm": 0.280269980430603,
      "learning_rate": 2.1286215793739302e-05,
      "loss": 0.0337,
      "step": 1910
    },
    {
      "epoch": 7.137546468401487,
      "grad_norm": 1.1272166967391968,
      "learning_rate": 2.0785091318581577e-05,
      "loss": 0.0409,
      "step": 1920
    },
    {
      "epoch": 7.174721189591078,
      "grad_norm": 0.33485621213912964,
      "learning_rate": 2.0288383737638006e-05,
      "loss": 0.0403,
      "step": 1930
    },
    {
      "epoch": 7.211895910780669,
      "grad_norm": 0.6893181204795837,
      "learning_rate": 1.9796168146294412e-05,
      "loss": 0.0428,
      "step": 1940
    },
    {
      "epoch": 7.24907063197026,
      "grad_norm": 1.7540059089660645,
      "learning_rate": 1.9308518960809353e-05,
      "loss": 0.0365,
      "step": 1950
    },
    {
      "epoch": 7.286245353159852,
      "grad_norm": 0.6964354515075684,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 0.0413,
      "step": 1960
    },
    {
      "epoch": 7.323420074349443,
      "grad_norm": 0.5227733850479126,
      "learning_rate": 1.83472140094124e-05,
      "loss": 0.0356,
      "step": 1970
    },
    {
      "epoch": 7.360594795539034,
      "grad_norm": 0.3736005127429962,
      "learning_rate": 1.7873703579648034e-05,
      "loss": 0.0371,
      "step": 1980
    },
    {
      "epoch": 7.397769516728625,
      "grad_norm": 0.6643156409263611,
      "learning_rate": 1.7405050206064373e-05,
      "loss": 0.0412,
      "step": 1990
    },
    {
      "epoch": 7.434944237918216,
      "grad_norm": 0.34871557354927063,
      "learning_rate": 1.694132474263526e-05,
      "loss": 0.0469,
      "step": 2000
    },
    {
      "epoch": 7.4721189591078065,
      "grad_norm": 0.44656774401664734,
      "learning_rate": 1.6482597298301916e-05,
      "loss": 0.0352,
      "step": 2010
    },
    {
      "epoch": 7.5092936802973975,
      "grad_norm": 0.28243181109428406,
      "learning_rate": 1.6028937226373536e-05,
      "loss": 0.0387,
      "step": 2020
    },
    {
      "epoch": 7.5464684014869885,
      "grad_norm": 0.2621857225894928,
      "learning_rate": 1.5580413114042e-05,
      "loss": 0.0403,
      "step": 2030
    },
    {
      "epoch": 7.58364312267658,
      "grad_norm": 0.3578774929046631,
      "learning_rate": 1.5137092772012368e-05,
      "loss": 0.039,
      "step": 2040
    },
    {
      "epoch": 7.620817843866171,
      "grad_norm": 0.41617679595947266,
      "learning_rate": 1.469904322425092e-05,
      "loss": 0.0414,
      "step": 2050
    },
    {
      "epoch": 7.657992565055762,
      "grad_norm": 0.44881588220596313,
      "learning_rate": 1.4266330697851954e-05,
      "loss": 0.0386,
      "step": 2060
    },
    {
      "epoch": 7.695167286245353,
      "grad_norm": 0.4774656295776367,
      "learning_rate": 1.3839020613025266e-05,
      "loss": 0.0394,
      "step": 2070
    },
    {
      "epoch": 7.732342007434944,
      "grad_norm": 0.3506743609905243,
      "learning_rate": 1.3417177573205398e-05,
      "loss": 0.044,
      "step": 2080
    },
    {
      "epoch": 7.769516728624535,
      "grad_norm": 0.35965225100517273,
      "learning_rate": 1.3000865355284563e-05,
      "loss": 0.0377,
      "step": 2090
    },
    {
      "epoch": 7.806691449814126,
      "grad_norm": 0.3799026906490326,
      "learning_rate": 1.2590146899970446e-05,
      "loss": 0.0359,
      "step": 2100
    },
    {
      "epoch": 7.843866171003717,
      "grad_norm": 0.38570570945739746,
      "learning_rate": 1.218508430227035e-05,
      "loss": 0.038,
      "step": 2110
    },
    {
      "epoch": 7.881040892193308,
      "grad_norm": 0.4911622107028961,
      "learning_rate": 1.1785738802103396e-05,
      "loss": 0.0373,
      "step": 2120
    },
    {
      "epoch": 7.9182156133829,
      "grad_norm": 0.8531225919723511,
      "learning_rate": 1.1392170775041788e-05,
      "loss": 0.0386,
      "step": 2130
    },
    {
      "epoch": 7.955390334572491,
      "grad_norm": 0.41892483830451965,
      "learning_rate": 1.1004439723182942e-05,
      "loss": 0.0421,
      "step": 2140
    },
    {
      "epoch": 7.992565055762082,
      "grad_norm": 0.45428428053855896,
      "learning_rate": 1.0622604266153485e-05,
      "loss": 0.041,
      "step": 2150
    },
    {
      "epoch": 8.029739776951672,
      "grad_norm": 0.30198222398757935,
      "learning_rate": 1.0246722132246856e-05,
      "loss": 0.0366,
      "step": 2160
    },
    {
      "epoch": 8.066914498141264,
      "grad_norm": 0.3173958659172058,
      "learning_rate": 9.876850149695555e-06,
      "loss": 0.0341,
      "step": 2170
    },
    {
      "epoch": 8.104089219330856,
      "grad_norm": 0.4124772548675537,
      "learning_rate": 9.513044238079427e-06,
      "loss": 0.034,
      "step": 2180
    },
    {
      "epoch": 8.141263940520446,
      "grad_norm": 0.2519373893737793,
      "learning_rate": 9.15535939987151e-06,
      "loss": 0.0313,
      "step": 2190
    },
    {
      "epoch": 8.178438661710038,
      "grad_norm": 0.41054391860961914,
      "learning_rate": 8.803849712122292e-06,
      "loss": 0.0371,
      "step": 2200
    },
    {
      "epoch": 8.215613382899628,
      "grad_norm": 0.4737650454044342,
      "learning_rate": 8.458568318284088e-06,
      "loss": 0.0427,
      "step": 2210
    },
    {
      "epoch": 8.25278810408922,
      "grad_norm": 0.2717382609844208,
      "learning_rate": 8.11956742017641e-06,
      "loss": 0.0354,
      "step": 2220
    },
    {
      "epoch": 8.28996282527881,
      "grad_norm": 0.3701151907444,
      "learning_rate": 7.786898270093846e-06,
      "loss": 0.0347,
      "step": 2230
    },
    {
      "epoch": 8.327137546468402,
      "grad_norm": 0.3946413993835449,
      "learning_rate": 7.460611163057346e-06,
      "loss": 0.0314,
      "step": 2240
    },
    {
      "epoch": 8.364312267657992,
      "grad_norm": 0.4067074656486511,
      "learning_rate": 7.140755429210316e-06,
      "loss": 0.0365,
      "step": 2250
    },
    {
      "epoch": 8.401486988847584,
      "grad_norm": 0.2950913906097412,
      "learning_rate": 6.827379426360614e-06,
      "loss": 0.0407,
      "step": 2260
    },
    {
      "epoch": 8.438661710037175,
      "grad_norm": 0.22738321125507355,
      "learning_rate": 6.52053053266945e-06,
      "loss": 0.0359,
      "step": 2270
    },
    {
      "epoch": 8.475836431226766,
      "grad_norm": 0.4955553114414215,
      "learning_rate": 6.22025513948854e-06,
      "loss": 0.033,
      "step": 2280
    },
    {
      "epoch": 8.513011152416357,
      "grad_norm": 0.31211531162261963,
      "learning_rate": 5.926598644346259e-06,
      "loss": 0.0378,
      "step": 2290
    },
    {
      "epoch": 8.550185873605948,
      "grad_norm": 0.28172406554222107,
      "learning_rate": 5.639605444084273e-06,
      "loss": 0.0358,
      "step": 2300
    },
    {
      "epoch": 8.58736059479554,
      "grad_norm": 0.5845323204994202,
      "learning_rate": 5.3593189281452625e-06,
      "loss": 0.0382,
      "step": 2310
    },
    {
      "epoch": 8.62453531598513,
      "grad_norm": 0.27726075053215027,
      "learning_rate": 5.085781472013051e-06,
      "loss": 0.0326,
      "step": 2320
    },
    {
      "epoch": 8.661710037174721,
      "grad_norm": 0.504346489906311,
      "learning_rate": 4.819034430806046e-06,
      "loss": 0.0297,
      "step": 2330
    },
    {
      "epoch": 8.698884758364311,
      "grad_norm": 0.23243720829486847,
      "learning_rate": 4.559118133024853e-06,
      "loss": 0.0405,
      "step": 2340
    },
    {
      "epoch": 8.736059479553903,
      "grad_norm": 0.5230311155319214,
      "learning_rate": 4.3060718744552256e-06,
      "loss": 0.0443,
      "step": 2350
    },
    {
      "epoch": 8.773234200743495,
      "grad_norm": 0.38711124658584595,
      "learning_rate": 4.05993391222701e-06,
      "loss": 0.0382,
      "step": 2360
    },
    {
      "epoch": 8.810408921933085,
      "grad_norm": 0.49542826414108276,
      "learning_rate": 3.820741459030253e-06,
      "loss": 0.0324,
      "step": 2370
    },
    {
      "epoch": 8.847583643122677,
      "grad_norm": 0.41099298000335693,
      "learning_rate": 3.5885306774891215e-06,
      "loss": 0.0302,
      "step": 2380
    },
    {
      "epoch": 8.884758364312267,
      "grad_norm": 0.2901303768157959,
      "learning_rate": 3.3633366746946004e-06,
      "loss": 0.0328,
      "step": 2390
    },
    {
      "epoch": 8.921933085501859,
      "grad_norm": 0.3307854235172272,
      "learning_rate": 3.145193496896809e-06,
      "loss": 0.0385,
      "step": 2400
    },
    {
      "epoch": 8.95910780669145,
      "grad_norm": 0.5515095591545105,
      "learning_rate": 2.934134124357646e-06,
      "loss": 0.0386,
      "step": 2410
    },
    {
      "epoch": 8.996282527881041,
      "grad_norm": 0.3325430154800415,
      "learning_rate": 2.7301904663646516e-06,
      "loss": 0.0418,
      "step": 2420
    },
    {
      "epoch": 9.033457249070633,
      "grad_norm": 0.38487276434898376,
      "learning_rate": 2.5333933564067492e-06,
      "loss": 0.0466,
      "step": 2430
    },
    {
      "epoch": 9.070631970260223,
      "grad_norm": 0.4031588137149811,
      "learning_rate": 2.343772547512613e-06,
      "loss": 0.0299,
      "step": 2440
    },
    {
      "epoch": 9.107806691449815,
      "grad_norm": 0.4202631115913391,
      "learning_rate": 2.1613567077524875e-06,
      "loss": 0.0289,
      "step": 2450
    },
    {
      "epoch": 9.144981412639405,
      "grad_norm": 0.3162038028240204,
      "learning_rate": 1.9861734159038968e-06,
      "loss": 0.0297,
      "step": 2460
    },
    {
      "epoch": 9.182156133828997,
      "grad_norm": 0.37053269147872925,
      "learning_rate": 1.8182491572821536e-06,
      "loss": 0.03,
      "step": 2470
    },
    {
      "epoch": 9.219330855018587,
      "grad_norm": 0.36643552780151367,
      "learning_rate": 1.6576093197361253e-06,
      "loss": 0.0356,
      "step": 2480
    },
    {
      "epoch": 9.256505576208179,
      "grad_norm": 0.4086417257785797,
      "learning_rate": 1.5042781898099434e-06,
      "loss": 0.0296,
      "step": 2490
    },
    {
      "epoch": 9.293680297397769,
      "grad_norm": 0.9764392971992493,
      "learning_rate": 1.3582789490712179e-06,
      "loss": 0.03,
      "step": 2500
    },
    {
      "epoch": 9.33085501858736,
      "grad_norm": 0.24946707487106323,
      "learning_rate": 1.2196336706062739e-06,
      "loss": 0.0342,
      "step": 2510
    },
    {
      "epoch": 9.368029739776953,
      "grad_norm": 0.3657890856266022,
      "learning_rate": 1.0883633156830553e-06,
      "loss": 0.0294,
      "step": 2520
    },
    {
      "epoch": 9.405204460966543,
      "grad_norm": 0.34119731187820435,
      "learning_rate": 9.644877305819976e-07,
      "loss": 0.034,
      "step": 2530
    },
    {
      "epoch": 9.442379182156134,
      "grad_norm": 0.5186340808868408,
      "learning_rate": 8.480256435956124e-07,
      "loss": 0.0355,
      "step": 2540
    },
    {
      "epoch": 9.479553903345725,
      "grad_norm": 0.790932834148407,
      "learning_rate": 7.389946621969679e-07,
      "loss": 0.0409,
      "step": 2550
    },
    {
      "epoch": 9.516728624535316,
      "grad_norm": 0.3314131796360016,
      "learning_rate": 6.374112703777301e-07,
      "loss": 0.0321,
      "step": 2560
    },
    {
      "epoch": 9.553903345724907,
      "grad_norm": 0.6170405149459839,
      "learning_rate": 5.432908261559733e-07,
      "loss": 0.0352,
      "step": 2570
    },
    {
      "epoch": 9.591078066914498,
      "grad_norm": 0.30839791893959045,
      "learning_rate": 4.5664755925426403e-07,
      "loss": 0.0283,
      "step": 2580
    },
    {
      "epoch": 9.628252788104088,
      "grad_norm": 0.2792086899280548,
      "learning_rate": 3.7749456894834447e-07,
      "loss": 0.035,
      "step": 2590
    },
    {
      "epoch": 9.66542750929368,
      "grad_norm": 0.3965803384780884,
      "learning_rate": 3.058438220866544e-07,
      "loss": 0.0324,
      "step": 2600
    },
    {
      "epoch": 9.702602230483272,
      "grad_norm": 0.3901006877422333,
      "learning_rate": 2.4170615128117357e-07,
      "loss": 0.0323,
      "step": 2610
    },
    {
      "epoch": 9.739776951672862,
      "grad_norm": 0.32952436804771423,
      "learning_rate": 1.850912532696092e-07,
      "loss": 0.0281,
      "step": 2620
    },
    {
      "epoch": 9.776951672862454,
      "grad_norm": 0.2635156214237213,
      "learning_rate": 1.3600768744944647e-07,
      "loss": 0.0282,
      "step": 2630
    },
    {
      "epoch": 9.814126394052044,
      "grad_norm": 0.1897970736026764,
      "learning_rate": 9.446287458383385e-08,
      "loss": 0.0388,
      "step": 2640
    },
    {
      "epoch": 9.851301115241636,
      "grad_norm": 0.3042493462562561,
      "learning_rate": 6.046309567968588e-08,
      "loss": 0.0316,
      "step": 2650
    },
    {
      "epoch": 9.888475836431226,
      "grad_norm": 0.42293256521224976,
      "learning_rate": 3.4013491038087375e-08,
      "loss": 0.0299,
      "step": 2660
    },
    {
      "epoch": 9.925650557620818,
      "grad_norm": 0.7281969785690308,
      "learning_rate": 1.511805947714273e-08,
      "loss": 0.0412,
      "step": 2670
    },
    {
      "epoch": 9.962825278810408,
      "grad_norm": 0.3139171004295349,
      "learning_rate": 3.7796577274096245e-09,
      "loss": 0.0341,
      "step": 2680
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.9909800887107849,
      "learning_rate": 0.0,
      "loss": 0.0364,
      "step": 2690
    },
    {
      "epoch": 10.0,
      "step": 2690,
      "total_flos": 2.2056377044963104e+17,
      "train_loss": 0.06680747533154753,
      "train_runtime": 2132.8764,
      "train_samples_per_second": 33.978,
      "train_steps_per_second": 1.261
    }
  ],
  "logging_steps": 10,
  "max_steps": 2690,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2056377044963104e+17,
  "train_batch_size": 27,
  "trial_name": null,
  "trial_params": null
}