|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9834710743801653, |
|
"eval_steps": 500, |
|
"global_step": 900, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0022038567493112946, |
|
"grad_norm": 53.35355576018126, |
|
"learning_rate": 6.000000000000001e-08, |
|
"loss": 3.7938, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.004407713498622589, |
|
"grad_norm": 56.29807371103618, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 3.9718, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.006611570247933884, |
|
"grad_norm": 61.23476417502119, |
|
"learning_rate": 1.8e-07, |
|
"loss": 4.0815, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.008815426997245178, |
|
"grad_norm": 57.23654746894239, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 4.0256, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.011019283746556474, |
|
"grad_norm": 53.79273733579391, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 3.9356, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.013223140495867768, |
|
"grad_norm": 54.1365703275303, |
|
"learning_rate": 3.6e-07, |
|
"loss": 4.0175, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.015426997245179064, |
|
"grad_norm": 59.57834616370979, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 4.0324, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.017630853994490357, |
|
"grad_norm": 52.92978303398463, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 3.9872, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.019834710743801654, |
|
"grad_norm": 53.00990339068272, |
|
"learning_rate": 5.4e-07, |
|
"loss": 3.9189, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02203856749311295, |
|
"grad_norm": 49.29917728226077, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 3.8985, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.024242424242424242, |
|
"grad_norm": 51.04476681098937, |
|
"learning_rate": 6.6e-07, |
|
"loss": 4.0225, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.026446280991735537, |
|
"grad_norm": 48.910852181006604, |
|
"learning_rate": 7.2e-07, |
|
"loss": 3.8215, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02865013774104683, |
|
"grad_norm": 47.57420311673295, |
|
"learning_rate": 7.8e-07, |
|
"loss": 3.8253, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03085399449035813, |
|
"grad_norm": 48.04076110975242, |
|
"learning_rate": 8.400000000000001e-07, |
|
"loss": 4.0329, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03305785123966942, |
|
"grad_norm": 45.10942040856652, |
|
"learning_rate": 9e-07, |
|
"loss": 3.6307, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03526170798898071, |
|
"grad_norm": 43.52414046751455, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 3.5453, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03746556473829201, |
|
"grad_norm": 42.85848752134229, |
|
"learning_rate": 1.0200000000000002e-06, |
|
"loss": 3.6339, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03966942148760331, |
|
"grad_norm": 33.19526904841194, |
|
"learning_rate": 1.08e-06, |
|
"loss": 3.4207, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0418732782369146, |
|
"grad_norm": 31.873243117941165, |
|
"learning_rate": 1.14e-06, |
|
"loss": 3.191, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0440771349862259, |
|
"grad_norm": 29.808709603971984, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 3.2128, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04628099173553719, |
|
"grad_norm": 31.51951976510752, |
|
"learning_rate": 1.26e-06, |
|
"loss": 3.3406, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.048484848484848485, |
|
"grad_norm": 27.823761692798925, |
|
"learning_rate": 1.32e-06, |
|
"loss": 3.2884, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05068870523415978, |
|
"grad_norm": 26.429345221994904, |
|
"learning_rate": 1.3800000000000001e-06, |
|
"loss": 3.0015, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.05289256198347107, |
|
"grad_norm": 18.155827199892627, |
|
"learning_rate": 1.44e-06, |
|
"loss": 2.892, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.05509641873278237, |
|
"grad_norm": 13.907874127382469, |
|
"learning_rate": 1.5e-06, |
|
"loss": 2.6402, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05730027548209366, |
|
"grad_norm": 13.424593330905749, |
|
"learning_rate": 1.56e-06, |
|
"loss": 2.7094, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05950413223140496, |
|
"grad_norm": 13.854259069645929, |
|
"learning_rate": 1.6200000000000002e-06, |
|
"loss": 2.8064, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.06170798898071626, |
|
"grad_norm": 11.350285228526166, |
|
"learning_rate": 1.6800000000000002e-06, |
|
"loss": 2.7105, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06391184573002755, |
|
"grad_norm": 11.129757770703522, |
|
"learning_rate": 1.7399999999999999e-06, |
|
"loss": 2.6137, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06611570247933884, |
|
"grad_norm": 10.863006958008057, |
|
"learning_rate": 1.8e-06, |
|
"loss": 2.6008, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06831955922865014, |
|
"grad_norm": 9.805882809079606, |
|
"learning_rate": 1.86e-06, |
|
"loss": 2.5766, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.07052341597796143, |
|
"grad_norm": 11.265746488422517, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 2.5667, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.07272727272727272, |
|
"grad_norm": 9.025293134976716, |
|
"learning_rate": 1.98e-06, |
|
"loss": 2.4655, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.07493112947658402, |
|
"grad_norm": 11.70511117627225, |
|
"learning_rate": 2.0400000000000004e-06, |
|
"loss": 2.5067, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07713498622589532, |
|
"grad_norm": 10.81705842291705, |
|
"learning_rate": 2.1e-06, |
|
"loss": 2.2768, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07933884297520662, |
|
"grad_norm": 8.888943224142057, |
|
"learning_rate": 2.16e-06, |
|
"loss": 2.314, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.0815426997245179, |
|
"grad_norm": 10.751954789293142, |
|
"learning_rate": 2.22e-06, |
|
"loss": 2.558, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0837465564738292, |
|
"grad_norm": 9.989373040049472, |
|
"learning_rate": 2.28e-06, |
|
"loss": 2.1834, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0859504132231405, |
|
"grad_norm": 11.453837389192644, |
|
"learning_rate": 2.34e-06, |
|
"loss": 2.3084, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0881542699724518, |
|
"grad_norm": 14.679296883424508, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 2.3127, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09035812672176309, |
|
"grad_norm": 19.06017003733088, |
|
"learning_rate": 2.4599999999999997e-06, |
|
"loss": 2.1885, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.09256198347107437, |
|
"grad_norm": 25.283284564533062, |
|
"learning_rate": 2.52e-06, |
|
"loss": 2.1116, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.09476584022038567, |
|
"grad_norm": 25.192909931447222, |
|
"learning_rate": 2.58e-06, |
|
"loss": 1.9283, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.09696969696969697, |
|
"grad_norm": 17.926763299091263, |
|
"learning_rate": 2.64e-06, |
|
"loss": 1.8125, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09917355371900827, |
|
"grad_norm": 15.969502217016815, |
|
"learning_rate": 2.7e-06, |
|
"loss": 1.8421, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.10137741046831956, |
|
"grad_norm": 13.673145143593812, |
|
"learning_rate": 2.7600000000000003e-06, |
|
"loss": 1.7697, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.10358126721763085, |
|
"grad_norm": 15.24764944924423, |
|
"learning_rate": 2.82e-06, |
|
"loss": 1.8159, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.10578512396694215, |
|
"grad_norm": 15.912109474132139, |
|
"learning_rate": 2.88e-06, |
|
"loss": 1.7813, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.10798898071625344, |
|
"grad_norm": 17.737822555539395, |
|
"learning_rate": 2.9400000000000002e-06, |
|
"loss": 1.6972, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.11019283746556474, |
|
"grad_norm": 14.271394993258342, |
|
"learning_rate": 3e-06, |
|
"loss": 1.6323, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11239669421487604, |
|
"grad_norm": 17.453281450383354, |
|
"learning_rate": 2.9999996311880685e-06, |
|
"loss": 1.6936, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.11460055096418732, |
|
"grad_norm": 18.936110023489377, |
|
"learning_rate": 2.999998524752454e-06, |
|
"loss": 1.5923, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.11680440771349862, |
|
"grad_norm": 17.54586534165411, |
|
"learning_rate": 2.9999966806937017e-06, |
|
"loss": 1.4402, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11900826446280992, |
|
"grad_norm": 19.941262856339648, |
|
"learning_rate": 2.9999940990127173e-06, |
|
"loss": 1.6165, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 16.195665957935688, |
|
"learning_rate": 2.9999907797107714e-06, |
|
"loss": 1.6445, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12341597796143251, |
|
"grad_norm": 17.906162449636255, |
|
"learning_rate": 2.9999867227894957e-06, |
|
"loss": 1.4222, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1256198347107438, |
|
"grad_norm": 14.766409204808326, |
|
"learning_rate": 2.9999819282508854e-06, |
|
"loss": 1.5734, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1278236914600551, |
|
"grad_norm": 15.330559102455767, |
|
"learning_rate": 2.999976396097298e-06, |
|
"loss": 1.3955, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.13002754820936638, |
|
"grad_norm": 17.671499530692422, |
|
"learning_rate": 2.999970126331454e-06, |
|
"loss": 1.3905, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1322314049586777, |
|
"grad_norm": 16.161094239545125, |
|
"learning_rate": 2.999963118956437e-06, |
|
"loss": 1.4074, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.13443526170798897, |
|
"grad_norm": 14.463604561427449, |
|
"learning_rate": 2.999955373975692e-06, |
|
"loss": 1.3949, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.13663911845730028, |
|
"grad_norm": 22.599162124810228, |
|
"learning_rate": 2.9999468913930286e-06, |
|
"loss": 1.2523, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.13884297520661157, |
|
"grad_norm": 15.721083306967905, |
|
"learning_rate": 2.999937671212617e-06, |
|
"loss": 1.2407, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.14104683195592285, |
|
"grad_norm": 22.450444426883404, |
|
"learning_rate": 2.999927713438992e-06, |
|
"loss": 1.31, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.14325068870523416, |
|
"grad_norm": 13.111698223582062, |
|
"learning_rate": 2.9999170180770503e-06, |
|
"loss": 1.3233, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14545454545454545, |
|
"grad_norm": 17.736015434280038, |
|
"learning_rate": 2.999905585132051e-06, |
|
"loss": 1.2608, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.14765840220385676, |
|
"grad_norm": 13.247075058563684, |
|
"learning_rate": 2.9998934146096163e-06, |
|
"loss": 1.226, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.14986225895316804, |
|
"grad_norm": 16.093995265042583, |
|
"learning_rate": 2.9998805065157313e-06, |
|
"loss": 1.1238, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.15206611570247933, |
|
"grad_norm": 11.789269283624224, |
|
"learning_rate": 2.9998668608567433e-06, |
|
"loss": 1.2928, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.15426997245179064, |
|
"grad_norm": 12.799525613499874, |
|
"learning_rate": 2.9998524776393626e-06, |
|
"loss": 1.1069, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15647382920110192, |
|
"grad_norm": 17.988308354823435, |
|
"learning_rate": 2.9998373568706622e-06, |
|
"loss": 1.0719, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.15867768595041323, |
|
"grad_norm": 15.73635047176777, |
|
"learning_rate": 2.9998214985580777e-06, |
|
"loss": 1.2518, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.16088154269972452, |
|
"grad_norm": 14.654557879657558, |
|
"learning_rate": 2.9998049027094073e-06, |
|
"loss": 1.0366, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1630853994490358, |
|
"grad_norm": 10.392700397421045, |
|
"learning_rate": 2.9997875693328126e-06, |
|
"loss": 1.1564, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.1652892561983471, |
|
"grad_norm": 13.308000530271709, |
|
"learning_rate": 2.9997694984368163e-06, |
|
"loss": 1.2075, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1674931129476584, |
|
"grad_norm": 13.182861834247353, |
|
"learning_rate": 2.999750690030305e-06, |
|
"loss": 1.1836, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1696969696969697, |
|
"grad_norm": 11.679937122230694, |
|
"learning_rate": 2.9997311441225286e-06, |
|
"loss": 0.9576, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.171900826446281, |
|
"grad_norm": 11.700036415813958, |
|
"learning_rate": 2.9997108607230975e-06, |
|
"loss": 1.1455, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.17410468319559227, |
|
"grad_norm": 21.553282982313565, |
|
"learning_rate": 2.9996898398419867e-06, |
|
"loss": 0.9717, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1763085399449036, |
|
"grad_norm": 12.956436863018121, |
|
"learning_rate": 2.9996680814895334e-06, |
|
"loss": 0.9445, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17851239669421487, |
|
"grad_norm": 11.392114741947795, |
|
"learning_rate": 2.9996455856764373e-06, |
|
"loss": 1.1615, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.18071625344352618, |
|
"grad_norm": 12.793644484917351, |
|
"learning_rate": 2.99962235241376e-06, |
|
"loss": 0.9005, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.18292011019283747, |
|
"grad_norm": 11.278622938092001, |
|
"learning_rate": 2.9995983817129273e-06, |
|
"loss": 0.8012, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.18512396694214875, |
|
"grad_norm": 11.924228852181994, |
|
"learning_rate": 2.9995736735857256e-06, |
|
"loss": 0.8359, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.18732782369146006, |
|
"grad_norm": 10.574117414679797, |
|
"learning_rate": 2.9995482280443065e-06, |
|
"loss": 0.8495, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.18953168044077134, |
|
"grad_norm": 11.186152581473769, |
|
"learning_rate": 2.999522045101182e-06, |
|
"loss": 0.9247, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.19173553719008266, |
|
"grad_norm": 10.654547641676169, |
|
"learning_rate": 2.999495124769228e-06, |
|
"loss": 0.9681, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.19393939393939394, |
|
"grad_norm": 8.61313592647621, |
|
"learning_rate": 2.9994674670616814e-06, |
|
"loss": 0.9317, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.19614325068870522, |
|
"grad_norm": 18.887157394643115, |
|
"learning_rate": 2.9994390719921445e-06, |
|
"loss": 0.9185, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.19834710743801653, |
|
"grad_norm": 16.89569803683573, |
|
"learning_rate": 2.9994099395745795e-06, |
|
"loss": 1.0256, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.20055096418732782, |
|
"grad_norm": 8.519494558720615, |
|
"learning_rate": 2.9993800698233127e-06, |
|
"loss": 0.72, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.20275482093663913, |
|
"grad_norm": 9.970310282841208, |
|
"learning_rate": 2.999349462753032e-06, |
|
"loss": 0.7108, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.2049586776859504, |
|
"grad_norm": 10.966172547876596, |
|
"learning_rate": 2.9993181183787886e-06, |
|
"loss": 0.8898, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2071625344352617, |
|
"grad_norm": 8.457980829767811, |
|
"learning_rate": 2.999286036715997e-06, |
|
"loss": 0.7737, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.209366391184573, |
|
"grad_norm": 10.709756234820391, |
|
"learning_rate": 2.999253217780432e-06, |
|
"loss": 0.9179, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2115702479338843, |
|
"grad_norm": 11.769839019646232, |
|
"learning_rate": 2.999219661588233e-06, |
|
"loss": 0.8461, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2137741046831956, |
|
"grad_norm": 8.715508819021125, |
|
"learning_rate": 2.9991853681559014e-06, |
|
"loss": 0.8488, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.2159779614325069, |
|
"grad_norm": 11.957028477448837, |
|
"learning_rate": 2.9991503375003e-06, |
|
"loss": 0.7625, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.21818181818181817, |
|
"grad_norm": 9.66498196931626, |
|
"learning_rate": 2.999114569638656e-06, |
|
"loss": 1.1074, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.22038567493112948, |
|
"grad_norm": 10.269942780716466, |
|
"learning_rate": 2.999078064588558e-06, |
|
"loss": 0.6719, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.22258953168044077, |
|
"grad_norm": 10.52874796702632, |
|
"learning_rate": 2.9990408223679575e-06, |
|
"loss": 0.8706, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.22479338842975208, |
|
"grad_norm": 11.95952602668835, |
|
"learning_rate": 2.999002842995168e-06, |
|
"loss": 0.6669, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.22699724517906336, |
|
"grad_norm": 9.38549802466762, |
|
"learning_rate": 2.998964126488866e-06, |
|
"loss": 0.7366, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.22920110192837465, |
|
"grad_norm": 9.976231066334218, |
|
"learning_rate": 2.9989246728680897e-06, |
|
"loss": 0.6534, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.23140495867768596, |
|
"grad_norm": 9.102475454497984, |
|
"learning_rate": 2.9988844821522415e-06, |
|
"loss": 0.7173, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.23360881542699724, |
|
"grad_norm": 9.491160186253202, |
|
"learning_rate": 2.9988435543610844e-06, |
|
"loss": 0.6074, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.23581267217630855, |
|
"grad_norm": 10.513632694996241, |
|
"learning_rate": 2.998801889514745e-06, |
|
"loss": 0.6602, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.23801652892561984, |
|
"grad_norm": 9.646787450212365, |
|
"learning_rate": 2.998759487633712e-06, |
|
"loss": 0.6238, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.24022038567493112, |
|
"grad_norm": 9.92201571128595, |
|
"learning_rate": 2.9987163487388357e-06, |
|
"loss": 0.7945, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 9.806488011921113, |
|
"learning_rate": 2.9986724728513302e-06, |
|
"loss": 0.9243, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.24462809917355371, |
|
"grad_norm": 8.879233481017383, |
|
"learning_rate": 2.9986278599927717e-06, |
|
"loss": 0.6467, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.24683195592286503, |
|
"grad_norm": 10.905347276046735, |
|
"learning_rate": 2.9985825101850986e-06, |
|
"loss": 0.8978, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2490358126721763, |
|
"grad_norm": 8.064502908123723, |
|
"learning_rate": 2.9985364234506108e-06, |
|
"loss": 0.6498, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.2512396694214876, |
|
"grad_norm": 10.946829774416697, |
|
"learning_rate": 2.998489599811972e-06, |
|
"loss": 0.8144, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.2534435261707989, |
|
"grad_norm": 9.718407483089358, |
|
"learning_rate": 2.998442039292208e-06, |
|
"loss": 0.4826, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2556473829201102, |
|
"grad_norm": 7.869198489038032, |
|
"learning_rate": 2.998393741914706e-06, |
|
"loss": 0.5715, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2578512396694215, |
|
"grad_norm": 10.393656330664765, |
|
"learning_rate": 2.998344707703216e-06, |
|
"loss": 0.7082, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.26005509641873276, |
|
"grad_norm": 10.247308720424776, |
|
"learning_rate": 2.9982949366818514e-06, |
|
"loss": 0.6026, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2622589531680441, |
|
"grad_norm": 15.762210245537455, |
|
"learning_rate": 2.9982444288750864e-06, |
|
"loss": 0.7132, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.2644628099173554, |
|
"grad_norm": 8.438074902652675, |
|
"learning_rate": 2.9981931843077586e-06, |
|
"loss": 0.7253, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 9.95077096598991, |
|
"learning_rate": 2.9981412030050676e-06, |
|
"loss": 0.5475, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.26887052341597795, |
|
"grad_norm": 12.749558420196946, |
|
"learning_rate": 2.9980884849925743e-06, |
|
"loss": 0.5953, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.27107438016528923, |
|
"grad_norm": 7.913579849257431, |
|
"learning_rate": 2.9980350302962033e-06, |
|
"loss": 0.5146, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.27327823691460057, |
|
"grad_norm": 9.250651892900226, |
|
"learning_rate": 2.9979808389422417e-06, |
|
"loss": 0.6412, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.27548209366391185, |
|
"grad_norm": 9.158788687798582, |
|
"learning_rate": 2.997925910957337e-06, |
|
"loss": 0.5092, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.27768595041322314, |
|
"grad_norm": 10.088855837632275, |
|
"learning_rate": 2.9978702463684995e-06, |
|
"loss": 0.725, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2798898071625344, |
|
"grad_norm": 9.915909343545366, |
|
"learning_rate": 2.9978138452031037e-06, |
|
"loss": 0.5691, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2820936639118457, |
|
"grad_norm": 9.617311338418991, |
|
"learning_rate": 2.997756707488884e-06, |
|
"loss": 0.709, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.28429752066115704, |
|
"grad_norm": 8.156885740814575, |
|
"learning_rate": 2.997698833253938e-06, |
|
"loss": 0.4996, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2865013774104683, |
|
"grad_norm": 10.82636109019181, |
|
"learning_rate": 2.997640222526725e-06, |
|
"loss": 0.5769, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2887052341597796, |
|
"grad_norm": 9.323333527183065, |
|
"learning_rate": 2.997580875336067e-06, |
|
"loss": 0.6782, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.2909090909090909, |
|
"grad_norm": 8.168393819228521, |
|
"learning_rate": 2.9975207917111483e-06, |
|
"loss": 0.519, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.2931129476584022, |
|
"grad_norm": 8.430243817884287, |
|
"learning_rate": 2.9974599716815145e-06, |
|
"loss": 0.5014, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.2953168044077135, |
|
"grad_norm": 10.621443566182664, |
|
"learning_rate": 2.997398415277074e-06, |
|
"loss": 0.7323, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.2975206611570248, |
|
"grad_norm": 10.903203705940129, |
|
"learning_rate": 2.997336122528097e-06, |
|
"loss": 0.6959, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2997245179063361, |
|
"grad_norm": 13.310218126786243, |
|
"learning_rate": 2.997273093465216e-06, |
|
"loss": 0.6452, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.30192837465564737, |
|
"grad_norm": 9.111054312909284, |
|
"learning_rate": 2.997209328119425e-06, |
|
"loss": 0.5339, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.30413223140495865, |
|
"grad_norm": 8.5483917916211, |
|
"learning_rate": 2.997144826522082e-06, |
|
"loss": 0.7473, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.30633608815427, |
|
"grad_norm": 8.974042959614973, |
|
"learning_rate": 2.9970795887049043e-06, |
|
"loss": 0.6025, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.3085399449035813, |
|
"grad_norm": 9.761885242579995, |
|
"learning_rate": 2.9970136146999727e-06, |
|
"loss": 0.5253, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.31074380165289256, |
|
"grad_norm": 8.072862771710003, |
|
"learning_rate": 2.9969469045397298e-06, |
|
"loss": 0.6012, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.31294765840220384, |
|
"grad_norm": 7.269223969882805, |
|
"learning_rate": 2.996879458256981e-06, |
|
"loss": 0.4309, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.3151515151515151, |
|
"grad_norm": 8.584363487659544, |
|
"learning_rate": 2.996811275884892e-06, |
|
"loss": 0.4436, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.31735537190082647, |
|
"grad_norm": 8.732413459437103, |
|
"learning_rate": 2.9967423574569928e-06, |
|
"loss": 0.3715, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.31955922865013775, |
|
"grad_norm": 11.467423318928054, |
|
"learning_rate": 2.9966727030071725e-06, |
|
"loss": 0.5512, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.32176308539944903, |
|
"grad_norm": 10.868202581950138, |
|
"learning_rate": 2.996602312569684e-06, |
|
"loss": 0.4402, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.3239669421487603, |
|
"grad_norm": 9.203463518102229, |
|
"learning_rate": 2.9965311861791427e-06, |
|
"loss": 0.4785, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.3261707988980716, |
|
"grad_norm": 9.705531063021647, |
|
"learning_rate": 2.9964593238705237e-06, |
|
"loss": 0.5051, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.32837465564738294, |
|
"grad_norm": 8.25057132852951, |
|
"learning_rate": 2.996386725679166e-06, |
|
"loss": 0.4323, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.3305785123966942, |
|
"grad_norm": 8.259086063115724, |
|
"learning_rate": 2.996313391640769e-06, |
|
"loss": 0.7087, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3327823691460055, |
|
"grad_norm": 9.085318085110034, |
|
"learning_rate": 2.9962393217913956e-06, |
|
"loss": 0.5809, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3349862258953168, |
|
"grad_norm": 11.724016100603865, |
|
"learning_rate": 2.996164516167469e-06, |
|
"loss": 0.4559, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3371900826446281, |
|
"grad_norm": 11.692141310097577, |
|
"learning_rate": 2.996088974805775e-06, |
|
"loss": 0.63, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.3393939393939394, |
|
"grad_norm": 14.302015659156769, |
|
"learning_rate": 2.9960126977434607e-06, |
|
"loss": 0.4642, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.3415977961432507, |
|
"grad_norm": 10.601023905940114, |
|
"learning_rate": 2.9959356850180356e-06, |
|
"loss": 0.6745, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.343801652892562, |
|
"grad_norm": 7.373808999878665, |
|
"learning_rate": 2.9958579366673703e-06, |
|
"loss": 0.5854, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.34600550964187327, |
|
"grad_norm": 9.158314159683968, |
|
"learning_rate": 2.995779452729698e-06, |
|
"loss": 0.5532, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.34820936639118455, |
|
"grad_norm": 13.128561412436367, |
|
"learning_rate": 2.995700233243613e-06, |
|
"loss": 0.5181, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.3504132231404959, |
|
"grad_norm": 9.448628448642765, |
|
"learning_rate": 2.995620278248071e-06, |
|
"loss": 0.4891, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.3526170798898072, |
|
"grad_norm": 10.213114577672373, |
|
"learning_rate": 2.995539587782391e-06, |
|
"loss": 0.5077, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.35482093663911846, |
|
"grad_norm": 8.38970998579396, |
|
"learning_rate": 2.9954581618862503e-06, |
|
"loss": 0.405, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.35702479338842974, |
|
"grad_norm": 13.665397680766324, |
|
"learning_rate": 2.995376000599692e-06, |
|
"loss": 0.5125, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.359228650137741, |
|
"grad_norm": 14.179230821803024, |
|
"learning_rate": 2.995293103963118e-06, |
|
"loss": 0.4435, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.36143250688705236, |
|
"grad_norm": 8.756398485153353, |
|
"learning_rate": 2.9952094720172932e-06, |
|
"loss": 0.5425, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 11.043239379621582, |
|
"learning_rate": 2.995125104803343e-06, |
|
"loss": 0.3934, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.36584022038567493, |
|
"grad_norm": 10.923159516630248, |
|
"learning_rate": 2.9950400023627547e-06, |
|
"loss": 0.505, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3680440771349862, |
|
"grad_norm": 10.628828504738241, |
|
"learning_rate": 2.994954164737378e-06, |
|
"loss": 0.3617, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3702479338842975, |
|
"grad_norm": 15.635865238422193, |
|
"learning_rate": 2.994867591969423e-06, |
|
"loss": 0.6068, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.37245179063360884, |
|
"grad_norm": 12.758012717712035, |
|
"learning_rate": 2.994780284101462e-06, |
|
"loss": 0.5216, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3746556473829201, |
|
"grad_norm": 15.578582001690497, |
|
"learning_rate": 2.994692241176429e-06, |
|
"loss": 0.5622, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3768595041322314, |
|
"grad_norm": 7.9750143389034855, |
|
"learning_rate": 2.994603463237618e-06, |
|
"loss": 0.3273, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3790633608815427, |
|
"grad_norm": 12.572717399971113, |
|
"learning_rate": 2.9945139503286868e-06, |
|
"loss": 0.4176, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.38126721763085397, |
|
"grad_norm": 20.36436189716221, |
|
"learning_rate": 2.9944237024936524e-06, |
|
"loss": 0.4739, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3834710743801653, |
|
"grad_norm": 10.297340479801056, |
|
"learning_rate": 2.994332719776894e-06, |
|
"loss": 0.4775, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3856749311294766, |
|
"grad_norm": 10.284849583006528, |
|
"learning_rate": 2.994241002223153e-06, |
|
"loss": 0.4431, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3878787878787879, |
|
"grad_norm": 10.928184893741808, |
|
"learning_rate": 2.994148549877531e-06, |
|
"loss": 0.3293, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.39008264462809916, |
|
"grad_norm": 13.263047600878323, |
|
"learning_rate": 2.9940553627854913e-06, |
|
"loss": 0.3778, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.39228650137741045, |
|
"grad_norm": 7.530956085027773, |
|
"learning_rate": 2.993961440992859e-06, |
|
"loss": 0.4591, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3944903581267218, |
|
"grad_norm": 10.83288966187011, |
|
"learning_rate": 2.993866784545819e-06, |
|
"loss": 0.2411, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.39669421487603307, |
|
"grad_norm": 19.45648393646713, |
|
"learning_rate": 2.99377139349092e-06, |
|
"loss": 0.4308, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.39889807162534435, |
|
"grad_norm": 11.172989386477646, |
|
"learning_rate": 2.9936752678750693e-06, |
|
"loss": 0.5432, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.40110192837465564, |
|
"grad_norm": 8.31782661386756, |
|
"learning_rate": 2.993578407745538e-06, |
|
"loss": 0.3763, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.4033057851239669, |
|
"grad_norm": 11.442522440089068, |
|
"learning_rate": 2.993480813149955e-06, |
|
"loss": 0.517, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.40550964187327826, |
|
"grad_norm": 8.9587829278875, |
|
"learning_rate": 2.993382484136314e-06, |
|
"loss": 0.3016, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.40771349862258954, |
|
"grad_norm": 7.108332306844178, |
|
"learning_rate": 2.9932834207529676e-06, |
|
"loss": 0.2971, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.4099173553719008, |
|
"grad_norm": 10.329961838873576, |
|
"learning_rate": 2.9931836230486303e-06, |
|
"loss": 0.5498, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.4121212121212121, |
|
"grad_norm": 13.216403474783826, |
|
"learning_rate": 2.9930830910723772e-06, |
|
"loss": 0.2092, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.4143250688705234, |
|
"grad_norm": 8.671324083824599, |
|
"learning_rate": 2.9929818248736447e-06, |
|
"loss": 0.2558, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.41652892561983473, |
|
"grad_norm": 9.710171011043373, |
|
"learning_rate": 2.992879824502232e-06, |
|
"loss": 0.3006, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.418732782369146, |
|
"grad_norm": 21.813616228299125, |
|
"learning_rate": 2.9927770900082955e-06, |
|
"loss": 0.5329, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.4209366391184573, |
|
"grad_norm": 13.323515391705687, |
|
"learning_rate": 2.992673621442356e-06, |
|
"loss": 0.4125, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.4231404958677686, |
|
"grad_norm": 9.278125853902523, |
|
"learning_rate": 2.992569418855294e-06, |
|
"loss": 0.3332, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.42534435261707987, |
|
"grad_norm": 10.791350469404003, |
|
"learning_rate": 2.9924644822983503e-06, |
|
"loss": 0.5235, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.4275482093663912, |
|
"grad_norm": 8.898978752132685, |
|
"learning_rate": 2.9923588118231285e-06, |
|
"loss": 0.5324, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.4297520661157025, |
|
"grad_norm": 8.533915405225065, |
|
"learning_rate": 2.992252407481591e-06, |
|
"loss": 0.518, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.4319559228650138, |
|
"grad_norm": 10.992324651322523, |
|
"learning_rate": 2.9921452693260635e-06, |
|
"loss": 0.5302, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.43415977961432506, |
|
"grad_norm": 23.456111817713143, |
|
"learning_rate": 2.9920373974092295e-06, |
|
"loss": 0.4521, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.43636363636363634, |
|
"grad_norm": 14.786200878275693, |
|
"learning_rate": 2.9919287917841353e-06, |
|
"loss": 0.5255, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.4385674931129477, |
|
"grad_norm": 11.085386121303593, |
|
"learning_rate": 2.9918194525041885e-06, |
|
"loss": 0.2744, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.44077134986225897, |
|
"grad_norm": 8.906532048314519, |
|
"learning_rate": 2.9917093796231553e-06, |
|
"loss": 0.5756, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.44297520661157025, |
|
"grad_norm": 7.951838632934386, |
|
"learning_rate": 2.9915985731951645e-06, |
|
"loss": 0.5796, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.44517906336088153, |
|
"grad_norm": 17.58502984886795, |
|
"learning_rate": 2.9914870332747063e-06, |
|
"loss": 0.489, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.4473829201101928, |
|
"grad_norm": 15.295500415608018, |
|
"learning_rate": 2.9913747599166284e-06, |
|
"loss": 0.4597, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.44958677685950416, |
|
"grad_norm": 13.049299141244818, |
|
"learning_rate": 2.9912617531761423e-06, |
|
"loss": 0.4749, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.45179063360881544, |
|
"grad_norm": 9.931073149347117, |
|
"learning_rate": 2.9911480131088188e-06, |
|
"loss": 0.4997, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4539944903581267, |
|
"grad_norm": 8.566494688187221, |
|
"learning_rate": 2.991033539770589e-06, |
|
"loss": 0.377, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.456198347107438, |
|
"grad_norm": 13.85397292070045, |
|
"learning_rate": 2.990918333217746e-06, |
|
"loss": 0.3859, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.4584022038567493, |
|
"grad_norm": 11.550854145013796, |
|
"learning_rate": 2.990802393506942e-06, |
|
"loss": 0.5885, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.46060606060606063, |
|
"grad_norm": 6.01047647343179, |
|
"learning_rate": 2.99068572069519e-06, |
|
"loss": 0.323, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.4628099173553719, |
|
"grad_norm": 8.224271030300471, |
|
"learning_rate": 2.9905683148398643e-06, |
|
"loss": 0.5296, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.4650137741046832, |
|
"grad_norm": 15.640561146666892, |
|
"learning_rate": 2.9904501759986982e-06, |
|
"loss": 0.507, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.4672176308539945, |
|
"grad_norm": 10.955257491240019, |
|
"learning_rate": 2.9903313042297874e-06, |
|
"loss": 0.3995, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.46942148760330576, |
|
"grad_norm": 7.323757447082715, |
|
"learning_rate": 2.9902116995915872e-06, |
|
"loss": 0.3823, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.4716253443526171, |
|
"grad_norm": 7.703416812989557, |
|
"learning_rate": 2.9900913621429124e-06, |
|
"loss": 0.2121, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.4738292011019284, |
|
"grad_norm": 7.11349178483258, |
|
"learning_rate": 2.9899702919429383e-06, |
|
"loss": 0.4344, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.47603305785123967, |
|
"grad_norm": 14.027780027978222, |
|
"learning_rate": 2.989848489051203e-06, |
|
"loss": 0.3547, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.47823691460055096, |
|
"grad_norm": 6.760823267329529, |
|
"learning_rate": 2.9897259535276007e-06, |
|
"loss": 0.2463, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.48044077134986224, |
|
"grad_norm": 7.913445075265595, |
|
"learning_rate": 2.9896026854323896e-06, |
|
"loss": 0.4024, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.4826446280991736, |
|
"grad_norm": 11.412613438060973, |
|
"learning_rate": 2.9894786848261863e-06, |
|
"loss": 0.3003, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 9.62241009305966, |
|
"learning_rate": 2.9893539517699683e-06, |
|
"loss": 0.3287, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.48705234159779615, |
|
"grad_norm": 7.753152354810825, |
|
"learning_rate": 2.989228486325072e-06, |
|
"loss": 0.3269, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.48925619834710743, |
|
"grad_norm": 7.295729043500175, |
|
"learning_rate": 2.989102288553196e-06, |
|
"loss": 0.3438, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4914600550964187, |
|
"grad_norm": 9.153774556100242, |
|
"learning_rate": 2.9889753585163977e-06, |
|
"loss": 0.3999, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.49366391184573005, |
|
"grad_norm": 13.438636520374068, |
|
"learning_rate": 2.988847696277095e-06, |
|
"loss": 0.2573, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.49586776859504134, |
|
"grad_norm": 10.228395361152069, |
|
"learning_rate": 2.9887193018980653e-06, |
|
"loss": 0.252, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4980716253443526, |
|
"grad_norm": 10.244058650244439, |
|
"learning_rate": 2.9885901754424465e-06, |
|
"loss": 0.4004, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.5002754820936639, |
|
"grad_norm": 6.461802138295602, |
|
"learning_rate": 2.9884603169737363e-06, |
|
"loss": 0.2336, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.5024793388429752, |
|
"grad_norm": 11.410239126771371, |
|
"learning_rate": 2.988329726555793e-06, |
|
"loss": 0.3149, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.5046831955922865, |
|
"grad_norm": 10.968492435160934, |
|
"learning_rate": 2.988198404252834e-06, |
|
"loss": 0.3722, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.5068870523415978, |
|
"grad_norm": 12.497668168814954, |
|
"learning_rate": 2.988066350129437e-06, |
|
"loss": 0.4878, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.509090909090909, |
|
"grad_norm": 11.630857036501677, |
|
"learning_rate": 2.9879335642505396e-06, |
|
"loss": 0.2971, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.5112947658402204, |
|
"grad_norm": 6.441981218733046, |
|
"learning_rate": 2.9878000466814392e-06, |
|
"loss": 0.3021, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.5134986225895317, |
|
"grad_norm": 8.17525488032321, |
|
"learning_rate": 2.987665797487793e-06, |
|
"loss": 0.2385, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.515702479338843, |
|
"grad_norm": 25.7467268298053, |
|
"learning_rate": 2.9875308167356174e-06, |
|
"loss": 0.2288, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.5179063360881543, |
|
"grad_norm": 10.459874704987152, |
|
"learning_rate": 2.9873951044912893e-06, |
|
"loss": 0.2691, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5201101928374655, |
|
"grad_norm": 7.962423185829474, |
|
"learning_rate": 2.987258660821546e-06, |
|
"loss": 0.3499, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.5223140495867769, |
|
"grad_norm": 8.351626000174596, |
|
"learning_rate": 2.9871214857934823e-06, |
|
"loss": 0.2256, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.5245179063360882, |
|
"grad_norm": 8.76914626967428, |
|
"learning_rate": 2.9869835794745546e-06, |
|
"loss": 0.3521, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.5267217630853994, |
|
"grad_norm": 12.774308842259753, |
|
"learning_rate": 2.986844941932578e-06, |
|
"loss": 0.263, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.5289256198347108, |
|
"grad_norm": 8.662606106336478, |
|
"learning_rate": 2.9867055732357275e-06, |
|
"loss": 0.3717, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.531129476584022, |
|
"grad_norm": 7.44373651269041, |
|
"learning_rate": 2.986565473452538e-06, |
|
"loss": 0.3603, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 16.113652444436543, |
|
"learning_rate": 2.986424642651902e-06, |
|
"loss": 0.4322, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.5355371900826447, |
|
"grad_norm": 7.235187047429284, |
|
"learning_rate": 2.986283080903075e-06, |
|
"loss": 0.2718, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.5377410468319559, |
|
"grad_norm": 8.981158375257591, |
|
"learning_rate": 2.986140788275668e-06, |
|
"loss": 0.2909, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.5399449035812672, |
|
"grad_norm": 7.728183713731981, |
|
"learning_rate": 2.9859977648396547e-06, |
|
"loss": 0.2688, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5421487603305785, |
|
"grad_norm": 11.385723529297572, |
|
"learning_rate": 2.985854010665366e-06, |
|
"loss": 0.2951, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.5443526170798898, |
|
"grad_norm": 22.421774240399724, |
|
"learning_rate": 2.985709525823493e-06, |
|
"loss": 0.2722, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.5465564738292011, |
|
"grad_norm": 26.25406750616081, |
|
"learning_rate": 2.9855643103850863e-06, |
|
"loss": 0.2057, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.5487603305785124, |
|
"grad_norm": 14.419670718604909, |
|
"learning_rate": 2.985418364421555e-06, |
|
"loss": 0.1859, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.5509641873278237, |
|
"grad_norm": 7.18194575744084, |
|
"learning_rate": 2.9852716880046687e-06, |
|
"loss": 0.1823, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5531680440771349, |
|
"grad_norm": 11.041149585270512, |
|
"learning_rate": 2.9851242812065544e-06, |
|
"loss": 0.4397, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.5553719008264463, |
|
"grad_norm": 9.800903211627961, |
|
"learning_rate": 2.9849761440996997e-06, |
|
"loss": 0.3035, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.5575757575757576, |
|
"grad_norm": 5.42248846810359, |
|
"learning_rate": 2.9848272767569515e-06, |
|
"loss": 0.4126, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.5597796143250688, |
|
"grad_norm": 12.064336260743763, |
|
"learning_rate": 2.9846776792515146e-06, |
|
"loss": 0.4134, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5619834710743802, |
|
"grad_norm": 9.069153337986716, |
|
"learning_rate": 2.9845273516569534e-06, |
|
"loss": 0.3347, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5641873278236914, |
|
"grad_norm": 9.708719457668808, |
|
"learning_rate": 2.9843762940471915e-06, |
|
"loss": 0.1482, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5663911845730027, |
|
"grad_norm": 10.658478324774729, |
|
"learning_rate": 2.984224506496512e-06, |
|
"loss": 0.4586, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5685950413223141, |
|
"grad_norm": 7.67634362110084, |
|
"learning_rate": 2.984071989079555e-06, |
|
"loss": 0.3101, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5707988980716253, |
|
"grad_norm": 9.863415592665492, |
|
"learning_rate": 2.983918741871322e-06, |
|
"loss": 0.283, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.5730027548209367, |
|
"grad_norm": 15.328770243567073, |
|
"learning_rate": 2.983764764947172e-06, |
|
"loss": 0.3214, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5752066115702479, |
|
"grad_norm": 5.602530886005082, |
|
"learning_rate": 2.983610058382822e-06, |
|
"loss": 0.1458, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.5774104683195592, |
|
"grad_norm": 9.188539359413474, |
|
"learning_rate": 2.9834546222543503e-06, |
|
"loss": 0.2688, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.5796143250688706, |
|
"grad_norm": 8.913852840097164, |
|
"learning_rate": 2.9832984566381913e-06, |
|
"loss": 0.2904, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5818181818181818, |
|
"grad_norm": 8.408585828146748, |
|
"learning_rate": 2.98314156161114e-06, |
|
"loss": 0.3473, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5840220385674931, |
|
"grad_norm": 7.011982039251787, |
|
"learning_rate": 2.9829839372503496e-06, |
|
"loss": 0.2214, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5862258953168044, |
|
"grad_norm": 12.208817142819923, |
|
"learning_rate": 2.982825583633331e-06, |
|
"loss": 0.228, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5884297520661157, |
|
"grad_norm": 22.300240567856033, |
|
"learning_rate": 2.982666500837955e-06, |
|
"loss": 0.3841, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.590633608815427, |
|
"grad_norm": 10.204434125277357, |
|
"learning_rate": 2.9825066889424507e-06, |
|
"loss": 0.4413, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5928374655647383, |
|
"grad_norm": 10.394116435568966, |
|
"learning_rate": 2.9823461480254046e-06, |
|
"loss": 0.2164, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5950413223140496, |
|
"grad_norm": 24.930405170814677, |
|
"learning_rate": 2.982184878165763e-06, |
|
"loss": 0.404, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5972451790633608, |
|
"grad_norm": 16.529549906687567, |
|
"learning_rate": 2.982022879442831e-06, |
|
"loss": 0.1971, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5994490358126722, |
|
"grad_norm": 6.802656347424857, |
|
"learning_rate": 2.98186015193627e-06, |
|
"loss": 0.2763, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.6016528925619835, |
|
"grad_norm": 9.967577482684383, |
|
"learning_rate": 2.9816966957261022e-06, |
|
"loss": 0.505, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.6038567493112947, |
|
"grad_norm": 5.5700188781618944, |
|
"learning_rate": 2.981532510892707e-06, |
|
"loss": 0.3295, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 13.138835779765238, |
|
"learning_rate": 2.981367597516821e-06, |
|
"loss": 0.2216, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.6082644628099173, |
|
"grad_norm": 8.78165681806648, |
|
"learning_rate": 2.9812019556795414e-06, |
|
"loss": 0.4269, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.6104683195592286, |
|
"grad_norm": 7.519045608568057, |
|
"learning_rate": 2.981035585462322e-06, |
|
"loss": 0.3028, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.61267217630854, |
|
"grad_norm": 12.757117931598634, |
|
"learning_rate": 2.9808684869469756e-06, |
|
"loss": 0.2731, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.6148760330578512, |
|
"grad_norm": 14.32345009190218, |
|
"learning_rate": 2.9807006602156723e-06, |
|
"loss": 0.439, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.6170798898071626, |
|
"grad_norm": 10.78268372690156, |
|
"learning_rate": 2.9805321053509414e-06, |
|
"loss": 0.3267, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.6192837465564738, |
|
"grad_norm": 20.968509670896452, |
|
"learning_rate": 2.9803628224356688e-06, |
|
"loss": 0.1478, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.6214876033057851, |
|
"grad_norm": 13.939089444718862, |
|
"learning_rate": 2.9801928115531e-06, |
|
"loss": 0.2811, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.6236914600550965, |
|
"grad_norm": 19.46588188306463, |
|
"learning_rate": 2.9800220727868367e-06, |
|
"loss": 0.2355, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.6258953168044077, |
|
"grad_norm": 23.31435507775797, |
|
"learning_rate": 2.9798506062208408e-06, |
|
"loss": 0.3445, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.628099173553719, |
|
"grad_norm": 7.113401931682151, |
|
"learning_rate": 2.9796784119394296e-06, |
|
"loss": 0.1838, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6303030303030303, |
|
"grad_norm": 9.83090974685671, |
|
"learning_rate": 2.979505490027281e-06, |
|
"loss": 0.3413, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.6325068870523416, |
|
"grad_norm": 11.816944932544452, |
|
"learning_rate": 2.979331840569428e-06, |
|
"loss": 0.2624, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.6347107438016529, |
|
"grad_norm": 5.505913040295101, |
|
"learning_rate": 2.979157463651263e-06, |
|
"loss": 0.2669, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.6369146005509642, |
|
"grad_norm": 7.9610097101417745, |
|
"learning_rate": 2.978982359358536e-06, |
|
"loss": 0.2418, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.6391184573002755, |
|
"grad_norm": 14.054084856949025, |
|
"learning_rate": 2.9788065277773537e-06, |
|
"loss": 0.3091, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6413223140495867, |
|
"grad_norm": 6.994997659089347, |
|
"learning_rate": 2.978629968994182e-06, |
|
"loss": 0.3571, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.6435261707988981, |
|
"grad_norm": 4.900411467720552, |
|
"learning_rate": 2.978452683095843e-06, |
|
"loss": 0.2389, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.6457300275482094, |
|
"grad_norm": 5.986010486219538, |
|
"learning_rate": 2.978274670169517e-06, |
|
"loss": 0.4119, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.6479338842975206, |
|
"grad_norm": 9.99633467057808, |
|
"learning_rate": 2.978095930302741e-06, |
|
"loss": 0.1945, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.650137741046832, |
|
"grad_norm": 8.88618573335772, |
|
"learning_rate": 2.9779164635834117e-06, |
|
"loss": 0.3021, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6523415977961432, |
|
"grad_norm": 6.1713366384622885, |
|
"learning_rate": 2.9777362700997813e-06, |
|
"loss": 0.3536, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.6545454545454545, |
|
"grad_norm": 6.269429712344906, |
|
"learning_rate": 2.9775553499404588e-06, |
|
"loss": 0.2532, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.6567493112947659, |
|
"grad_norm": 8.01546204853658, |
|
"learning_rate": 2.9773737031944123e-06, |
|
"loss": 0.2054, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.6589531680440771, |
|
"grad_norm": 5.834139768693889, |
|
"learning_rate": 2.9771913299509667e-06, |
|
"loss": 0.3205, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.6611570247933884, |
|
"grad_norm": 5.20599030336784, |
|
"learning_rate": 2.977008230299803e-06, |
|
"loss": 0.233, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6633608815426997, |
|
"grad_norm": 7.747852317820463, |
|
"learning_rate": 2.9768244043309612e-06, |
|
"loss": 0.1853, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.665564738292011, |
|
"grad_norm": 7.280420219391244, |
|
"learning_rate": 2.9766398521348376e-06, |
|
"loss": 0.2404, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.6677685950413224, |
|
"grad_norm": 8.649220755714195, |
|
"learning_rate": 2.9764545738021847e-06, |
|
"loss": 0.2864, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.6699724517906336, |
|
"grad_norm": 7.124734041126083, |
|
"learning_rate": 2.976268569424114e-06, |
|
"loss": 0.5867, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.6721763085399449, |
|
"grad_norm": 8.708663746697807, |
|
"learning_rate": 2.9760818390920924e-06, |
|
"loss": 0.2924, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6743801652892562, |
|
"grad_norm": 13.18814959982244, |
|
"learning_rate": 2.9758943828979446e-06, |
|
"loss": 0.3782, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6765840220385675, |
|
"grad_norm": 12.243598876193756, |
|
"learning_rate": 2.975706200933852e-06, |
|
"loss": 0.2185, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6787878787878788, |
|
"grad_norm": 5.633700961339941, |
|
"learning_rate": 2.975517293292353e-06, |
|
"loss": 0.3752, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.6809917355371901, |
|
"grad_norm": 10.334114912832058, |
|
"learning_rate": 2.9753276600663423e-06, |
|
"loss": 0.3026, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6831955922865014, |
|
"grad_norm": 13.08744426612834, |
|
"learning_rate": 2.9751373013490727e-06, |
|
"loss": 0.3382, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6853994490358126, |
|
"grad_norm": 9.504458358236962, |
|
"learning_rate": 2.9749462172341524e-06, |
|
"loss": 0.5009, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.687603305785124, |
|
"grad_norm": 6.3562762612917245, |
|
"learning_rate": 2.9747544078155472e-06, |
|
"loss": 0.3382, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.6898071625344353, |
|
"grad_norm": 12.917546406900222, |
|
"learning_rate": 2.974561873187579e-06, |
|
"loss": 0.2846, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.6920110192837465, |
|
"grad_norm": 6.944988521051594, |
|
"learning_rate": 2.9743686134449267e-06, |
|
"loss": 0.3612, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.6942148760330579, |
|
"grad_norm": 7.791757617860102, |
|
"learning_rate": 2.974174628682626e-06, |
|
"loss": 0.4309, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6964187327823691, |
|
"grad_norm": 6.838689832113007, |
|
"learning_rate": 2.973979918996068e-06, |
|
"loss": 0.3473, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.6986225895316804, |
|
"grad_norm": 6.638427266811496, |
|
"learning_rate": 2.973784484481001e-06, |
|
"loss": 0.3482, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.7008264462809918, |
|
"grad_norm": 15.019684641662097, |
|
"learning_rate": 2.9735883252335305e-06, |
|
"loss": 0.3447, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.703030303030303, |
|
"grad_norm": 7.657611207942621, |
|
"learning_rate": 2.9733914413501172e-06, |
|
"loss": 0.1998, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.7052341597796143, |
|
"grad_norm": 12.679008507955187, |
|
"learning_rate": 2.973193832927579e-06, |
|
"loss": 0.2036, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.7074380165289256, |
|
"grad_norm": 7.001507137978587, |
|
"learning_rate": 2.9729955000630886e-06, |
|
"loss": 0.4147, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.7096418732782369, |
|
"grad_norm": 7.424077459455387, |
|
"learning_rate": 2.972796442854178e-06, |
|
"loss": 0.3658, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.7118457300275483, |
|
"grad_norm": 9.447237658007161, |
|
"learning_rate": 2.9725966613987312e-06, |
|
"loss": 0.3984, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.7140495867768595, |
|
"grad_norm": 21.361023805823653, |
|
"learning_rate": 2.972396155794992e-06, |
|
"loss": 0.371, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.7162534435261708, |
|
"grad_norm": 8.834296167166244, |
|
"learning_rate": 2.9721949261415592e-06, |
|
"loss": 0.3004, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.718457300275482, |
|
"grad_norm": 6.742790410332544, |
|
"learning_rate": 2.971992972537386e-06, |
|
"loss": 0.1565, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.7206611570247934, |
|
"grad_norm": 11.707104581221218, |
|
"learning_rate": 2.9717902950817833e-06, |
|
"loss": 0.2299, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.7228650137741047, |
|
"grad_norm": 5.476783766315915, |
|
"learning_rate": 2.9715868938744182e-06, |
|
"loss": 0.2493, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.725068870523416, |
|
"grad_norm": 11.160688397190603, |
|
"learning_rate": 2.971382769015313e-06, |
|
"loss": 0.3175, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 7.043330544504408, |
|
"learning_rate": 2.971177920604846e-06, |
|
"loss": 0.1722, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.7294765840220385, |
|
"grad_norm": 5.73739903037248, |
|
"learning_rate": 2.97097234874375e-06, |
|
"loss": 0.2176, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.7316804407713499, |
|
"grad_norm": 6.277934895839506, |
|
"learning_rate": 2.9707660535331166e-06, |
|
"loss": 0.2065, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.7338842975206612, |
|
"grad_norm": 20.30577221870282, |
|
"learning_rate": 2.9705590350743904e-06, |
|
"loss": 0.1991, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.7360881542699724, |
|
"grad_norm": 7.695272096611418, |
|
"learning_rate": 2.970351293469372e-06, |
|
"loss": 0.2131, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.7382920110192838, |
|
"grad_norm": 4.916567786531378, |
|
"learning_rate": 2.9701428288202194e-06, |
|
"loss": 0.3827, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.740495867768595, |
|
"grad_norm": 7.019231101260278, |
|
"learning_rate": 2.9699336412294444e-06, |
|
"loss": 0.2738, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.7426997245179063, |
|
"grad_norm": 7.975195630484479, |
|
"learning_rate": 2.9697237307999146e-06, |
|
"loss": 0.2243, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.7449035812672177, |
|
"grad_norm": 7.561687409407015, |
|
"learning_rate": 2.9695130976348534e-06, |
|
"loss": 0.2396, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.7471074380165289, |
|
"grad_norm": 7.638419171445274, |
|
"learning_rate": 2.9693017418378396e-06, |
|
"loss": 0.276, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.7493112947658402, |
|
"grad_norm": 12.616136018780299, |
|
"learning_rate": 2.9690896635128073e-06, |
|
"loss": 0.3787, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7515151515151515, |
|
"grad_norm": 6.550803777717444, |
|
"learning_rate": 2.968876862764046e-06, |
|
"loss": 0.2226, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.7537190082644628, |
|
"grad_norm": 9.06584766964822, |
|
"learning_rate": 2.9686633396962003e-06, |
|
"loss": 0.369, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.7559228650137741, |
|
"grad_norm": 5.386215213674309, |
|
"learning_rate": 2.9684490944142697e-06, |
|
"loss": 0.1736, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.7581267217630854, |
|
"grad_norm": 15.906647139869929, |
|
"learning_rate": 2.9682341270236085e-06, |
|
"loss": 0.2781, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.7603305785123967, |
|
"grad_norm": 12.1772384094106, |
|
"learning_rate": 2.9680184376299283e-06, |
|
"loss": 0.1544, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7625344352617079, |
|
"grad_norm": 17.88186148600346, |
|
"learning_rate": 2.967802026339293e-06, |
|
"loss": 0.2988, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.7647382920110193, |
|
"grad_norm": 21.268773226628287, |
|
"learning_rate": 2.9675848932581235e-06, |
|
"loss": 0.2821, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.7669421487603306, |
|
"grad_norm": 11.820036760533492, |
|
"learning_rate": 2.9673670384931945e-06, |
|
"loss": 0.3498, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.7691460055096419, |
|
"grad_norm": 7.530415574189705, |
|
"learning_rate": 2.967148462151635e-06, |
|
"loss": 0.1424, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.7713498622589532, |
|
"grad_norm": 6.807351681100111, |
|
"learning_rate": 2.9669291643409314e-06, |
|
"loss": 0.2128, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7735537190082644, |
|
"grad_norm": 7.234299741762421, |
|
"learning_rate": 2.9667091451689224e-06, |
|
"loss": 0.2932, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.7757575757575758, |
|
"grad_norm": 6.532060796534973, |
|
"learning_rate": 2.9664884047438023e-06, |
|
"loss": 0.2665, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.7779614325068871, |
|
"grad_norm": 10.34906346210999, |
|
"learning_rate": 2.96626694317412e-06, |
|
"loss": 0.3598, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.7801652892561983, |
|
"grad_norm": 8.210171975505114, |
|
"learning_rate": 2.966044760568779e-06, |
|
"loss": 0.4052, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.7823691460055097, |
|
"grad_norm": 4.541296912666131, |
|
"learning_rate": 2.9658218570370374e-06, |
|
"loss": 0.2954, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7845730027548209, |
|
"grad_norm": 5.703338007840205, |
|
"learning_rate": 2.965598232688508e-06, |
|
"loss": 0.3667, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7867768595041322, |
|
"grad_norm": 7.3908500819159135, |
|
"learning_rate": 2.965373887633158e-06, |
|
"loss": 0.0975, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7889807162534436, |
|
"grad_norm": 6.532653722744144, |
|
"learning_rate": 2.9651488219813086e-06, |
|
"loss": 0.2558, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7911845730027548, |
|
"grad_norm": 5.276369110257269, |
|
"learning_rate": 2.964923035843636e-06, |
|
"loss": 0.2056, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7933884297520661, |
|
"grad_norm": 7.121048051130931, |
|
"learning_rate": 2.96469652933117e-06, |
|
"loss": 0.1641, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7955922865013774, |
|
"grad_norm": 11.053322087508501, |
|
"learning_rate": 2.9644693025552957e-06, |
|
"loss": 0.4164, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.7977961432506887, |
|
"grad_norm": 6.503259157403907, |
|
"learning_rate": 2.964241355627751e-06, |
|
"loss": 0.2031, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 7.680655263285343, |
|
"learning_rate": 2.9640126886606287e-06, |
|
"loss": 0.4177, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.8022038567493113, |
|
"grad_norm": 9.597872477527922, |
|
"learning_rate": 2.9637833017663757e-06, |
|
"loss": 0.2231, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.8044077134986226, |
|
"grad_norm": 5.447713777602937, |
|
"learning_rate": 2.9635531950577927e-06, |
|
"loss": 0.2174, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.8066115702479338, |
|
"grad_norm": 7.189031762492828, |
|
"learning_rate": 2.963322368648035e-06, |
|
"loss": 0.2563, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.8088154269972452, |
|
"grad_norm": 7.898167623571763, |
|
"learning_rate": 2.963090822650611e-06, |
|
"loss": 0.3355, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.8110192837465565, |
|
"grad_norm": 12.577262656676943, |
|
"learning_rate": 2.9628585571793833e-06, |
|
"loss": 0.2235, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.8132231404958677, |
|
"grad_norm": 10.849928883515854, |
|
"learning_rate": 2.962625572348568e-06, |
|
"loss": 0.3533, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.8154269972451791, |
|
"grad_norm": 7.2964754371110665, |
|
"learning_rate": 2.9623918682727352e-06, |
|
"loss": 0.2871, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8176308539944903, |
|
"grad_norm": 20.660728227860485, |
|
"learning_rate": 2.9621574450668096e-06, |
|
"loss": 0.2393, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.8198347107438017, |
|
"grad_norm": 7.220865047189661, |
|
"learning_rate": 2.9619223028460675e-06, |
|
"loss": 0.1926, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.822038567493113, |
|
"grad_norm": 4.852195073645113, |
|
"learning_rate": 2.961686441726141e-06, |
|
"loss": 0.2192, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.8242424242424242, |
|
"grad_norm": 9.027190273331941, |
|
"learning_rate": 2.9614498618230133e-06, |
|
"loss": 0.1366, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.8264462809917356, |
|
"grad_norm": 6.456919976931707, |
|
"learning_rate": 2.961212563253023e-06, |
|
"loss": 0.197, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8286501377410468, |
|
"grad_norm": 5.401198387054753, |
|
"learning_rate": 2.9609745461328625e-06, |
|
"loss": 0.3046, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.8308539944903581, |
|
"grad_norm": 7.216796018509394, |
|
"learning_rate": 2.9607358105795746e-06, |
|
"loss": 0.206, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.8330578512396695, |
|
"grad_norm": 6.594512431937312, |
|
"learning_rate": 2.9604963567105588e-06, |
|
"loss": 0.1903, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8352617079889807, |
|
"grad_norm": 7.862389931194225, |
|
"learning_rate": 2.960256184643566e-06, |
|
"loss": 0.3653, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.837465564738292, |
|
"grad_norm": 7.885741556216934, |
|
"learning_rate": 2.9600152944967004e-06, |
|
"loss": 0.2993, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8396694214876033, |
|
"grad_norm": 8.760183620857365, |
|
"learning_rate": 2.9597736863884197e-06, |
|
"loss": 0.4891, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8418732782369146, |
|
"grad_norm": 5.626515592689077, |
|
"learning_rate": 2.9595313604375345e-06, |
|
"loss": 0.1762, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.8440771349862259, |
|
"grad_norm": 14.700970431401748, |
|
"learning_rate": 2.9592883167632086e-06, |
|
"loss": 0.4149, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8462809917355372, |
|
"grad_norm": 31.479222256604665, |
|
"learning_rate": 2.959044555484958e-06, |
|
"loss": 0.4708, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 15.30296170661704, |
|
"learning_rate": 2.9588000767226527e-06, |
|
"loss": 0.4, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.8506887052341597, |
|
"grad_norm": 6.22490236971322, |
|
"learning_rate": 2.958554880596515e-06, |
|
"loss": 0.175, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.8528925619834711, |
|
"grad_norm": 17.884639044262485, |
|
"learning_rate": 2.9583089672271193e-06, |
|
"loss": 0.1644, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.8550964187327824, |
|
"grad_norm": 12.60321691171589, |
|
"learning_rate": 2.9580623367353934e-06, |
|
"loss": 0.3522, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.8573002754820936, |
|
"grad_norm": 8.03246045529426, |
|
"learning_rate": 2.9578149892426185e-06, |
|
"loss": 0.2052, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.859504132231405, |
|
"grad_norm": 10.469045062705694, |
|
"learning_rate": 2.9575669248704265e-06, |
|
"loss": 0.269, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8617079889807162, |
|
"grad_norm": 5.196595120525184, |
|
"learning_rate": 2.957318143740803e-06, |
|
"loss": 0.3053, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.8639118457300275, |
|
"grad_norm": 6.554368800886443, |
|
"learning_rate": 2.957068645976087e-06, |
|
"loss": 0.2728, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.8661157024793389, |
|
"grad_norm": 9.262942687282687, |
|
"learning_rate": 2.956818431698968e-06, |
|
"loss": 0.1268, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8683195592286501, |
|
"grad_norm": 4.271103765701704, |
|
"learning_rate": 2.9565675010324883e-06, |
|
"loss": 0.1553, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8705234159779615, |
|
"grad_norm": 8.88374891264536, |
|
"learning_rate": 2.956315854100043e-06, |
|
"loss": 0.191, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8727272727272727, |
|
"grad_norm": 6.6498851365809815, |
|
"learning_rate": 2.9560634910253803e-06, |
|
"loss": 0.2245, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.874931129476584, |
|
"grad_norm": 20.390664152678355, |
|
"learning_rate": 2.955810411932599e-06, |
|
"loss": 0.2601, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8771349862258954, |
|
"grad_norm": 10.13737807765569, |
|
"learning_rate": 2.9555566169461497e-06, |
|
"loss": 0.3495, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.8793388429752066, |
|
"grad_norm": 11.159270329579886, |
|
"learning_rate": 2.955302106190837e-06, |
|
"loss": 0.2861, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.8815426997245179, |
|
"grad_norm": 22.71705363617449, |
|
"learning_rate": 2.955046879791816e-06, |
|
"loss": 0.2585, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8837465564738292, |
|
"grad_norm": 34.11862174617697, |
|
"learning_rate": 2.9547909378745942e-06, |
|
"loss": 0.2624, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.8859504132231405, |
|
"grad_norm": 11.799082659394301, |
|
"learning_rate": 2.9545342805650304e-06, |
|
"loss": 0.4528, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.8881542699724518, |
|
"grad_norm": 5.250052295042655, |
|
"learning_rate": 2.954276907989336e-06, |
|
"loss": 0.2705, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.8903581267217631, |
|
"grad_norm": 5.906133153801479, |
|
"learning_rate": 2.9540188202740737e-06, |
|
"loss": 0.291, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.8925619834710744, |
|
"grad_norm": 8.561658386525929, |
|
"learning_rate": 2.953760017546158e-06, |
|
"loss": 0.3078, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8947658402203856, |
|
"grad_norm": 12.819512771737724, |
|
"learning_rate": 2.953500499932855e-06, |
|
"loss": 0.3115, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.896969696969697, |
|
"grad_norm": 7.709667951718197, |
|
"learning_rate": 2.9532402675617823e-06, |
|
"loss": 0.2463, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.8991735537190083, |
|
"grad_norm": 7.580380677651997, |
|
"learning_rate": 2.9529793205609085e-06, |
|
"loss": 0.3511, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9013774104683195, |
|
"grad_norm": 6.15114721735365, |
|
"learning_rate": 2.952717659058555e-06, |
|
"loss": 0.2768, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9035812672176309, |
|
"grad_norm": 6.10589706130033, |
|
"learning_rate": 2.9524552831833926e-06, |
|
"loss": 0.2892, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9057851239669421, |
|
"grad_norm": 13.28238874201893, |
|
"learning_rate": 2.9521921930644446e-06, |
|
"loss": 0.1546, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.9079889807162534, |
|
"grad_norm": 10.451785736319954, |
|
"learning_rate": 2.951928388831086e-06, |
|
"loss": 0.2112, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.9101928374655648, |
|
"grad_norm": 8.054370887201316, |
|
"learning_rate": 2.9516638706130427e-06, |
|
"loss": 0.2637, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.912396694214876, |
|
"grad_norm": 8.36403593102867, |
|
"learning_rate": 2.95139863854039e-06, |
|
"loss": 0.3171, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9146005509641874, |
|
"grad_norm": 11.138721402799987, |
|
"learning_rate": 2.951132692743556e-06, |
|
"loss": 0.2595, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.9168044077134986, |
|
"grad_norm": 10.065171933068807, |
|
"learning_rate": 2.9508660333533202e-06, |
|
"loss": 0.2379, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.9190082644628099, |
|
"grad_norm": 9.372860713885858, |
|
"learning_rate": 2.950598660500811e-06, |
|
"loss": 0.4288, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9212121212121213, |
|
"grad_norm": 6.0014294996569255, |
|
"learning_rate": 2.9503305743175096e-06, |
|
"loss": 0.3175, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9234159779614325, |
|
"grad_norm": 6.0687105546706785, |
|
"learning_rate": 2.950061774935247e-06, |
|
"loss": 0.3117, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.9256198347107438, |
|
"grad_norm": 11.428281868746616, |
|
"learning_rate": 2.9497922624862047e-06, |
|
"loss": 0.2195, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9278236914600551, |
|
"grad_norm": 10.331982266269929, |
|
"learning_rate": 2.9495220371029163e-06, |
|
"loss": 0.2459, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9300275482093664, |
|
"grad_norm": 8.067422045194437, |
|
"learning_rate": 2.949251098918263e-06, |
|
"loss": 0.3405, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9322314049586777, |
|
"grad_norm": 6.509185070006409, |
|
"learning_rate": 2.9489794480654803e-06, |
|
"loss": 0.2208, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.934435261707989, |
|
"grad_norm": 5.996298899230075, |
|
"learning_rate": 2.948707084678152e-06, |
|
"loss": 0.2281, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9366391184573003, |
|
"grad_norm": 5.800748387128115, |
|
"learning_rate": 2.9484340088902114e-06, |
|
"loss": 0.1947, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9388429752066115, |
|
"grad_norm": 9.4543217677755, |
|
"learning_rate": 2.9481602208359446e-06, |
|
"loss": 0.2713, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9410468319559229, |
|
"grad_norm": 5.8179050501506575, |
|
"learning_rate": 2.947885720649986e-06, |
|
"loss": 0.4038, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.9432506887052342, |
|
"grad_norm": 5.9968558032817025, |
|
"learning_rate": 2.947610508467321e-06, |
|
"loss": 0.308, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.9454545454545454, |
|
"grad_norm": 5.218918783837259, |
|
"learning_rate": 2.947334584423285e-06, |
|
"loss": 0.1904, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.9476584022038568, |
|
"grad_norm": 5.893715073575893, |
|
"learning_rate": 2.947057948653564e-06, |
|
"loss": 0.3125, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.949862258953168, |
|
"grad_norm": 7.465509921171662, |
|
"learning_rate": 2.946780601294192e-06, |
|
"loss": 0.2722, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.9520661157024793, |
|
"grad_norm": 5.375598880031633, |
|
"learning_rate": 2.946502542481556e-06, |
|
"loss": 0.1337, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.9542699724517907, |
|
"grad_norm": 6.152314340379761, |
|
"learning_rate": 2.9462237723523904e-06, |
|
"loss": 0.1672, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.9564738292011019, |
|
"grad_norm": 7.412726910934929, |
|
"learning_rate": 2.94594429104378e-06, |
|
"loss": 0.2559, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.9586776859504132, |
|
"grad_norm": 4.711748751487062, |
|
"learning_rate": 2.94566409869316e-06, |
|
"loss": 0.156, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9608815426997245, |
|
"grad_norm": 7.236972071641592, |
|
"learning_rate": 2.945383195438314e-06, |
|
"loss": 0.3117, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.9630853994490358, |
|
"grad_norm": 14.403387281330506, |
|
"learning_rate": 2.9451015814173773e-06, |
|
"loss": 0.3208, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.9652892561983472, |
|
"grad_norm": 6.533624513809372, |
|
"learning_rate": 2.9448192567688325e-06, |
|
"loss": 0.1976, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.9674931129476584, |
|
"grad_norm": 4.256886100795899, |
|
"learning_rate": 2.944536221631512e-06, |
|
"loss": 0.1006, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 5.130456426054757, |
|
"learning_rate": 2.9442524761445994e-06, |
|
"loss": 0.2601, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.971900826446281, |
|
"grad_norm": 7.200199447067768, |
|
"learning_rate": 2.9439680204476253e-06, |
|
"loss": 0.1423, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.9741046831955923, |
|
"grad_norm": 9.478705252834285, |
|
"learning_rate": 2.943682854680471e-06, |
|
"loss": 0.2328, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9763085399449036, |
|
"grad_norm": 6.804473740823927, |
|
"learning_rate": 2.9433969789833666e-06, |
|
"loss": 0.1649, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.9785123966942149, |
|
"grad_norm": 8.72015955310415, |
|
"learning_rate": 2.9431103934968913e-06, |
|
"loss": 0.1498, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9807162534435262, |
|
"grad_norm": 8.837271411112722, |
|
"learning_rate": 2.942823098361973e-06, |
|
"loss": 0.2892, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9829201101928374, |
|
"grad_norm": 5.118218748161681, |
|
"learning_rate": 2.942535093719889e-06, |
|
"loss": 0.2846, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.9851239669421488, |
|
"grad_norm": 7.494790672708267, |
|
"learning_rate": 2.942246379712265e-06, |
|
"loss": 0.2539, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.9873278236914601, |
|
"grad_norm": 4.060442461438909, |
|
"learning_rate": 2.9419569564810767e-06, |
|
"loss": 0.3017, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.9895316804407713, |
|
"grad_norm": 7.559439545503309, |
|
"learning_rate": 2.9416668241686477e-06, |
|
"loss": 0.3009, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9917355371900827, |
|
"grad_norm": 4.546880168760103, |
|
"learning_rate": 2.9413759829176495e-06, |
|
"loss": 0.2334, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9939393939393939, |
|
"grad_norm": 5.258995040306454, |
|
"learning_rate": 2.9410844328711035e-06, |
|
"loss": 0.1016, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.9961432506887052, |
|
"grad_norm": 6.053619389338811, |
|
"learning_rate": 2.9407921741723793e-06, |
|
"loss": 0.2842, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.9983471074380166, |
|
"grad_norm": 5.754465135177187, |
|
"learning_rate": 2.9404992069651952e-06, |
|
"loss": 0.3233, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.0005509641873278, |
|
"grad_norm": 4.9612019994531975, |
|
"learning_rate": 2.9402055313936167e-06, |
|
"loss": 0.1417, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.002754820936639, |
|
"grad_norm": 5.6244102135741825, |
|
"learning_rate": 2.9399111476020595e-06, |
|
"loss": 0.2074, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.0049586776859505, |
|
"grad_norm": 3.6159502978670965, |
|
"learning_rate": 2.939616055735286e-06, |
|
"loss": 0.0678, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.0071625344352617, |
|
"grad_norm": 4.375326321208629, |
|
"learning_rate": 2.939320255938408e-06, |
|
"loss": 0.1807, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.009366391184573, |
|
"grad_norm": 5.632731167384229, |
|
"learning_rate": 2.9390237483568837e-06, |
|
"loss": 0.2764, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.0115702479338844, |
|
"grad_norm": 5.570279486677513, |
|
"learning_rate": 2.938726533136522e-06, |
|
"loss": 0.2779, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.0137741046831956, |
|
"grad_norm": 6.526157916565178, |
|
"learning_rate": 2.938428610423477e-06, |
|
"loss": 0.2135, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.0159779614325068, |
|
"grad_norm": 10.345737600617348, |
|
"learning_rate": 2.9381299803642527e-06, |
|
"loss": 0.1378, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.018181818181818, |
|
"grad_norm": 11.833262955463693, |
|
"learning_rate": 2.9378306431057e-06, |
|
"loss": 0.139, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.0203856749311295, |
|
"grad_norm": 8.344807812955779, |
|
"learning_rate": 2.9375305987950176e-06, |
|
"loss": 0.0673, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.0225895316804408, |
|
"grad_norm": 5.488027829308774, |
|
"learning_rate": 2.9372298475797512e-06, |
|
"loss": 0.1667, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.024793388429752, |
|
"grad_norm": 5.683146118159619, |
|
"learning_rate": 2.936928389607797e-06, |
|
"loss": 0.1326, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.0269972451790634, |
|
"grad_norm": 8.805650429808152, |
|
"learning_rate": 2.9366262250273957e-06, |
|
"loss": 0.375, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.0292011019283747, |
|
"grad_norm": 6.748749150131435, |
|
"learning_rate": 2.9363233539871365e-06, |
|
"loss": 0.201, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.031404958677686, |
|
"grad_norm": 5.494562831147715, |
|
"learning_rate": 2.936019776635956e-06, |
|
"loss": 0.1246, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.0336088154269973, |
|
"grad_norm": 7.74993668962211, |
|
"learning_rate": 2.9357154931231377e-06, |
|
"loss": 0.3916, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.0358126721763086, |
|
"grad_norm": 3.5550318373349805, |
|
"learning_rate": 2.935410503598313e-06, |
|
"loss": 0.1295, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.0380165289256198, |
|
"grad_norm": 11.370592776936205, |
|
"learning_rate": 2.9351048082114615e-06, |
|
"loss": 0.2021, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.040220385674931, |
|
"grad_norm": 6.916155255829466, |
|
"learning_rate": 2.934798407112907e-06, |
|
"loss": 0.2371, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.0424242424242425, |
|
"grad_norm": 4.724194212237889, |
|
"learning_rate": 2.9344913004533225e-06, |
|
"loss": 0.1488, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.0446280991735537, |
|
"grad_norm": 5.567099475250736, |
|
"learning_rate": 2.9341834883837276e-06, |
|
"loss": 0.371, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.046831955922865, |
|
"grad_norm": 4.549670157011353, |
|
"learning_rate": 2.9338749710554895e-06, |
|
"loss": 0.1426, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.0490358126721764, |
|
"grad_norm": 3.664024697925411, |
|
"learning_rate": 2.93356574862032e-06, |
|
"loss": 0.1283, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.0512396694214876, |
|
"grad_norm": 5.63620689419913, |
|
"learning_rate": 2.9332558212302795e-06, |
|
"loss": 0.3073, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.0534435261707988, |
|
"grad_norm": 5.920190052335488, |
|
"learning_rate": 2.9329451890377745e-06, |
|
"loss": 0.3822, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.0556473829201103, |
|
"grad_norm": 4.5184551389608965, |
|
"learning_rate": 2.9326338521955584e-06, |
|
"loss": 0.2088, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.0578512396694215, |
|
"grad_norm": 4.849019465438414, |
|
"learning_rate": 2.932321810856731e-06, |
|
"loss": 0.2116, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.0600550964187327, |
|
"grad_norm": 9.26096782279171, |
|
"learning_rate": 2.9320090651747376e-06, |
|
"loss": 0.2098, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.062258953168044, |
|
"grad_norm": 5.881206475990649, |
|
"learning_rate": 2.9316956153033713e-06, |
|
"loss": 0.3356, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.0644628099173554, |
|
"grad_norm": 5.2250601677751325, |
|
"learning_rate": 2.931381461396771e-06, |
|
"loss": 0.1708, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.0666666666666667, |
|
"grad_norm": 2.656994585543327, |
|
"learning_rate": 2.9310666036094206e-06, |
|
"loss": 0.1475, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.0688705234159779, |
|
"grad_norm": 5.808437762834121, |
|
"learning_rate": 2.9307510420961527e-06, |
|
"loss": 0.1207, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.0710743801652893, |
|
"grad_norm": 4.365336973897983, |
|
"learning_rate": 2.9304347770121433e-06, |
|
"loss": 0.1158, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.0732782369146006, |
|
"grad_norm": 8.115694826703175, |
|
"learning_rate": 2.9301178085129156e-06, |
|
"loss": 0.3765, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.0754820936639118, |
|
"grad_norm": 4.782580532088457, |
|
"learning_rate": 2.929800136754339e-06, |
|
"loss": 0.3289, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.0776859504132232, |
|
"grad_norm": 3.916128908970371, |
|
"learning_rate": 2.929481761892629e-06, |
|
"loss": 0.2164, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.0798898071625345, |
|
"grad_norm": 4.684165993134075, |
|
"learning_rate": 2.9291626840843446e-06, |
|
"loss": 0.19, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.0820936639118457, |
|
"grad_norm": 9.8418516597872, |
|
"learning_rate": 2.9288429034863927e-06, |
|
"loss": 0.1844, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.084297520661157, |
|
"grad_norm": 3.8532530548962187, |
|
"learning_rate": 2.928522420256026e-06, |
|
"loss": 0.1772, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.0865013774104684, |
|
"grad_norm": 3.98234924660316, |
|
"learning_rate": 2.928201234550841e-06, |
|
"loss": 0.1009, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.0887052341597796, |
|
"grad_norm": 4.128780640643151, |
|
"learning_rate": 2.927879346528781e-06, |
|
"loss": 0.2658, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 4.492600919543367, |
|
"learning_rate": 2.9275567563481335e-06, |
|
"loss": 0.1274, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.0931129476584023, |
|
"grad_norm": 20.5140436999278, |
|
"learning_rate": 2.927233464167533e-06, |
|
"loss": 0.1561, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.0953168044077135, |
|
"grad_norm": 11.77382225835973, |
|
"learning_rate": 2.926909470145957e-06, |
|
"loss": 0.3978, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.0975206611570247, |
|
"grad_norm": 5.588583740069422, |
|
"learning_rate": 2.9265847744427307e-06, |
|
"loss": 0.2566, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.0997245179063362, |
|
"grad_norm": 3.8060281982906097, |
|
"learning_rate": 2.9262593772175216e-06, |
|
"loss": 0.0789, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.1019283746556474, |
|
"grad_norm": 5.73992801608302, |
|
"learning_rate": 2.925933278630344e-06, |
|
"loss": 0.2749, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.1041322314049586, |
|
"grad_norm": 5.387804017557412, |
|
"learning_rate": 2.9256064788415577e-06, |
|
"loss": 0.1235, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.1063360881542699, |
|
"grad_norm": 4.598141851922886, |
|
"learning_rate": 2.9252789780118643e-06, |
|
"loss": 0.2522, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.1085399449035813, |
|
"grad_norm": 5.507025996681415, |
|
"learning_rate": 2.924950776302314e-06, |
|
"loss": 0.1645, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.1107438016528925, |
|
"grad_norm": 7.5059518483881735, |
|
"learning_rate": 2.924621873874298e-06, |
|
"loss": 0.1555, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.1129476584022038, |
|
"grad_norm": 6.046492745795359, |
|
"learning_rate": 2.924292270889555e-06, |
|
"loss": 0.1493, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.1151515151515152, |
|
"grad_norm": 9.450028349535398, |
|
"learning_rate": 2.923961967510167e-06, |
|
"loss": 0.1064, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.1173553719008265, |
|
"grad_norm": 10.272796845420118, |
|
"learning_rate": 2.9236309638985597e-06, |
|
"loss": 0.1292, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.1195592286501377, |
|
"grad_norm": 4.261466277927439, |
|
"learning_rate": 2.9232992602175044e-06, |
|
"loss": 0.1745, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.1217630853994491, |
|
"grad_norm": 3.6583947560721763, |
|
"learning_rate": 2.922966856630116e-06, |
|
"loss": 0.193, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.1239669421487604, |
|
"grad_norm": 4.194634352059231, |
|
"learning_rate": 2.9226337532998544e-06, |
|
"loss": 0.156, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.1261707988980716, |
|
"grad_norm": 5.271525237798146, |
|
"learning_rate": 2.9222999503905216e-06, |
|
"loss": 0.289, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.128374655647383, |
|
"grad_norm": 5.927523030580433, |
|
"learning_rate": 2.9219654480662657e-06, |
|
"loss": 0.2661, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.1305785123966943, |
|
"grad_norm": 3.674895915077014, |
|
"learning_rate": 2.921630246491578e-06, |
|
"loss": 0.0736, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.1327823691460055, |
|
"grad_norm": 10.052945905533496, |
|
"learning_rate": 2.9212943458312934e-06, |
|
"loss": 0.2025, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.1349862258953167, |
|
"grad_norm": 4.6865949140466086, |
|
"learning_rate": 2.9209577462505908e-06, |
|
"loss": 0.2828, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.1371900826446282, |
|
"grad_norm": 5.089446996586012, |
|
"learning_rate": 2.9206204479149927e-06, |
|
"loss": 0.2029, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.1393939393939394, |
|
"grad_norm": 9.684303751299739, |
|
"learning_rate": 2.9202824509903656e-06, |
|
"loss": 0.1925, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.1415977961432506, |
|
"grad_norm": 4.779441705636188, |
|
"learning_rate": 2.919943755642919e-06, |
|
"loss": 0.3132, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.143801652892562, |
|
"grad_norm": 3.4207597108395507, |
|
"learning_rate": 2.919604362039207e-06, |
|
"loss": 0.134, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.1460055096418733, |
|
"grad_norm": 7.449231612875146, |
|
"learning_rate": 2.9192642703461243e-06, |
|
"loss": 0.2683, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.1482093663911845, |
|
"grad_norm": 4.032811008116536, |
|
"learning_rate": 2.918923480730912e-06, |
|
"loss": 0.1896, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.1504132231404958, |
|
"grad_norm": 4.905485293464102, |
|
"learning_rate": 2.918581993361153e-06, |
|
"loss": 0.1989, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.1526170798898072, |
|
"grad_norm": 7.339812817109276, |
|
"learning_rate": 2.9182398084047736e-06, |
|
"loss": 0.1412, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.1548209366391184, |
|
"grad_norm": 3.3709215655039113, |
|
"learning_rate": 2.9178969260300427e-06, |
|
"loss": 0.0843, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.1570247933884297, |
|
"grad_norm": 3.9137809188731207, |
|
"learning_rate": 2.917553346405572e-06, |
|
"loss": 0.1604, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.1592286501377411, |
|
"grad_norm": 4.547245306347712, |
|
"learning_rate": 2.9172090697003175e-06, |
|
"loss": 0.1775, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.1614325068870524, |
|
"grad_norm": 3.8176334567973282, |
|
"learning_rate": 2.916864096083576e-06, |
|
"loss": 0.2646, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.1636363636363636, |
|
"grad_norm": 3.534012301229345, |
|
"learning_rate": 2.916518425724989e-06, |
|
"loss": 0.2208, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.1658402203856748, |
|
"grad_norm": 5.268005108434388, |
|
"learning_rate": 2.9161720587945387e-06, |
|
"loss": 0.207, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.1680440771349863, |
|
"grad_norm": 5.3661996707943, |
|
"learning_rate": 2.9158249954625514e-06, |
|
"loss": 0.0658, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.1702479338842975, |
|
"grad_norm": 4.28879923967976, |
|
"learning_rate": 2.915477235899695e-06, |
|
"loss": 0.1175, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.172451790633609, |
|
"grad_norm": 5.963529550956904, |
|
"learning_rate": 2.9151287802769803e-06, |
|
"loss": 0.1794, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.1746556473829202, |
|
"grad_norm": 5.694876856120757, |
|
"learning_rate": 2.91477962876576e-06, |
|
"loss": 0.0812, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.1768595041322314, |
|
"grad_norm": 4.351944044210483, |
|
"learning_rate": 2.9144297815377286e-06, |
|
"loss": 0.3211, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.1790633608815426, |
|
"grad_norm": 3.972657146689943, |
|
"learning_rate": 2.9140792387649237e-06, |
|
"loss": 0.1349, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.181267217630854, |
|
"grad_norm": 3.7197957379879587, |
|
"learning_rate": 2.9137280006197245e-06, |
|
"loss": 0.2268, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.1834710743801653, |
|
"grad_norm": 5.329312317532163, |
|
"learning_rate": 2.9133760672748516e-06, |
|
"loss": 0.2767, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.1856749311294765, |
|
"grad_norm": 3.883042008810904, |
|
"learning_rate": 2.9130234389033687e-06, |
|
"loss": 0.1757, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.187878787878788, |
|
"grad_norm": 4.5902195667184635, |
|
"learning_rate": 2.9126701156786794e-06, |
|
"loss": 0.241, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.1900826446280992, |
|
"grad_norm": 3.3442026394957405, |
|
"learning_rate": 2.912316097774531e-06, |
|
"loss": 0.1603, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.1922865013774104, |
|
"grad_norm": 4.162882139124656, |
|
"learning_rate": 2.9119613853650117e-06, |
|
"loss": 0.2736, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.1944903581267217, |
|
"grad_norm": 4.783670244425796, |
|
"learning_rate": 2.9116059786245505e-06, |
|
"loss": 0.2848, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.1966942148760331, |
|
"grad_norm": 3.644438346409876, |
|
"learning_rate": 2.9112498777279187e-06, |
|
"loss": 0.1325, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.1988980716253443, |
|
"grad_norm": 4.2703260216286285, |
|
"learning_rate": 2.9108930828502284e-06, |
|
"loss": 0.2723, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.2011019283746556, |
|
"grad_norm": 2.5801703466779036, |
|
"learning_rate": 2.9105355941669337e-06, |
|
"loss": 0.1598, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.203305785123967, |
|
"grad_norm": 9.2861271196882, |
|
"learning_rate": 2.9101774118538283e-06, |
|
"loss": 0.254, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.2055096418732782, |
|
"grad_norm": 4.017243467065785, |
|
"learning_rate": 2.90981853608705e-06, |
|
"loss": 0.2379, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.2077134986225895, |
|
"grad_norm": 3.9003651881550945, |
|
"learning_rate": 2.909458967043074e-06, |
|
"loss": 0.0777, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.2099173553719007, |
|
"grad_norm": 8.654932450813039, |
|
"learning_rate": 2.909098704898718e-06, |
|
"loss": 0.2069, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 6.2364635749542625, |
|
"learning_rate": 2.908737749831142e-06, |
|
"loss": 0.2134, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.2143250688705234, |
|
"grad_norm": 6.731388982582371, |
|
"learning_rate": 2.9083761020178443e-06, |
|
"loss": 0.2253, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.2165289256198348, |
|
"grad_norm": 3.502965933051062, |
|
"learning_rate": 2.908013761636665e-06, |
|
"loss": 0.2066, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.218732782369146, |
|
"grad_norm": 14.711406216867847, |
|
"learning_rate": 2.9076507288657855e-06, |
|
"loss": 0.297, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.2209366391184573, |
|
"grad_norm": 17.15730611268536, |
|
"learning_rate": 2.9072870038837265e-06, |
|
"loss": 0.3287, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.2231404958677685, |
|
"grad_norm": 11.708958106392894, |
|
"learning_rate": 2.906922586869349e-06, |
|
"loss": 0.2429, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.22534435261708, |
|
"grad_norm": 5.040925462071028, |
|
"learning_rate": 2.906557478001855e-06, |
|
"loss": 0.3866, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.2275482093663912, |
|
"grad_norm": 5.4384090934766105, |
|
"learning_rate": 2.9061916774607868e-06, |
|
"loss": 0.1418, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.2297520661157024, |
|
"grad_norm": 4.224206508632855, |
|
"learning_rate": 2.9058251854260266e-06, |
|
"loss": 0.2607, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.2319559228650139, |
|
"grad_norm": 6.324360645802438, |
|
"learning_rate": 2.9054580020777965e-06, |
|
"loss": 0.1429, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.234159779614325, |
|
"grad_norm": 4.560338619966233, |
|
"learning_rate": 2.9050901275966583e-06, |
|
"loss": 0.2047, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.2363636363636363, |
|
"grad_norm": 4.978147123367892, |
|
"learning_rate": 2.9047215621635138e-06, |
|
"loss": 0.1914, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.2385674931129476, |
|
"grad_norm": 3.771414086700111, |
|
"learning_rate": 2.904352305959606e-06, |
|
"loss": 0.2456, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.240771349862259, |
|
"grad_norm": 5.891321373092408, |
|
"learning_rate": 2.903982359166515e-06, |
|
"loss": 0.1384, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.2429752066115702, |
|
"grad_norm": 7.95915988811828, |
|
"learning_rate": 2.9036117219661623e-06, |
|
"loss": 0.1797, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.2451790633608815, |
|
"grad_norm": 6.605600633405355, |
|
"learning_rate": 2.903240394540809e-06, |
|
"loss": 0.2491, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.247382920110193, |
|
"grad_norm": 5.488205726433599, |
|
"learning_rate": 2.9028683770730546e-06, |
|
"loss": 0.118, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.2495867768595041, |
|
"grad_norm": 4.012615367050864, |
|
"learning_rate": 2.9024956697458384e-06, |
|
"loss": 0.2405, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.2517906336088154, |
|
"grad_norm": 3.5236670281097413, |
|
"learning_rate": 2.9021222727424384e-06, |
|
"loss": 0.2286, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.2539944903581266, |
|
"grad_norm": 9.955313424056465, |
|
"learning_rate": 2.9017481862464737e-06, |
|
"loss": 0.1459, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.256198347107438, |
|
"grad_norm": 6.660973468538957, |
|
"learning_rate": 2.9013734104419e-06, |
|
"loss": 0.1938, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.2584022038567493, |
|
"grad_norm": 16.963451273767287, |
|
"learning_rate": 2.900997945513013e-06, |
|
"loss": 0.1876, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.2606060606060607, |
|
"grad_norm": 5.7870946489445085, |
|
"learning_rate": 2.900621791644448e-06, |
|
"loss": 0.2791, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.262809917355372, |
|
"grad_norm": 10.352260259287972, |
|
"learning_rate": 2.9002449490211776e-06, |
|
"loss": 0.1946, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.2650137741046832, |
|
"grad_norm": 6.861136772777287, |
|
"learning_rate": 2.899867417828514e-06, |
|
"loss": 0.3179, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.2672176308539944, |
|
"grad_norm": 10.960820361107482, |
|
"learning_rate": 2.8994891982521083e-06, |
|
"loss": 0.329, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.2694214876033059, |
|
"grad_norm": 21.54723751149437, |
|
"learning_rate": 2.8991102904779495e-06, |
|
"loss": 0.2181, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.271625344352617, |
|
"grad_norm": 14.02787145282944, |
|
"learning_rate": 2.898730694692365e-06, |
|
"loss": 0.2812, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.2738292011019283, |
|
"grad_norm": 7.0557599098044514, |
|
"learning_rate": 2.8983504110820214e-06, |
|
"loss": 0.353, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.2760330578512398, |
|
"grad_norm": 4.770915540206801, |
|
"learning_rate": 2.8979694398339224e-06, |
|
"loss": 0.1426, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.278236914600551, |
|
"grad_norm": 4.483304136348754, |
|
"learning_rate": 2.8975877811354097e-06, |
|
"loss": 0.1747, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.2804407713498622, |
|
"grad_norm": 6.3811646159524305, |
|
"learning_rate": 2.8972054351741647e-06, |
|
"loss": 0.216, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.2826446280991735, |
|
"grad_norm": 8.112493117800566, |
|
"learning_rate": 2.8968224021382054e-06, |
|
"loss": 0.2906, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.284848484848485, |
|
"grad_norm": 4.851798637905597, |
|
"learning_rate": 2.8964386822158878e-06, |
|
"loss": 0.2229, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.2870523415977961, |
|
"grad_norm": 5.317762416772784, |
|
"learning_rate": 2.8960542755959063e-06, |
|
"loss": 0.1888, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.2892561983471074, |
|
"grad_norm": 5.350207917205157, |
|
"learning_rate": 2.895669182467292e-06, |
|
"loss": 0.1425, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.2914600550964188, |
|
"grad_norm": 4.3468333293964445, |
|
"learning_rate": 2.8952834030194145e-06, |
|
"loss": 0.1799, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.29366391184573, |
|
"grad_norm": 3.3336068941106656, |
|
"learning_rate": 2.8948969374419803e-06, |
|
"loss": 0.2205, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.2958677685950413, |
|
"grad_norm": 2.620320493915155, |
|
"learning_rate": 2.894509785925034e-06, |
|
"loss": 0.1478, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.2980716253443525, |
|
"grad_norm": 4.3011475238646275, |
|
"learning_rate": 2.894121948658957e-06, |
|
"loss": 0.2253, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.300275482093664, |
|
"grad_norm": 2.8022877416603422, |
|
"learning_rate": 2.8937334258344676e-06, |
|
"loss": 0.166, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.3024793388429752, |
|
"grad_norm": 3.7618953894768494, |
|
"learning_rate": 2.893344217642621e-06, |
|
"loss": 0.2193, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.3046831955922866, |
|
"grad_norm": 4.0491570230427625, |
|
"learning_rate": 2.892954324274812e-06, |
|
"loss": 0.319, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.3068870523415979, |
|
"grad_norm": 4.522049092181143, |
|
"learning_rate": 2.892563745922769e-06, |
|
"loss": 0.2692, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.309090909090909, |
|
"grad_norm": 3.055036543425317, |
|
"learning_rate": 2.8921724827785583e-06, |
|
"loss": 0.2053, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.3112947658402203, |
|
"grad_norm": 5.267763701241814, |
|
"learning_rate": 2.8917805350345846e-06, |
|
"loss": 0.2822, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.3134986225895318, |
|
"grad_norm": 4.582217428090828, |
|
"learning_rate": 2.891387902883586e-06, |
|
"loss": 0.289, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.315702479338843, |
|
"grad_norm": 4.043911021583197, |
|
"learning_rate": 2.890994586518641e-06, |
|
"loss": 0.1275, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.3179063360881542, |
|
"grad_norm": 5.754149214212578, |
|
"learning_rate": 2.8906005861331613e-06, |
|
"loss": 0.2715, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.3201101928374657, |
|
"grad_norm": 6.676367069618446, |
|
"learning_rate": 2.8902059019208968e-06, |
|
"loss": 0.267, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.322314049586777, |
|
"grad_norm": 3.985915686812139, |
|
"learning_rate": 2.8898105340759333e-06, |
|
"loss": 0.2928, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.3245179063360881, |
|
"grad_norm": 3.0780099598439006, |
|
"learning_rate": 2.8894144827926924e-06, |
|
"loss": 0.0972, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.3267217630853994, |
|
"grad_norm": 4.750823583413613, |
|
"learning_rate": 2.8890177482659317e-06, |
|
"loss": 0.1178, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.3289256198347108, |
|
"grad_norm": 4.26762264184326, |
|
"learning_rate": 2.888620330690746e-06, |
|
"loss": 0.2721, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.331129476584022, |
|
"grad_norm": 2.9681851058272506, |
|
"learning_rate": 2.8882222302625646e-06, |
|
"loss": 0.1618, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 3.9008419992283043, |
|
"learning_rate": 2.8878234471771524e-06, |
|
"loss": 0.2589, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.3355371900826447, |
|
"grad_norm": 4.32964863227825, |
|
"learning_rate": 2.887423981630612e-06, |
|
"loss": 0.3656, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.337741046831956, |
|
"grad_norm": 2.822788224808757, |
|
"learning_rate": 2.8870238338193794e-06, |
|
"loss": 0.1352, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.3399449035812672, |
|
"grad_norm": 5.001949178343276, |
|
"learning_rate": 2.8866230039402275e-06, |
|
"loss": 0.2044, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.3421487603305784, |
|
"grad_norm": 3.2789633167007586, |
|
"learning_rate": 2.8862214921902627e-06, |
|
"loss": 0.2084, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.3443526170798898, |
|
"grad_norm": 3.5145152327020757, |
|
"learning_rate": 2.88581929876693e-06, |
|
"loss": 0.1822, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.346556473829201, |
|
"grad_norm": 2.846767948249379, |
|
"learning_rate": 2.885416423868007e-06, |
|
"loss": 0.1922, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.3487603305785125, |
|
"grad_norm": 4.320449712641174, |
|
"learning_rate": 2.885012867691607e-06, |
|
"loss": 0.1512, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.3509641873278238, |
|
"grad_norm": 5.126925822393263, |
|
"learning_rate": 2.884608630436178e-06, |
|
"loss": 0.342, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.353168044077135, |
|
"grad_norm": 5.913929122619571, |
|
"learning_rate": 2.884203712300504e-06, |
|
"loss": 0.2981, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.3553719008264462, |
|
"grad_norm": 3.557959909732146, |
|
"learning_rate": 2.8837981134837033e-06, |
|
"loss": 0.2138, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.3575757575757577, |
|
"grad_norm": 3.186541176730338, |
|
"learning_rate": 2.8833918341852288e-06, |
|
"loss": 0.0836, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.3597796143250689, |
|
"grad_norm": 4.556846309605876, |
|
"learning_rate": 2.882984874604867e-06, |
|
"loss": 0.2688, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.3619834710743801, |
|
"grad_norm": 4.406950831041034, |
|
"learning_rate": 2.8825772349427413e-06, |
|
"loss": 0.1912, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.3641873278236916, |
|
"grad_norm": 2.398733737100056, |
|
"learning_rate": 2.882168915399307e-06, |
|
"loss": 0.107, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.3663911845730028, |
|
"grad_norm": 5.4958527833768285, |
|
"learning_rate": 2.8817599161753557e-06, |
|
"loss": 0.1844, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.368595041322314, |
|
"grad_norm": 8.010170031896015, |
|
"learning_rate": 2.8813502374720128e-06, |
|
"loss": 0.1697, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.3707988980716252, |
|
"grad_norm": 4.153983084539555, |
|
"learning_rate": 2.8809398794907367e-06, |
|
"loss": 0.1748, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.3730027548209367, |
|
"grad_norm": 5.043276780948253, |
|
"learning_rate": 2.880528842433321e-06, |
|
"loss": 0.2369, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.375206611570248, |
|
"grad_norm": 5.363109426054472, |
|
"learning_rate": 2.8801171265018927e-06, |
|
"loss": 0.1227, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.3774104683195592, |
|
"grad_norm": 5.402978025713253, |
|
"learning_rate": 2.8797047318989125e-06, |
|
"loss": 0.2394, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.3796143250688706, |
|
"grad_norm": 4.270847937611633, |
|
"learning_rate": 2.879291658827176e-06, |
|
"loss": 0.2998, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.3818181818181818, |
|
"grad_norm": 4.567221237002694, |
|
"learning_rate": 2.878877907489811e-06, |
|
"loss": 0.2392, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.384022038567493, |
|
"grad_norm": 4.795273563069606, |
|
"learning_rate": 2.8784634780902797e-06, |
|
"loss": 0.379, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.3862258953168043, |
|
"grad_norm": 6.541946363393915, |
|
"learning_rate": 2.878048370832377e-06, |
|
"loss": 0.231, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.3884297520661157, |
|
"grad_norm": 6.028831246833223, |
|
"learning_rate": 2.8776325859202315e-06, |
|
"loss": 0.2724, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.390633608815427, |
|
"grad_norm": 4.511285043111639, |
|
"learning_rate": 2.877216123558306e-06, |
|
"loss": 0.2531, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.3928374655647384, |
|
"grad_norm": 4.6594481678188915, |
|
"learning_rate": 2.8767989839513945e-06, |
|
"loss": 0.2905, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.3950413223140496, |
|
"grad_norm": 5.455069765928385, |
|
"learning_rate": 2.8763811673046257e-06, |
|
"loss": 0.1928, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.3972451790633609, |
|
"grad_norm": 5.98529944300178, |
|
"learning_rate": 2.875962673823461e-06, |
|
"loss": 0.2185, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.399449035812672, |
|
"grad_norm": 4.805666817335156, |
|
"learning_rate": 2.8755435037136932e-06, |
|
"loss": 0.2429, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.4016528925619833, |
|
"grad_norm": 4.467346053498231, |
|
"learning_rate": 2.8751236571814497e-06, |
|
"loss": 0.2098, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.4038567493112948, |
|
"grad_norm": 3.573352650286319, |
|
"learning_rate": 2.8747031344331895e-06, |
|
"loss": 0.1255, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.406060606060606, |
|
"grad_norm": 6.158154604233198, |
|
"learning_rate": 2.8742819356757044e-06, |
|
"loss": 0.197, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.4082644628099175, |
|
"grad_norm": 4.281917416552195, |
|
"learning_rate": 2.873860061116118e-06, |
|
"loss": 0.2842, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.4104683195592287, |
|
"grad_norm": 11.226892423174888, |
|
"learning_rate": 2.8734375109618874e-06, |
|
"loss": 0.2209, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.41267217630854, |
|
"grad_norm": 4.3225627738439645, |
|
"learning_rate": 2.873014285420801e-06, |
|
"loss": 0.2152, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.4148760330578511, |
|
"grad_norm": 3.8825529972131867, |
|
"learning_rate": 2.87259038470098e-06, |
|
"loss": 0.2319, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.4170798898071626, |
|
"grad_norm": 6.079197790722616, |
|
"learning_rate": 2.872165809010877e-06, |
|
"loss": 0.2112, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.4192837465564738, |
|
"grad_norm": 6.1303289009298805, |
|
"learning_rate": 2.8717405585592767e-06, |
|
"loss": 0.2118, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.421487603305785, |
|
"grad_norm": 5.12932154131706, |
|
"learning_rate": 2.871314633555296e-06, |
|
"loss": 0.2052, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.4236914600550965, |
|
"grad_norm": 4.481220346865912, |
|
"learning_rate": 2.870888034208383e-06, |
|
"loss": 0.0703, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.4258953168044077, |
|
"grad_norm": 6.864001797309864, |
|
"learning_rate": 2.8704607607283177e-06, |
|
"loss": 0.0546, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.428099173553719, |
|
"grad_norm": 3.8245534470436295, |
|
"learning_rate": 2.8700328133252115e-06, |
|
"loss": 0.2223, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.4303030303030302, |
|
"grad_norm": 4.916213275333236, |
|
"learning_rate": 2.869604192209507e-06, |
|
"loss": 0.1604, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.4325068870523416, |
|
"grad_norm": 3.2283359865790784, |
|
"learning_rate": 2.869174897591978e-06, |
|
"loss": 0.1728, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.4347107438016529, |
|
"grad_norm": 9.18120748323431, |
|
"learning_rate": 2.8687449296837313e-06, |
|
"loss": 0.3458, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.4369146005509643, |
|
"grad_norm": 5.808199958957165, |
|
"learning_rate": 2.8683142886962017e-06, |
|
"loss": 0.1804, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.4391184573002755, |
|
"grad_norm": 5.665941277081976, |
|
"learning_rate": 2.867882974841157e-06, |
|
"loss": 0.1458, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.4413223140495868, |
|
"grad_norm": 3.8288010492231677, |
|
"learning_rate": 2.867450988330696e-06, |
|
"loss": 0.1645, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.443526170798898, |
|
"grad_norm": 4.1049680267773425, |
|
"learning_rate": 2.8670183293772466e-06, |
|
"loss": 0.3046, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.4457300275482092, |
|
"grad_norm": 4.5412954160400485, |
|
"learning_rate": 2.86658499819357e-06, |
|
"loss": 0.1549, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.4479338842975207, |
|
"grad_norm": 4.423878900596039, |
|
"learning_rate": 2.8661509949927553e-06, |
|
"loss": 0.2464, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.450137741046832, |
|
"grad_norm": 6.658838123174504, |
|
"learning_rate": 2.865716319988224e-06, |
|
"loss": 0.2806, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.4523415977961434, |
|
"grad_norm": 3.4922459804042187, |
|
"learning_rate": 2.8652809733937266e-06, |
|
"loss": 0.1226, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 4.986328878756679, |
|
"learning_rate": 2.8648449554233447e-06, |
|
"loss": 0.3537, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.4567493112947658, |
|
"grad_norm": 4.234882868424787, |
|
"learning_rate": 2.8644082662914905e-06, |
|
"loss": 0.1714, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.458953168044077, |
|
"grad_norm": 3.9070490424818747, |
|
"learning_rate": 2.8639709062129044e-06, |
|
"loss": 0.1269, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.4611570247933885, |
|
"grad_norm": 4.998076466309302, |
|
"learning_rate": 2.863532875402658e-06, |
|
"loss": 0.1779, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.4633608815426997, |
|
"grad_norm": 5.106983845887589, |
|
"learning_rate": 2.8630941740761533e-06, |
|
"loss": 0.1186, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.465564738292011, |
|
"grad_norm": 3.8551902713378965, |
|
"learning_rate": 2.8626548024491213e-06, |
|
"loss": 0.1354, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.4677685950413224, |
|
"grad_norm": 5.2301301811454515, |
|
"learning_rate": 2.862214760737622e-06, |
|
"loss": 0.2037, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.4699724517906336, |
|
"grad_norm": 6.0956564948874306, |
|
"learning_rate": 2.861774049158046e-06, |
|
"loss": 0.2184, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.4721763085399449, |
|
"grad_norm": 6.291702439669367, |
|
"learning_rate": 2.8613326679271136e-06, |
|
"loss": 0.1466, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.474380165289256, |
|
"grad_norm": 3.713580018900493, |
|
"learning_rate": 2.8608906172618723e-06, |
|
"loss": 0.2072, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.4765840220385675, |
|
"grad_norm": 5.537260982915714, |
|
"learning_rate": 2.8604478973797005e-06, |
|
"loss": 0.1488, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.4787878787878788, |
|
"grad_norm": 5.820569540535823, |
|
"learning_rate": 2.860004508498306e-06, |
|
"loss": 0.223, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.4809917355371902, |
|
"grad_norm": 6.1571860143662684, |
|
"learning_rate": 2.8595604508357255e-06, |
|
"loss": 0.4112, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.4831955922865014, |
|
"grad_norm": 3.6498194790392082, |
|
"learning_rate": 2.859115724610322e-06, |
|
"loss": 0.1296, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.4853994490358127, |
|
"grad_norm": 4.172319048467667, |
|
"learning_rate": 2.858670330040791e-06, |
|
"loss": 0.2823, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.487603305785124, |
|
"grad_norm": 3.5422809914301, |
|
"learning_rate": 2.8582242673461545e-06, |
|
"loss": 0.2723, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.4898071625344351, |
|
"grad_norm": 3.994421032502254, |
|
"learning_rate": 2.857777536745763e-06, |
|
"loss": 0.2625, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.4920110192837466, |
|
"grad_norm": 2.4442328235159136, |
|
"learning_rate": 2.857330138459297e-06, |
|
"loss": 0.0773, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.4942148760330578, |
|
"grad_norm": 2.956014467391965, |
|
"learning_rate": 2.856882072706763e-06, |
|
"loss": 0.0263, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.4964187327823693, |
|
"grad_norm": 5.063950525118159, |
|
"learning_rate": 2.856433339708498e-06, |
|
"loss": 0.2555, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.4986225895316805, |
|
"grad_norm": 5.701498370772839, |
|
"learning_rate": 2.855983939685165e-06, |
|
"loss": 0.2644, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.5008264462809917, |
|
"grad_norm": 3.653863039822445, |
|
"learning_rate": 2.8555338728577573e-06, |
|
"loss": 0.1186, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.503030303030303, |
|
"grad_norm": 3.9380606238532856, |
|
"learning_rate": 2.8550831394475946e-06, |
|
"loss": 0.2569, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.5052341597796142, |
|
"grad_norm": 6.240511119913605, |
|
"learning_rate": 2.854631739676324e-06, |
|
"loss": 0.2301, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.5074380165289256, |
|
"grad_norm": 3.969971056794526, |
|
"learning_rate": 2.8541796737659223e-06, |
|
"loss": 0.2162, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.509641873278237, |
|
"grad_norm": 4.593297828759556, |
|
"learning_rate": 2.8537269419386913e-06, |
|
"loss": 0.2088, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.5118457300275483, |
|
"grad_norm": 3.3975859793614074, |
|
"learning_rate": 2.853273544417262e-06, |
|
"loss": 0.1733, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.5140495867768595, |
|
"grad_norm": 4.456601479513547, |
|
"learning_rate": 2.8528194814245925e-06, |
|
"loss": 0.153, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.5162534435261708, |
|
"grad_norm": 5.024001056036946, |
|
"learning_rate": 2.852364753183968e-06, |
|
"loss": 0.267, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.518457300275482, |
|
"grad_norm": 3.9770565806572504, |
|
"learning_rate": 2.851909359919e-06, |
|
"loss": 0.215, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.5206611570247934, |
|
"grad_norm": 4.263990726687718, |
|
"learning_rate": 2.851453301853629e-06, |
|
"loss": 0.1481, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.5228650137741047, |
|
"grad_norm": 10.077089034160084, |
|
"learning_rate": 2.8509965792121202e-06, |
|
"loss": 0.1462, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.525068870523416, |
|
"grad_norm": 4.929665896210096, |
|
"learning_rate": 2.850539192219067e-06, |
|
"loss": 0.1636, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.5272727272727273, |
|
"grad_norm": 5.860454925100347, |
|
"learning_rate": 2.850081141099389e-06, |
|
"loss": 0.3131, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.5294765840220386, |
|
"grad_norm": 3.831102793731722, |
|
"learning_rate": 2.8496224260783324e-06, |
|
"loss": 0.2469, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.5316804407713498, |
|
"grad_norm": 2.8227649238802925, |
|
"learning_rate": 2.8491630473814703e-06, |
|
"loss": 0.1926, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.533884297520661, |
|
"grad_norm": 6.082271321101482, |
|
"learning_rate": 2.8487030052347015e-06, |
|
"loss": 0.3531, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.5360881542699725, |
|
"grad_norm": 4.213384468163032, |
|
"learning_rate": 2.8482422998642513e-06, |
|
"loss": 0.1729, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.5382920110192837, |
|
"grad_norm": 4.192370379792702, |
|
"learning_rate": 2.8477809314966718e-06, |
|
"loss": 0.1484, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.5404958677685952, |
|
"grad_norm": 4.619842000189168, |
|
"learning_rate": 2.84731890035884e-06, |
|
"loss": 0.1681, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.5426997245179064, |
|
"grad_norm": 6.240088261713722, |
|
"learning_rate": 2.84685620667796e-06, |
|
"loss": 0.1121, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.5449035812672176, |
|
"grad_norm": 7.318489937821377, |
|
"learning_rate": 2.8463928506815605e-06, |
|
"loss": 0.238, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.5471074380165288, |
|
"grad_norm": 11.369549225694884, |
|
"learning_rate": 2.8459288325974963e-06, |
|
"loss": 0.221, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.54931129476584, |
|
"grad_norm": 3.9222359665127127, |
|
"learning_rate": 2.845464152653948e-06, |
|
"loss": 0.1914, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.5515151515151515, |
|
"grad_norm": 3.9561654328009626, |
|
"learning_rate": 2.8449988110794224e-06, |
|
"loss": 0.1936, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.553719008264463, |
|
"grad_norm": 5.641732295279923, |
|
"learning_rate": 2.8445328081027502e-06, |
|
"loss": 0.196, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.5559228650137742, |
|
"grad_norm": 5.310630607978979, |
|
"learning_rate": 2.844066143953088e-06, |
|
"loss": 0.4102, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.5581267217630854, |
|
"grad_norm": 3.7257912432993767, |
|
"learning_rate": 2.8435988188599175e-06, |
|
"loss": 0.1045, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.5603305785123966, |
|
"grad_norm": 5.021805378306946, |
|
"learning_rate": 2.8431308330530453e-06, |
|
"loss": 0.2973, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.5625344352617079, |
|
"grad_norm": 5.460789289795145, |
|
"learning_rate": 2.8426621867626043e-06, |
|
"loss": 0.177, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.5647382920110193, |
|
"grad_norm": 5.497664501546296, |
|
"learning_rate": 2.842192880219049e-06, |
|
"loss": 0.242, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.5669421487603306, |
|
"grad_norm": 5.424348953607276, |
|
"learning_rate": 2.841722913653162e-06, |
|
"loss": 0.2714, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.569146005509642, |
|
"grad_norm": 5.001059981673565, |
|
"learning_rate": 2.8412522872960486e-06, |
|
"loss": 0.2323, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.5713498622589532, |
|
"grad_norm": 3.902252684011861, |
|
"learning_rate": 2.840781001379139e-06, |
|
"loss": 0.0884, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.5735537190082645, |
|
"grad_norm": 4.121371795480466, |
|
"learning_rate": 2.8403090561341878e-06, |
|
"loss": 0.2631, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 3.7110214921650324, |
|
"learning_rate": 2.8398364517932727e-06, |
|
"loss": 0.2998, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.577961432506887, |
|
"grad_norm": 6.2025699416635, |
|
"learning_rate": 2.8393631885887986e-06, |
|
"loss": 0.1385, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.5801652892561984, |
|
"grad_norm": 3.279306066462292, |
|
"learning_rate": 2.8388892667534903e-06, |
|
"loss": 0.1619, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.5823691460055096, |
|
"grad_norm": 3.1972358459552996, |
|
"learning_rate": 2.838414686520399e-06, |
|
"loss": 0.2275, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.584573002754821, |
|
"grad_norm": 4.729096055141415, |
|
"learning_rate": 2.8379394481229e-06, |
|
"loss": 0.1573, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.5867768595041323, |
|
"grad_norm": 4.079885310824538, |
|
"learning_rate": 2.837463551794691e-06, |
|
"loss": 0.3143, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.5889807162534435, |
|
"grad_norm": 4.290043095300846, |
|
"learning_rate": 2.8369869977697927e-06, |
|
"loss": 0.1397, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.5911845730027547, |
|
"grad_norm": 3.096289822814843, |
|
"learning_rate": 2.8365097862825516e-06, |
|
"loss": 0.0809, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.593388429752066, |
|
"grad_norm": 3.9955918707915945, |
|
"learning_rate": 2.836031917567636e-06, |
|
"loss": 0.1987, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.5955922865013774, |
|
"grad_norm": 5.319859877967437, |
|
"learning_rate": 2.835553391860036e-06, |
|
"loss": 0.1363, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.5977961432506889, |
|
"grad_norm": 5.65396766215741, |
|
"learning_rate": 2.8350742093950677e-06, |
|
"loss": 0.2859, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 7.765991667810481, |
|
"learning_rate": 2.8345943704083686e-06, |
|
"loss": 0.246, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.6022038567493113, |
|
"grad_norm": 2.583152548427552, |
|
"learning_rate": 2.834113875135898e-06, |
|
"loss": 0.1196, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.6044077134986225, |
|
"grad_norm": 3.9093087783630867, |
|
"learning_rate": 2.8336327238139407e-06, |
|
"loss": 0.2921, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.6066115702479338, |
|
"grad_norm": 3.4398541053621354, |
|
"learning_rate": 2.8331509166791015e-06, |
|
"loss": 0.1223, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.6088154269972452, |
|
"grad_norm": 3.8423373441579836, |
|
"learning_rate": 2.832668453968309e-06, |
|
"loss": 0.234, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.6110192837465565, |
|
"grad_norm": 4.044067317705418, |
|
"learning_rate": 2.832185335918813e-06, |
|
"loss": 0.1573, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.613223140495868, |
|
"grad_norm": 2.8739555688196394, |
|
"learning_rate": 2.8317015627681876e-06, |
|
"loss": 0.1728, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.6154269972451791, |
|
"grad_norm": 4.298425879403751, |
|
"learning_rate": 2.8312171347543275e-06, |
|
"loss": 0.1456, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.6176308539944904, |
|
"grad_norm": 4.590580736782103, |
|
"learning_rate": 2.83073205211545e-06, |
|
"loss": 0.2604, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.6198347107438016, |
|
"grad_norm": 5.5527106677455595, |
|
"learning_rate": 2.830246315090094e-06, |
|
"loss": 0.337, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.6220385674931128, |
|
"grad_norm": 4.1351188294540835, |
|
"learning_rate": 2.82975992391712e-06, |
|
"loss": 0.2419, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.6242424242424243, |
|
"grad_norm": 4.644664912832784, |
|
"learning_rate": 2.8292728788357105e-06, |
|
"loss": 0.3959, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.6264462809917355, |
|
"grad_norm": 3.556852835265641, |
|
"learning_rate": 2.82878518008537e-06, |
|
"loss": 0.1947, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.628650137741047, |
|
"grad_norm": 3.630483228818893, |
|
"learning_rate": 2.8282968279059235e-06, |
|
"loss": 0.0405, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.6308539944903582, |
|
"grad_norm": 4.745386954667148, |
|
"learning_rate": 2.8278078225375183e-06, |
|
"loss": 0.1604, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.6330578512396694, |
|
"grad_norm": 3.53832665010997, |
|
"learning_rate": 2.827318164220622e-06, |
|
"loss": 0.2007, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.6352617079889806, |
|
"grad_norm": 5.867754249401183, |
|
"learning_rate": 2.826827853196024e-06, |
|
"loss": 0.1423, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.6374655647382919, |
|
"grad_norm": 4.222754274583092, |
|
"learning_rate": 2.8263368897048346e-06, |
|
"loss": 0.1337, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.6396694214876033, |
|
"grad_norm": 3.117149882374395, |
|
"learning_rate": 2.825845273988484e-06, |
|
"loss": 0.1453, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.6418732782369148, |
|
"grad_norm": 3.4038444714885077, |
|
"learning_rate": 2.8253530062887244e-06, |
|
"loss": 0.0563, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.644077134986226, |
|
"grad_norm": 4.623079936470395, |
|
"learning_rate": 2.8248600868476277e-06, |
|
"loss": 0.1519, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.6462809917355372, |
|
"grad_norm": 3.313018135102592, |
|
"learning_rate": 2.824366515907587e-06, |
|
"loss": 0.2094, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.6484848484848484, |
|
"grad_norm": 5.2839049379385905, |
|
"learning_rate": 2.8238722937113153e-06, |
|
"loss": 0.1547, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.6506887052341597, |
|
"grad_norm": 2.9477242559704817, |
|
"learning_rate": 2.823377420501846e-06, |
|
"loss": 0.093, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.6528925619834711, |
|
"grad_norm": 3.045171763549146, |
|
"learning_rate": 2.8228818965225326e-06, |
|
"loss": 0.08, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.6550964187327823, |
|
"grad_norm": 3.923253731083525, |
|
"learning_rate": 2.8223857220170486e-06, |
|
"loss": 0.1194, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.6573002754820938, |
|
"grad_norm": 3.4924421442319726, |
|
"learning_rate": 2.821888897229388e-06, |
|
"loss": 0.1569, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.659504132231405, |
|
"grad_norm": 10.507784482223977, |
|
"learning_rate": 2.821391422403863e-06, |
|
"loss": 0.2523, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.6617079889807163, |
|
"grad_norm": 6.827016679434054, |
|
"learning_rate": 2.820893297785107e-06, |
|
"loss": 0.1597, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.6639118457300275, |
|
"grad_norm": 4.231118216733152, |
|
"learning_rate": 2.8203945236180726e-06, |
|
"loss": 0.2601, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.6661157024793387, |
|
"grad_norm": 5.790045882163988, |
|
"learning_rate": 2.8198951001480316e-06, |
|
"loss": 0.1201, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.6683195592286502, |
|
"grad_norm": 3.780198445899582, |
|
"learning_rate": 2.8193950276205744e-06, |
|
"loss": 0.3059, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.6705234159779614, |
|
"grad_norm": 2.7743504550344724, |
|
"learning_rate": 2.818894306281612e-06, |
|
"loss": 0.0944, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.6727272727272728, |
|
"grad_norm": 4.384995400856117, |
|
"learning_rate": 2.818392936377374e-06, |
|
"loss": 0.207, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.674931129476584, |
|
"grad_norm": 3.46546366145472, |
|
"learning_rate": 2.817890918154408e-06, |
|
"loss": 0.0987, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.6771349862258953, |
|
"grad_norm": 3.944519624313324, |
|
"learning_rate": 2.8173882518595806e-06, |
|
"loss": 0.2076, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.6793388429752065, |
|
"grad_norm": 3.0199025674864197, |
|
"learning_rate": 2.816884937740079e-06, |
|
"loss": 0.139, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.6815426997245178, |
|
"grad_norm": 4.181192024696917, |
|
"learning_rate": 2.8163809760434065e-06, |
|
"loss": 0.4948, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.6837465564738292, |
|
"grad_norm": 5.425368792194891, |
|
"learning_rate": 2.8158763670173866e-06, |
|
"loss": 0.2613, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.6859504132231407, |
|
"grad_norm": 2.449214805033337, |
|
"learning_rate": 2.8153711109101596e-06, |
|
"loss": 0.1652, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.6881542699724519, |
|
"grad_norm": 3.02375338979952, |
|
"learning_rate": 2.8148652079701852e-06, |
|
"loss": 0.0925, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.690358126721763, |
|
"grad_norm": 3.484485189001616, |
|
"learning_rate": 2.814358658446241e-06, |
|
"loss": 0.1745, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.6925619834710743, |
|
"grad_norm": 2.666491627882498, |
|
"learning_rate": 2.813851462587422e-06, |
|
"loss": 0.1677, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.6947658402203856, |
|
"grad_norm": 3.1016763666636717, |
|
"learning_rate": 2.8133436206431416e-06, |
|
"loss": 0.0979, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 2.6268324993637737, |
|
"learning_rate": 2.8128351328631308e-06, |
|
"loss": 0.1548, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.6991735537190082, |
|
"grad_norm": 3.5260065593405403, |
|
"learning_rate": 2.8123259994974376e-06, |
|
"loss": 0.2974, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.7013774104683197, |
|
"grad_norm": 4.976441509919316, |
|
"learning_rate": 2.811816220796428e-06, |
|
"loss": 0.2308, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.703581267217631, |
|
"grad_norm": 5.669812148443133, |
|
"learning_rate": 2.8113057970107864e-06, |
|
"loss": 0.5012, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.7057851239669422, |
|
"grad_norm": 5.4074181108538895, |
|
"learning_rate": 2.8107947283915115e-06, |
|
"loss": 0.2426, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.7079889807162534, |
|
"grad_norm": 4.925146764134966, |
|
"learning_rate": 2.810283015189922e-06, |
|
"loss": 0.2084, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.7101928374655646, |
|
"grad_norm": 4.733811645399949, |
|
"learning_rate": 2.8097706576576524e-06, |
|
"loss": 0.1098, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.712396694214876, |
|
"grad_norm": 3.72787024918664, |
|
"learning_rate": 2.8092576560466546e-06, |
|
"loss": 0.2889, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.7146005509641873, |
|
"grad_norm": 4.179092902335767, |
|
"learning_rate": 2.808744010609196e-06, |
|
"loss": 0.171, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.7168044077134987, |
|
"grad_norm": 3.1698363233858435, |
|
"learning_rate": 2.808229721597861e-06, |
|
"loss": 0.102, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.71900826446281, |
|
"grad_norm": 5.330585067718199, |
|
"learning_rate": 2.8077147892655516e-06, |
|
"loss": 0.4345, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.7212121212121212, |
|
"grad_norm": 3.9967585194484263, |
|
"learning_rate": 2.8071992138654856e-06, |
|
"loss": 0.1291, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.7234159779614324, |
|
"grad_norm": 1.922265729326589, |
|
"learning_rate": 2.806682995651196e-06, |
|
"loss": 0.1489, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.7256198347107436, |
|
"grad_norm": 3.133050865277451, |
|
"learning_rate": 2.806166134876533e-06, |
|
"loss": 0.1107, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.727823691460055, |
|
"grad_norm": 4.8008500192145735, |
|
"learning_rate": 2.805648631795663e-06, |
|
"loss": 0.1931, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.7300275482093666, |
|
"grad_norm": 7.27694610184454, |
|
"learning_rate": 2.8051304866630674e-06, |
|
"loss": 0.1939, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.7322314049586778, |
|
"grad_norm": 4.683082050560034, |
|
"learning_rate": 2.8046116997335432e-06, |
|
"loss": 0.1368, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.734435261707989, |
|
"grad_norm": 4.192820835260911, |
|
"learning_rate": 2.804092271262204e-06, |
|
"loss": 0.1852, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.7366391184573002, |
|
"grad_norm": 2.654337382564104, |
|
"learning_rate": 2.8035722015044784e-06, |
|
"loss": 0.0936, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.7388429752066115, |
|
"grad_norm": 4.735087099788159, |
|
"learning_rate": 2.80305149071611e-06, |
|
"loss": 0.1789, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.741046831955923, |
|
"grad_norm": 4.54086308882895, |
|
"learning_rate": 2.8025301391531584e-06, |
|
"loss": 0.1054, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.7432506887052341, |
|
"grad_norm": 5.877262840590166, |
|
"learning_rate": 2.8020081470719974e-06, |
|
"loss": 0.1144, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.7454545454545456, |
|
"grad_norm": 2.9934758004327198, |
|
"learning_rate": 2.8014855147293165e-06, |
|
"loss": 0.1114, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.7476584022038568, |
|
"grad_norm": 3.9602949185225427, |
|
"learning_rate": 2.800962242382119e-06, |
|
"loss": 0.1469, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.749862258953168, |
|
"grad_norm": 3.4891990751120874, |
|
"learning_rate": 2.8004383302877246e-06, |
|
"loss": 0.2197, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.7520661157024793, |
|
"grad_norm": 3.1228880918415314, |
|
"learning_rate": 2.799913778703767e-06, |
|
"loss": 0.0802, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.7542699724517905, |
|
"grad_norm": 4.262402531909565, |
|
"learning_rate": 2.7993885878881924e-06, |
|
"loss": 0.1462, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.756473829201102, |
|
"grad_norm": 4.464490083715167, |
|
"learning_rate": 2.7988627580992644e-06, |
|
"loss": 0.3734, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.7586776859504132, |
|
"grad_norm": 2.9521520450784293, |
|
"learning_rate": 2.7983362895955592e-06, |
|
"loss": 0.0761, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.7608815426997246, |
|
"grad_norm": 4.569388820068124, |
|
"learning_rate": 2.797809182635967e-06, |
|
"loss": 0.311, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.7630853994490359, |
|
"grad_norm": 2.761531061435357, |
|
"learning_rate": 2.7972814374796923e-06, |
|
"loss": 0.1123, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.765289256198347, |
|
"grad_norm": 2.909256889094545, |
|
"learning_rate": 2.7967530543862536e-06, |
|
"loss": 0.1424, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.7674931129476583, |
|
"grad_norm": 3.580892106221509, |
|
"learning_rate": 2.7962240336154824e-06, |
|
"loss": 0.1713, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.7696969696969695, |
|
"grad_norm": 2.613119863195253, |
|
"learning_rate": 2.795694375427524e-06, |
|
"loss": 0.1453, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.771900826446281, |
|
"grad_norm": 3.9385995694287512, |
|
"learning_rate": 2.795164080082839e-06, |
|
"loss": 0.3253, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.7741046831955924, |
|
"grad_norm": 2.968556000386392, |
|
"learning_rate": 2.7946331478421985e-06, |
|
"loss": 0.1914, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.7763085399449037, |
|
"grad_norm": 4.2982285708776615, |
|
"learning_rate": 2.794101578966688e-06, |
|
"loss": 0.1755, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.778512396694215, |
|
"grad_norm": 3.077422083118147, |
|
"learning_rate": 2.793569373717706e-06, |
|
"loss": 0.1018, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.7807162534435261, |
|
"grad_norm": 3.2720302498627394, |
|
"learning_rate": 2.7930365323569652e-06, |
|
"loss": 0.0902, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.7829201101928374, |
|
"grad_norm": 4.579436283539023, |
|
"learning_rate": 2.7925030551464885e-06, |
|
"loss": 0.1652, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.7851239669421488, |
|
"grad_norm": 3.7523711022286372, |
|
"learning_rate": 2.7919689423486137e-06, |
|
"loss": 0.1149, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.78732782369146, |
|
"grad_norm": 2.1787023560029017, |
|
"learning_rate": 2.79143419422599e-06, |
|
"loss": 0.1034, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.7895316804407715, |
|
"grad_norm": 3.262576182854977, |
|
"learning_rate": 2.7908988110415794e-06, |
|
"loss": 0.1919, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.7917355371900827, |
|
"grad_norm": 2.768827717118814, |
|
"learning_rate": 2.7903627930586566e-06, |
|
"loss": 0.0913, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.793939393939394, |
|
"grad_norm": 4.020492031322422, |
|
"learning_rate": 2.789826140540808e-06, |
|
"loss": 0.112, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.7961432506887052, |
|
"grad_norm": 3.8046638565500457, |
|
"learning_rate": 2.7892888537519313e-06, |
|
"loss": 0.2014, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.7983471074380164, |
|
"grad_norm": 3.5265605684044727, |
|
"learning_rate": 2.788750932956238e-06, |
|
"loss": 0.1362, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.8005509641873279, |
|
"grad_norm": 4.629808595887051, |
|
"learning_rate": 2.7882123784182493e-06, |
|
"loss": 0.1114, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.802754820936639, |
|
"grad_norm": 6.514243778597918, |
|
"learning_rate": 2.7876731904027993e-06, |
|
"loss": 0.1641, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.8049586776859505, |
|
"grad_norm": 3.1215686853622335, |
|
"learning_rate": 2.787133369175033e-06, |
|
"loss": 0.2913, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.8071625344352618, |
|
"grad_norm": 3.6554868120588466, |
|
"learning_rate": 2.7865929150004082e-06, |
|
"loss": 0.1416, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.809366391184573, |
|
"grad_norm": 15.691489907553818, |
|
"learning_rate": 2.7860518281446915e-06, |
|
"loss": 0.1736, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.8115702479338842, |
|
"grad_norm": 5.283164839932474, |
|
"learning_rate": 2.7855101088739628e-06, |
|
"loss": 0.2026, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.8137741046831954, |
|
"grad_norm": 3.5594046840956057, |
|
"learning_rate": 2.7849677574546113e-06, |
|
"loss": 0.0848, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.815977961432507, |
|
"grad_norm": 4.612036787809661, |
|
"learning_rate": 2.7844247741533395e-06, |
|
"loss": 0.179, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 3.65401545072593, |
|
"learning_rate": 2.783881159237157e-06, |
|
"loss": 0.1717, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.8203856749311296, |
|
"grad_norm": 3.9012328429874294, |
|
"learning_rate": 2.783336912973387e-06, |
|
"loss": 0.2841, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.8225895316804408, |
|
"grad_norm": 5.965848322247876, |
|
"learning_rate": 2.782792035629663e-06, |
|
"loss": 0.3736, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.824793388429752, |
|
"grad_norm": 3.271989028222971, |
|
"learning_rate": 2.7822465274739265e-06, |
|
"loss": 0.1078, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.8269972451790633, |
|
"grad_norm": 5.339902990752521, |
|
"learning_rate": 2.7817003887744313e-06, |
|
"loss": 0.3349, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.8292011019283747, |
|
"grad_norm": 3.0715961642404577, |
|
"learning_rate": 2.781153619799741e-06, |
|
"loss": 0.1475, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.831404958677686, |
|
"grad_norm": 5.6237694593788845, |
|
"learning_rate": 2.780606220818729e-06, |
|
"loss": 0.1436, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.8336088154269974, |
|
"grad_norm": 5.890345413594224, |
|
"learning_rate": 2.7800581921005778e-06, |
|
"loss": 0.1878, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.8358126721763086, |
|
"grad_norm": 4.06712704576657, |
|
"learning_rate": 2.77950953391478e-06, |
|
"loss": 0.2348, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.8380165289256198, |
|
"grad_norm": 2.882404649474459, |
|
"learning_rate": 2.7789602465311384e-06, |
|
"loss": 0.1351, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.840220385674931, |
|
"grad_norm": 5.473781561460339, |
|
"learning_rate": 2.778410330219764e-06, |
|
"loss": 0.3063, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.8424242424242423, |
|
"grad_norm": 3.922211179254337, |
|
"learning_rate": 2.777859785251079e-06, |
|
"loss": 0.1619, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.8446280991735537, |
|
"grad_norm": 8.221310063208145, |
|
"learning_rate": 2.777308611895812e-06, |
|
"loss": 0.4785, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.846831955922865, |
|
"grad_norm": 4.2268861917345335, |
|
"learning_rate": 2.7767568104250022e-06, |
|
"loss": 0.1154, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.8490358126721764, |
|
"grad_norm": 2.7566969326407658, |
|
"learning_rate": 2.7762043811099986e-06, |
|
"loss": 0.0647, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.8512396694214877, |
|
"grad_norm": 4.1654096542802925, |
|
"learning_rate": 2.775651324222457e-06, |
|
"loss": 0.2954, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.8534435261707989, |
|
"grad_norm": 2.862929107741642, |
|
"learning_rate": 2.775097640034343e-06, |
|
"loss": 0.1234, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.8556473829201101, |
|
"grad_norm": 4.035924471619375, |
|
"learning_rate": 2.7745433288179313e-06, |
|
"loss": 0.2158, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.8578512396694213, |
|
"grad_norm": 3.9005408283862884, |
|
"learning_rate": 2.773988390845802e-06, |
|
"loss": 0.2078, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.8600550964187328, |
|
"grad_norm": 4.0776857291236, |
|
"learning_rate": 2.773432826390847e-06, |
|
"loss": 0.2066, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.8622589531680442, |
|
"grad_norm": 4.054788798271851, |
|
"learning_rate": 2.7728766357262645e-06, |
|
"loss": 0.1977, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.8644628099173555, |
|
"grad_norm": 4.8154226644199, |
|
"learning_rate": 2.77231981912556e-06, |
|
"loss": 0.2452, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.8666666666666667, |
|
"grad_norm": 3.749035576393667, |
|
"learning_rate": 2.7717623768625487e-06, |
|
"loss": 0.1679, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.868870523415978, |
|
"grad_norm": 3.0296230257574823, |
|
"learning_rate": 2.7712043092113518e-06, |
|
"loss": 0.1371, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.8710743801652892, |
|
"grad_norm": 2.8961077120026397, |
|
"learning_rate": 2.7706456164463987e-06, |
|
"loss": 0.1451, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.8732782369146006, |
|
"grad_norm": 4.757014237158706, |
|
"learning_rate": 2.7700862988424264e-06, |
|
"loss": 0.2476, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.8754820936639118, |
|
"grad_norm": 3.5605409135922486, |
|
"learning_rate": 2.7695263566744786e-06, |
|
"loss": 0.3223, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.8776859504132233, |
|
"grad_norm": 3.673778148979456, |
|
"learning_rate": 2.7689657902179063e-06, |
|
"loss": 0.1496, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.8798898071625345, |
|
"grad_norm": 3.9594639471733015, |
|
"learning_rate": 2.7684045997483683e-06, |
|
"loss": 0.1621, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.8820936639118457, |
|
"grad_norm": 4.3094578613213015, |
|
"learning_rate": 2.7678427855418293e-06, |
|
"loss": 0.1284, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.884297520661157, |
|
"grad_norm": 4.230139062066988, |
|
"learning_rate": 2.767280347874561e-06, |
|
"loss": 0.2194, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.8865013774104682, |
|
"grad_norm": 4.483162054895854, |
|
"learning_rate": 2.766717287023141e-06, |
|
"loss": 0.3231, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.8887052341597796, |
|
"grad_norm": 3.6045893200147656, |
|
"learning_rate": 2.766153603264455e-06, |
|
"loss": 0.3204, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.8909090909090909, |
|
"grad_norm": 2.68825864298539, |
|
"learning_rate": 2.7655892968756935e-06, |
|
"loss": 0.1282, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.8931129476584023, |
|
"grad_norm": 3.086935863096946, |
|
"learning_rate": 2.7650243681343543e-06, |
|
"loss": 0.1713, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.8953168044077136, |
|
"grad_norm": 4.285553555539189, |
|
"learning_rate": 2.76445881731824e-06, |
|
"loss": 0.1801, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.8975206611570248, |
|
"grad_norm": 4.032253713736677, |
|
"learning_rate": 2.76389264470546e-06, |
|
"loss": 0.1143, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.899724517906336, |
|
"grad_norm": 3.5305542226645032, |
|
"learning_rate": 2.76332585057443e-06, |
|
"loss": 0.2464, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.9019283746556472, |
|
"grad_norm": 4.04266297597818, |
|
"learning_rate": 2.762758435203869e-06, |
|
"loss": 0.1271, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.9041322314049587, |
|
"grad_norm": 3.046350178355839, |
|
"learning_rate": 2.762190398872805e-06, |
|
"loss": 0.1708, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.9063360881542701, |
|
"grad_norm": 4.39744290609223, |
|
"learning_rate": 2.7616217418605685e-06, |
|
"loss": 0.0867, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.9085399449035814, |
|
"grad_norm": 3.9606892297824627, |
|
"learning_rate": 2.7610524644467955e-06, |
|
"loss": 0.1766, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.9107438016528926, |
|
"grad_norm": 2.6007521338428163, |
|
"learning_rate": 2.7604825669114284e-06, |
|
"loss": 0.1322, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.9129476584022038, |
|
"grad_norm": 4.5370637771574485, |
|
"learning_rate": 2.7599120495347137e-06, |
|
"loss": 0.2271, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.915151515151515, |
|
"grad_norm": 4.704518903323181, |
|
"learning_rate": 2.7593409125972037e-06, |
|
"loss": 0.2211, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.9173553719008265, |
|
"grad_norm": 3.7308347809758704, |
|
"learning_rate": 2.7587691563797533e-06, |
|
"loss": 0.1474, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.9195592286501377, |
|
"grad_norm": 3.531251299278127, |
|
"learning_rate": 2.758196781163524e-06, |
|
"loss": 0.1425, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.9217630853994492, |
|
"grad_norm": 3.899278122311197, |
|
"learning_rate": 2.7576237872299805e-06, |
|
"loss": 0.1692, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.9239669421487604, |
|
"grad_norm": 3.970267403444949, |
|
"learning_rate": 2.7570501748608922e-06, |
|
"loss": 0.3354, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.9261707988980716, |
|
"grad_norm": 5.657505689245682, |
|
"learning_rate": 2.756475944338333e-06, |
|
"loss": 0.2707, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.9283746556473829, |
|
"grad_norm": 5.4080580645655125, |
|
"learning_rate": 2.75590109594468e-06, |
|
"loss": 0.2196, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.930578512396694, |
|
"grad_norm": 4.072394021014495, |
|
"learning_rate": 2.7553256299626138e-06, |
|
"loss": 0.2698, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.9327823691460055, |
|
"grad_norm": 2.667949896246189, |
|
"learning_rate": 2.7547495466751204e-06, |
|
"loss": 0.1677, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.9349862258953168, |
|
"grad_norm": 3.638205070981609, |
|
"learning_rate": 2.754172846365488e-06, |
|
"loss": 0.1484, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.9371900826446282, |
|
"grad_norm": 3.4775529494976136, |
|
"learning_rate": 2.7535955293173087e-06, |
|
"loss": 0.1624, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 4.173548390099768, |
|
"learning_rate": 2.753017595814477e-06, |
|
"loss": 0.1607, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.9415977961432507, |
|
"grad_norm": 4.164962747564953, |
|
"learning_rate": 2.7524390461411916e-06, |
|
"loss": 0.1889, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.943801652892562, |
|
"grad_norm": 2.54142311710031, |
|
"learning_rate": 2.751859880581954e-06, |
|
"loss": 0.1741, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.9460055096418731, |
|
"grad_norm": 2.178677072386189, |
|
"learning_rate": 2.751280099421569e-06, |
|
"loss": 0.1216, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.9482093663911846, |
|
"grad_norm": 3.8814007175409673, |
|
"learning_rate": 2.750699702945142e-06, |
|
"loss": 0.1891, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.950413223140496, |
|
"grad_norm": 8.658086942729867, |
|
"learning_rate": 2.7501186914380845e-06, |
|
"loss": 0.1778, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.9526170798898073, |
|
"grad_norm": 3.146576269483999, |
|
"learning_rate": 2.749537065186107e-06, |
|
"loss": 0.0524, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.9548209366391185, |
|
"grad_norm": 3.2325353182101395, |
|
"learning_rate": 2.748954824475224e-06, |
|
"loss": 0.1623, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.9570247933884297, |
|
"grad_norm": 3.1057019071534047, |
|
"learning_rate": 2.748371969591752e-06, |
|
"loss": 0.1908, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.959228650137741, |
|
"grad_norm": 2.678469488136285, |
|
"learning_rate": 2.74778850082231e-06, |
|
"loss": 0.1876, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.9614325068870524, |
|
"grad_norm": 4.750733810300038, |
|
"learning_rate": 2.747204418453818e-06, |
|
"loss": 0.2796, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.9636363636363636, |
|
"grad_norm": 2.4582854680750486, |
|
"learning_rate": 2.7466197227734976e-06, |
|
"loss": 0.0621, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.965840220385675, |
|
"grad_norm": 4.052676767921253, |
|
"learning_rate": 2.7460344140688733e-06, |
|
"loss": 0.238, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.9680440771349863, |
|
"grad_norm": 3.0237993476789753, |
|
"learning_rate": 2.7454484926277693e-06, |
|
"loss": 0.1915, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.9702479338842975, |
|
"grad_norm": 2.989473268610309, |
|
"learning_rate": 2.744861958738313e-06, |
|
"loss": 0.0917, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.9724517906336088, |
|
"grad_norm": 2.36311627004035, |
|
"learning_rate": 2.744274812688931e-06, |
|
"loss": 0.1871, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.97465564738292, |
|
"grad_norm": 3.4871528972576944, |
|
"learning_rate": 2.743687054768353e-06, |
|
"loss": 0.1615, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.9768595041322314, |
|
"grad_norm": 3.7392756691831246, |
|
"learning_rate": 2.743098685265607e-06, |
|
"loss": 0.2817, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.9790633608815427, |
|
"grad_norm": 4.617396209400955, |
|
"learning_rate": 2.7425097044700246e-06, |
|
"loss": 0.235, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.9812672176308541, |
|
"grad_norm": 3.679289220007701, |
|
"learning_rate": 2.741920112671237e-06, |
|
"loss": 0.1242, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.9834710743801653, |
|
"grad_norm": 3.3171681919567293, |
|
"learning_rate": 2.7413299101591738e-06, |
|
"loss": 0.1351, |
|
"step": 900 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 4530,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 834641013964800.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}