{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.42472682342947604,
  "eval_steps": 500,
  "global_step": 11000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003861152940267964,
      "grad_norm": 8.003422737121582,
      "learning_rate": 0.0001999768330823584,
      "loss": 5.0107,
      "step": 10
    },
    {
      "epoch": 0.0007722305880535929,
      "grad_norm": 11.965606689453125,
      "learning_rate": 0.0001999510920627566,
      "loss": 0.9013,
      "step": 20
    },
    {
      "epoch": 0.0011583458820803893,
      "grad_norm": 9.783374786376953,
      "learning_rate": 0.00019992535104315483,
      "loss": 0.5506,
      "step": 30
    },
    {
      "epoch": 0.0015444611761071857,
      "grad_norm": 5.720436096191406,
      "learning_rate": 0.00019989961002355304,
      "loss": 0.501,
      "step": 40
    },
    {
      "epoch": 0.001930576470133982,
      "grad_norm": 11.18126392364502,
      "learning_rate": 0.00019987386900395125,
      "loss": 0.8176,
      "step": 50
    },
    {
      "epoch": 0.0023166917641607786,
      "grad_norm": 8.88875675201416,
      "learning_rate": 0.00019984812798434947,
      "loss": 0.5404,
      "step": 60
    },
    {
      "epoch": 0.0027028070581875748,
      "grad_norm": 2.9886066913604736,
      "learning_rate": 0.00019982238696474768,
      "loss": 0.5295,
      "step": 70
    },
    {
      "epoch": 0.0030889223522143714,
      "grad_norm": 8.936307907104492,
      "learning_rate": 0.0001997966459451459,
      "loss": 0.7398,
      "step": 80
    },
    {
      "epoch": 0.0034750376462411676,
      "grad_norm": 11.393534660339355,
      "learning_rate": 0.0001997709049255441,
      "loss": 0.6333,
      "step": 90
    },
    {
      "epoch": 0.003861152940267964,
      "grad_norm": 8.725994110107422,
      "learning_rate": 0.00019974516390594235,
      "loss": 0.4689,
      "step": 100
    },
    {
      "epoch": 0.00424726823429476,
      "grad_norm": 20.316652297973633,
      "learning_rate": 0.00019971942288634053,
      "loss": 0.8522,
      "step": 110
    },
    {
      "epoch": 0.004633383528321557,
      "grad_norm": 4.232663631439209,
      "learning_rate": 0.00019969368186673875,
      "loss": 0.5041,
      "step": 120
    },
    {
      "epoch": 0.005019498822348353,
      "grad_norm": 2.609255313873291,
      "learning_rate": 0.00019966794084713696,
      "loss": 0.7439,
      "step": 130
    },
    {
      "epoch": 0.0054056141163751495,
      "grad_norm": 10.063919067382812,
      "learning_rate": 0.00019964219982753517,
      "loss": 0.6702,
      "step": 140
    },
    {
      "epoch": 0.005791729410401946,
      "grad_norm": 5.799802303314209,
      "learning_rate": 0.00019961645880793339,
      "loss": 0.7356,
      "step": 150
    },
    {
      "epoch": 0.006177844704428743,
      "grad_norm": 12.664258003234863,
      "learning_rate": 0.0001995907177883316,
      "loss": 0.4067,
      "step": 160
    },
    {
      "epoch": 0.006563959998455539,
      "grad_norm": 9.42366886138916,
      "learning_rate": 0.00019956497676872984,
      "loss": 0.4767,
      "step": 170
    },
    {
      "epoch": 0.006950075292482335,
      "grad_norm": 5.382272243499756,
      "learning_rate": 0.00019953923574912803,
      "loss": 0.536,
      "step": 180
    },
    {
      "epoch": 0.0073361905865091314,
      "grad_norm": 9.797371864318848,
      "learning_rate": 0.00019951349472952624,
      "loss": 0.4735,
      "step": 190
    },
    {
      "epoch": 0.007722305880535928,
      "grad_norm": 7.965329647064209,
      "learning_rate": 0.00019948775370992445,
      "loss": 0.3881,
      "step": 200
    },
    {
      "epoch": 0.008108421174562725,
      "grad_norm": 4.075791835784912,
      "learning_rate": 0.00019946201269032267,
      "loss": 0.5564,
      "step": 210
    },
    {
      "epoch": 0.00849453646858952,
      "grad_norm": 24.367305755615234,
      "learning_rate": 0.0001994362716707209,
      "loss": 0.9795,
      "step": 220
    },
    {
      "epoch": 0.008880651762616317,
      "grad_norm": 9.627866744995117,
      "learning_rate": 0.0001994105306511191,
      "loss": 0.4528,
      "step": 230
    },
    {
      "epoch": 0.009266767056643114,
      "grad_norm": 7.469555854797363,
      "learning_rate": 0.00019938478963151733,
      "loss": 0.447,
      "step": 240
    },
    {
      "epoch": 0.00965288235066991,
      "grad_norm": 7.426730155944824,
      "learning_rate": 0.00019935904861191552,
      "loss": 0.6026,
      "step": 250
    },
    {
      "epoch": 0.010038997644696707,
      "grad_norm": 6.999317169189453,
      "learning_rate": 0.00019933330759231373,
      "loss": 0.4962,
      "step": 260
    },
    {
      "epoch": 0.010425112938723502,
      "grad_norm": 10.492286682128906,
      "learning_rate": 0.00019930756657271194,
      "loss": 0.7987,
      "step": 270
    },
    {
      "epoch": 0.010811228232750299,
      "grad_norm": 7.079407215118408,
      "learning_rate": 0.00019928182555311016,
      "loss": 0.4395,
      "step": 280
    },
    {
      "epoch": 0.011197343526777096,
      "grad_norm": 9.610014915466309,
      "learning_rate": 0.0001992560845335084,
      "loss": 0.748,
      "step": 290
    },
    {
      "epoch": 0.011583458820803891,
      "grad_norm": 5.993048667907715,
      "learning_rate": 0.00019923034351390658,
      "loss": 0.4328,
      "step": 300
    },
    {
      "epoch": 0.011969574114830689,
      "grad_norm": 7.336791515350342,
      "learning_rate": 0.00019920460249430483,
      "loss": 0.4104,
      "step": 310
    },
    {
      "epoch": 0.012355689408857486,
      "grad_norm": 7.967221736907959,
      "learning_rate": 0.000199178861474703,
      "loss": 0.4662,
      "step": 320
    },
    {
      "epoch": 0.012741804702884281,
      "grad_norm": 4.464987754821777,
      "learning_rate": 0.00019915312045510125,
      "loss": 0.725,
      "step": 330
    },
    {
      "epoch": 0.013127919996911078,
      "grad_norm": 8.669449806213379,
      "learning_rate": 0.00019912737943549944,
      "loss": 0.4256,
      "step": 340
    },
    {
      "epoch": 0.013514035290937873,
      "grad_norm": 4.114014148712158,
      "learning_rate": 0.00019910163841589765,
      "loss": 0.4477,
      "step": 350
    },
    {
      "epoch": 0.01390015058496467,
      "grad_norm": 9.254106521606445,
      "learning_rate": 0.0001990758973962959,
      "loss": 0.514,
      "step": 360
    },
    {
      "epoch": 0.014286265878991468,
      "grad_norm": 0.8039970993995667,
      "learning_rate": 0.00019905015637669408,
      "loss": 0.5802,
      "step": 370
    },
    {
      "epoch": 0.014672381173018263,
      "grad_norm": 3.9931838512420654,
      "learning_rate": 0.00019902441535709232,
      "loss": 0.8973,
      "step": 380
    },
    {
      "epoch": 0.01505849646704506,
      "grad_norm": 1.7645355463027954,
      "learning_rate": 0.0001989986743374905,
      "loss": 0.7108,
      "step": 390
    },
    {
      "epoch": 0.015444611761071855,
      "grad_norm": 6.8542866706848145,
      "learning_rate": 0.00019897293331788875,
      "loss": 0.5796,
      "step": 400
    },
    {
      "epoch": 0.015830727055098654,
      "grad_norm": 5.278103828430176,
      "learning_rate": 0.00019894719229828696,
      "loss": 0.3841,
      "step": 410
    },
    {
      "epoch": 0.01621684234912545,
      "grad_norm": 9.00206184387207,
      "learning_rate": 0.00019892145127868514,
      "loss": 0.5891,
      "step": 420
    },
    {
      "epoch": 0.016602957643152245,
      "grad_norm": 7.684702396392822,
      "learning_rate": 0.00019889571025908339,
      "loss": 0.4868,
      "step": 430
    },
    {
      "epoch": 0.01698907293717904,
      "grad_norm": 4.198502540588379,
      "learning_rate": 0.00019886996923948157,
      "loss": 0.571,
      "step": 440
    },
    {
      "epoch": 0.01737518823120584,
      "grad_norm": 7.454501628875732,
      "learning_rate": 0.0001988442282198798,
      "loss": 0.5133,
      "step": 450
    },
    {
      "epoch": 0.017761303525232634,
      "grad_norm": 13.236722946166992,
      "learning_rate": 0.000198818487200278,
      "loss": 0.4139,
      "step": 460
    },
    {
      "epoch": 0.01814741881925943,
      "grad_norm": 6.4592390060424805,
      "learning_rate": 0.00019879274618067624,
      "loss": 0.6078,
      "step": 470
    },
    {
      "epoch": 0.01853353411328623,
      "grad_norm": 11.73417854309082,
      "learning_rate": 0.00019876700516107445,
      "loss": 0.5472,
      "step": 480
    },
    {
      "epoch": 0.018919649407313024,
      "grad_norm": 2.5162808895111084,
      "learning_rate": 0.00019874126414147264,
      "loss": 0.6611,
      "step": 490
    },
    {
      "epoch": 0.01930576470133982,
      "grad_norm": 4.9637837409973145,
      "learning_rate": 0.00019871552312187088,
      "loss": 0.6472,
      "step": 500
    },
    {
      "epoch": 0.019691879995366618,
      "grad_norm": 11.545489311218262,
      "learning_rate": 0.00019868978210226906,
      "loss": 0.5304,
      "step": 510
    },
    {
      "epoch": 0.020077995289393413,
      "grad_norm": 5.197858810424805,
      "learning_rate": 0.0001986640410826673,
      "loss": 0.605,
      "step": 520
    },
    {
      "epoch": 0.02046411058342021,
      "grad_norm": 4.935055255889893,
      "learning_rate": 0.0001986383000630655,
      "loss": 0.6524,
      "step": 530
    },
    {
      "epoch": 0.020850225877447004,
      "grad_norm": 5.838052749633789,
      "learning_rate": 0.00019861255904346373,
      "loss": 0.4957,
      "step": 540
    },
    {
      "epoch": 0.021236341171473803,
      "grad_norm": 4.682408809661865,
      "learning_rate": 0.00019858681802386194,
      "loss": 0.8523,
      "step": 550
    },
    {
      "epoch": 0.021622456465500598,
      "grad_norm": 10.720857620239258,
      "learning_rate": 0.00019856107700426013,
      "loss": 0.516,
      "step": 560
    },
    {
      "epoch": 0.022008571759527393,
      "grad_norm": 6.515562534332275,
      "learning_rate": 0.00019853533598465837,
      "loss": 0.6095,
      "step": 570
    },
    {
      "epoch": 0.022394687053554192,
      "grad_norm": 3.204960584640503,
      "learning_rate": 0.00019850959496505656,
      "loss": 0.6624,
      "step": 580
    },
    {
      "epoch": 0.022780802347580988,
      "grad_norm": 2.305497884750366,
      "learning_rate": 0.0001984838539454548,
      "loss": 0.5986,
      "step": 590
    },
    {
      "epoch": 0.023166917641607783,
      "grad_norm": 13.07105541229248,
      "learning_rate": 0.000198458112925853,
      "loss": 0.337,
      "step": 600
    },
    {
      "epoch": 0.023553032935634582,
      "grad_norm": 3.1491329669952393,
      "learning_rate": 0.00019843237190625122,
      "loss": 0.5466,
      "step": 610
    },
    {
      "epoch": 0.023939148229661377,
      "grad_norm": 1.92014479637146,
      "learning_rate": 0.00019840663088664944,
      "loss": 0.5525,
      "step": 620
    },
    {
      "epoch": 0.024325263523688172,
      "grad_norm": 2.206550121307373,
      "learning_rate": 0.00019838088986704762,
      "loss": 0.5069,
      "step": 630
    },
    {
      "epoch": 0.02471137881771497,
      "grad_norm": 2.386288642883301,
      "learning_rate": 0.00019835514884744586,
      "loss": 0.3749,
      "step": 640
    },
    {
      "epoch": 0.025097494111741767,
      "grad_norm": 7.393959045410156,
      "learning_rate": 0.00019832940782784405,
      "loss": 0.7144,
      "step": 650
    },
    {
      "epoch": 0.025483609405768562,
      "grad_norm": 5.7293171882629395,
      "learning_rate": 0.0001983036668082423,
      "loss": 0.5052,
      "step": 660
    },
    {
      "epoch": 0.025869724699795357,
      "grad_norm": 6.440220832824707,
      "learning_rate": 0.0001982779257886405,
      "loss": 0.4343,
      "step": 670
    },
    {
      "epoch": 0.026255839993822156,
      "grad_norm": 0.8553487658500671,
      "learning_rate": 0.00019825218476903872,
      "loss": 0.7562,
      "step": 680
    },
    {
      "epoch": 0.02664195528784895,
      "grad_norm": 3.762784719467163,
      "learning_rate": 0.00019822644374943693,
      "loss": 0.5593,
      "step": 690
    },
    {
      "epoch": 0.027028070581875747,
      "grad_norm": 11.674392700195312,
      "learning_rate": 0.00019820070272983512,
      "loss": 0.6069,
      "step": 700
    },
    {
      "epoch": 0.027414185875902546,
      "grad_norm": 8.631232261657715,
      "learning_rate": 0.00019817496171023336,
      "loss": 0.3584,
      "step": 710
    },
    {
      "epoch": 0.02780030116992934,
      "grad_norm": 5.7163920402526855,
      "learning_rate": 0.00019814922069063157,
      "loss": 0.5563,
      "step": 720
    },
    {
      "epoch": 0.028186416463956136,
      "grad_norm": 8.186172485351562,
      "learning_rate": 0.00019812347967102978,
      "loss": 0.5289,
      "step": 730
    },
    {
      "epoch": 0.028572531757982935,
      "grad_norm": 7.287814140319824,
      "learning_rate": 0.000198097738651428,
      "loss": 0.4543,
      "step": 740
    },
    {
      "epoch": 0.02895864705200973,
      "grad_norm": 6.621245384216309,
      "learning_rate": 0.0001980719976318262,
      "loss": 0.3244,
      "step": 750
    },
    {
      "epoch": 0.029344762346036526,
      "grad_norm": 3.5209403038024902,
      "learning_rate": 0.00019804625661222442,
      "loss": 0.5385,
      "step": 760
    },
    {
      "epoch": 0.029730877640063325,
      "grad_norm": 2.562343120574951,
      "learning_rate": 0.00019802051559262264,
      "loss": 0.4868,
      "step": 770
    },
    {
      "epoch": 0.03011699293409012,
      "grad_norm": 7.782780647277832,
      "learning_rate": 0.00019799477457302085,
      "loss": 0.5682,
      "step": 780
    },
    {
      "epoch": 0.030503108228116915,
      "grad_norm": 8.173531532287598,
      "learning_rate": 0.00019796903355341906,
      "loss": 0.3557,
      "step": 790
    },
    {
      "epoch": 0.03088922352214371,
      "grad_norm": 4.502675533294678,
      "learning_rate": 0.00019794329253381728,
      "loss": 0.517,
      "step": 800
    },
    {
      "epoch": 0.031275338816170506,
      "grad_norm": 6.314894199371338,
      "learning_rate": 0.0001979175515142155,
      "loss": 0.6392,
      "step": 810
    },
    {
      "epoch": 0.03166145411019731,
      "grad_norm": 7.054763317108154,
      "learning_rate": 0.0001978918104946137,
      "loss": 0.5823,
      "step": 820
    },
    {
      "epoch": 0.032047569404224104,
      "grad_norm": 1.7847551107406616,
      "learning_rate": 0.00019786606947501192,
      "loss": 0.4495,
      "step": 830
    },
    {
      "epoch": 0.0324336846982509,
      "grad_norm": 5.268612861633301,
      "learning_rate": 0.00019784032845541013,
      "loss": 0.4379,
      "step": 840
    },
    {
      "epoch": 0.032819799992277694,
      "grad_norm": 4.834717273712158,
      "learning_rate": 0.00019781458743580834,
      "loss": 0.5379,
      "step": 850
    },
    {
      "epoch": 0.03320591528630449,
      "grad_norm": 6.077203273773193,
      "learning_rate": 0.00019778884641620656,
      "loss": 0.5666,
      "step": 860
    },
    {
      "epoch": 0.033592030580331285,
      "grad_norm": 0.9583851099014282,
      "learning_rate": 0.00019776310539660477,
      "loss": 0.8146,
      "step": 870
    },
    {
      "epoch": 0.03397814587435808,
      "grad_norm": 0.8457469940185547,
      "learning_rate": 0.00019773736437700298,
      "loss": 0.3497,
      "step": 880
    },
    {
      "epoch": 0.03436426116838488,
      "grad_norm": 5.251153945922852,
      "learning_rate": 0.0001977116233574012,
      "loss": 0.6299,
      "step": 890
    },
    {
      "epoch": 0.03475037646241168,
      "grad_norm": 4.057605266571045,
      "learning_rate": 0.0001976858823377994,
      "loss": 0.5829,
      "step": 900
    },
    {
      "epoch": 0.03513649175643847,
      "grad_norm": 7.625199794769287,
      "learning_rate": 0.00019766014131819762,
      "loss": 0.452,
      "step": 910
    },
    {
      "epoch": 0.03552260705046527,
      "grad_norm": 4.618866443634033,
      "learning_rate": 0.00019763440029859584,
      "loss": 0.5693,
      "step": 920
    },
    {
      "epoch": 0.035908722344492064,
      "grad_norm": 8.480955123901367,
      "learning_rate": 0.00019760865927899405,
      "loss": 0.4894,
      "step": 930
    },
    {
      "epoch": 0.03629483763851886,
      "grad_norm": 6.537581920623779,
      "learning_rate": 0.00019758291825939226,
      "loss": 0.7396,
      "step": 940
    },
    {
      "epoch": 0.03668095293254566,
      "grad_norm": 8.093205451965332,
      "learning_rate": 0.00019755717723979048,
      "loss": 0.3702,
      "step": 950
    },
    {
      "epoch": 0.03706706822657246,
      "grad_norm": 2.523141860961914,
      "learning_rate": 0.0001975314362201887,
      "loss": 0.3746,
      "step": 960
    },
    {
      "epoch": 0.03745318352059925,
      "grad_norm": 1.2707194089889526,
      "learning_rate": 0.0001975056952005869,
      "loss": 0.3211,
      "step": 970
    },
    {
      "epoch": 0.03783929881462605,
      "grad_norm": 2.818798780441284,
      "learning_rate": 0.00019747995418098512,
      "loss": 0.6594,
      "step": 980
    },
    {
      "epoch": 0.03822541410865284,
      "grad_norm": 7.373154640197754,
      "learning_rate": 0.00019745421316138333,
      "loss": 0.7825,
      "step": 990
    },
    {
      "epoch": 0.03861152940267964,
      "grad_norm": 2.8925669193267822,
      "learning_rate": 0.00019742847214178154,
      "loss": 0.4847,
      "step": 1000
    },
    {
      "epoch": 0.038997644696706434,
      "grad_norm": 10.87833023071289,
      "learning_rate": 0.00019740273112217976,
      "loss": 0.5098,
      "step": 1010
    },
    {
      "epoch": 0.039383759990733236,
      "grad_norm": 3.8262317180633545,
      "learning_rate": 0.00019737699010257797,
      "loss": 0.5168,
      "step": 1020
    },
    {
      "epoch": 0.03976987528476003,
      "grad_norm": 2.5567004680633545,
      "learning_rate": 0.00019735124908297618,
      "loss": 0.5597,
      "step": 1030
    },
    {
      "epoch": 0.04015599057878683,
      "grad_norm": 4.385695457458496,
      "learning_rate": 0.0001973255080633744,
      "loss": 0.3187,
      "step": 1040
    },
    {
      "epoch": 0.04054210587281362,
      "grad_norm": 1.8186907768249512,
      "learning_rate": 0.0001972997670437726,
      "loss": 0.6274,
      "step": 1050
    },
    {
      "epoch": 0.04092822116684042,
      "grad_norm": 7.446481704711914,
      "learning_rate": 0.00019727402602417082,
      "loss": 0.4365,
      "step": 1060
    },
    {
      "epoch": 0.04131433646086721,
      "grad_norm": 8.973576545715332,
      "learning_rate": 0.00019724828500456904,
      "loss": 0.6518,
      "step": 1070
    },
    {
      "epoch": 0.04170045175489401,
      "grad_norm": 4.984101295471191,
      "learning_rate": 0.00019722254398496725,
      "loss": 0.4694,
      "step": 1080
    },
    {
      "epoch": 0.04208656704892081,
      "grad_norm": 8.2625093460083,
      "learning_rate": 0.00019719680296536546,
      "loss": 0.5532,
      "step": 1090
    },
    {
      "epoch": 0.042472682342947606,
      "grad_norm": 2.2425265312194824,
      "learning_rate": 0.0001971710619457637,
      "loss": 0.4011,
      "step": 1100
    },
    {
      "epoch": 0.0428587976369744,
      "grad_norm": 1.4552969932556152,
      "learning_rate": 0.0001971453209261619,
      "loss": 0.396,
      "step": 1110
    },
    {
      "epoch": 0.043244912931001196,
      "grad_norm": 2.0675857067108154,
      "learning_rate": 0.0001971195799065601,
      "loss": 0.3727,
      "step": 1120
    },
    {
      "epoch": 0.04363102822502799,
      "grad_norm": 5.24460506439209,
      "learning_rate": 0.00019709383888695832,
      "loss": 0.5016,
      "step": 1130
    },
    {
      "epoch": 0.04401714351905479,
      "grad_norm": 5.524955749511719,
      "learning_rate": 0.00019706809786735653,
      "loss": 0.5866,
      "step": 1140
    },
    {
      "epoch": 0.04440325881308159,
      "grad_norm": 4.2749199867248535,
      "learning_rate": 0.00019704235684775474,
      "loss": 0.4591,
      "step": 1150
    },
    {
      "epoch": 0.044789374107108385,
      "grad_norm": 3.460395336151123,
      "learning_rate": 0.00019701661582815296,
      "loss": 0.5275,
      "step": 1160
    },
    {
      "epoch": 0.04517548940113518,
      "grad_norm": 1.3979772329330444,
      "learning_rate": 0.0001969908748085512,
      "loss": 0.3375,
      "step": 1170
    },
    {
      "epoch": 0.045561604695161975,
      "grad_norm": 2.29923939704895,
      "learning_rate": 0.00019696513378894938,
      "loss": 0.5683,
      "step": 1180
    },
    {
      "epoch": 0.04594771998918877,
      "grad_norm": 3.211496353149414,
      "learning_rate": 0.0001969393927693476,
      "loss": 0.7122,
      "step": 1190
    },
    {
      "epoch": 0.046333835283215566,
      "grad_norm": 4.18447208404541,
      "learning_rate": 0.0001969136517497458,
      "loss": 0.5149,
      "step": 1200
    },
    {
      "epoch": 0.04671995057724236,
      "grad_norm": 14.650918960571289,
      "learning_rate": 0.00019688791073014402,
      "loss": 0.6384,
      "step": 1210
    },
    {
      "epoch": 0.047106065871269164,
      "grad_norm": 4.956239700317383,
      "learning_rate": 0.00019686216971054226,
      "loss": 0.3602,
      "step": 1220
    },
    {
      "epoch": 0.04749218116529596,
      "grad_norm": 4.0486860275268555,
      "learning_rate": 0.00019683642869094045,
      "loss": 0.7719,
      "step": 1230
    },
    {
      "epoch": 0.047878296459322754,
      "grad_norm": 4.587133407592773,
      "learning_rate": 0.0001968106876713387,
      "loss": 0.4079,
      "step": 1240
    },
    {
      "epoch": 0.04826441175334955,
      "grad_norm": 0.7830008268356323,
      "learning_rate": 0.00019678494665173688,
      "loss": 0.5841,
      "step": 1250
    },
    {
      "epoch": 0.048650527047376345,
      "grad_norm": 5.378068447113037,
      "learning_rate": 0.0001967592056321351,
      "loss": 0.3226,
      "step": 1260
    },
    {
      "epoch": 0.04903664234140314,
      "grad_norm": 4.002605438232422,
      "learning_rate": 0.0001967334646125333,
      "loss": 0.4411,
      "step": 1270
    },
    {
      "epoch": 0.04942275763542994,
      "grad_norm": 4.695134162902832,
      "learning_rate": 0.00019670772359293152,
      "loss": 0.415,
      "step": 1280
    },
    {
      "epoch": 0.04980887292945674,
      "grad_norm": 6.046143054962158,
      "learning_rate": 0.00019668198257332976,
      "loss": 0.463,
      "step": 1290
    },
    {
      "epoch": 0.05019498822348353,
      "grad_norm": 1.8300361633300781,
      "learning_rate": 0.00019665624155372794,
      "loss": 0.408,
      "step": 1300
    },
    {
      "epoch": 0.05058110351751033,
      "grad_norm": 5.80141544342041,
      "learning_rate": 0.00019663050053412618,
      "loss": 0.4481,
      "step": 1310
    },
    {
      "epoch": 0.050967218811537124,
      "grad_norm": 4.103593349456787,
      "learning_rate": 0.00019660475951452437,
      "loss": 0.3054,
      "step": 1320
    },
    {
      "epoch": 0.05135333410556392,
      "grad_norm": 9.129929542541504,
      "learning_rate": 0.00019657901849492258,
      "loss": 0.5554,
      "step": 1330
    },
    {
      "epoch": 0.051739449399590715,
      "grad_norm": 6.979663372039795,
      "learning_rate": 0.0001965532774753208,
      "loss": 0.388,
      "step": 1340
    },
    {
      "epoch": 0.05212556469361752,
      "grad_norm": 6.329915523529053,
      "learning_rate": 0.000196527536455719,
      "loss": 0.8894,
      "step": 1350
    },
    {
      "epoch": 0.05251167998764431,
      "grad_norm": 0.2686227262020111,
      "learning_rate": 0.00019650179543611725,
      "loss": 0.5032,
      "step": 1360
    },
    {
      "epoch": 0.05289779528167111,
      "grad_norm": 4.818896770477295,
      "learning_rate": 0.00019647605441651544,
      "loss": 0.3417,
      "step": 1370
    },
    {
      "epoch": 0.0532839105756979,
      "grad_norm": 6.252008438110352,
      "learning_rate": 0.00019645031339691368,
      "loss": 0.398,
      "step": 1380
    },
    {
      "epoch": 0.0536700258697247,
      "grad_norm": 1.2734620571136475,
      "learning_rate": 0.00019642457237731186,
      "loss": 0.5369,
      "step": 1390
    },
    {
      "epoch": 0.054056141163751494,
      "grad_norm": 6.511690616607666,
      "learning_rate": 0.00019639883135771008,
      "loss": 0.4646,
      "step": 1400
    },
    {
      "epoch": 0.054442256457778296,
      "grad_norm": 3.2352371215820312,
      "learning_rate": 0.00019637309033810832,
      "loss": 0.3805,
      "step": 1410
    },
    {
      "epoch": 0.05482837175180509,
      "grad_norm": 1.0574132204055786,
      "learning_rate": 0.0001963473493185065,
      "loss": 0.4572,
      "step": 1420
    },
    {
      "epoch": 0.055214487045831886,
      "grad_norm": 2.8525452613830566,
      "learning_rate": 0.00019632160829890474,
      "loss": 0.4417,
      "step": 1430
    },
    {
      "epoch": 0.05560060233985868,
      "grad_norm": 3.588179111480713,
      "learning_rate": 0.00019629586727930293,
      "loss": 0.6214,
      "step": 1440
    },
    {
      "epoch": 0.05598671763388548,
      "grad_norm": 3.969320058822632,
      "learning_rate": 0.00019627012625970117,
      "loss": 0.6114,
      "step": 1450
    },
    {
      "epoch": 0.05637283292791227,
      "grad_norm": 3.465053081512451,
      "learning_rate": 0.00019624438524009936,
      "loss": 0.6066,
      "step": 1460
    },
    {
      "epoch": 0.05675894822193907,
      "grad_norm": 3.5419201850891113,
      "learning_rate": 0.00019621864422049757,
      "loss": 0.3906,
      "step": 1470
    },
    {
      "epoch": 0.05714506351596587,
      "grad_norm": 3.4580233097076416,
      "learning_rate": 0.0001961929032008958,
      "loss": 0.9283,
      "step": 1480
    },
    {
      "epoch": 0.057531178809992665,
      "grad_norm": 4.222144603729248,
      "learning_rate": 0.000196167162181294,
      "loss": 0.4225,
      "step": 1490
    },
    {
      "epoch": 0.05791729410401946,
      "grad_norm": 0.8072681427001953,
      "learning_rate": 0.00019614142116169224,
      "loss": 0.5012,
      "step": 1500
    },
    {
      "epoch": 0.058303409398046256,
      "grad_norm": 2.827258348464966,
      "learning_rate": 0.00019611568014209042,
      "loss": 0.4333,
      "step": 1510
    },
    {
      "epoch": 0.05868952469207305,
      "grad_norm": 1.3494776487350464,
      "learning_rate": 0.00019608993912248866,
      "loss": 0.3019,
      "step": 1520
    },
    {
      "epoch": 0.05907563998609985,
      "grad_norm": 4.3279900550842285,
      "learning_rate": 0.00019606419810288688,
      "loss": 0.4807,
      "step": 1530
    },
    {
      "epoch": 0.05946175528012665,
      "grad_norm": 3.8996474742889404,
      "learning_rate": 0.0001960384570832851,
      "loss": 0.4876,
      "step": 1540
    },
    {
      "epoch": 0.059847870574153444,
      "grad_norm": 5.255978584289551,
      "learning_rate": 0.0001960127160636833,
      "loss": 0.4661,
      "step": 1550
    },
    {
      "epoch": 0.06023398586818024,
      "grad_norm": 5.172120094299316,
      "learning_rate": 0.0001959869750440815,
      "loss": 0.4885,
      "step": 1560
    },
    {
      "epoch": 0.060620101162207035,
      "grad_norm": 5.385959625244141,
      "learning_rate": 0.00019596123402447973,
      "loss": 0.2995,
      "step": 1570
    },
    {
      "epoch": 0.06100621645623383,
      "grad_norm": 3.9922871589660645,
      "learning_rate": 0.00019593549300487792,
      "loss": 0.4568,
      "step": 1580
    },
    {
      "epoch": 0.061392331750260626,
      "grad_norm": 6.048642158508301,
      "learning_rate": 0.00019590975198527616,
      "loss": 0.4649,
      "step": 1590
    },
    {
      "epoch": 0.06177844704428742,
      "grad_norm": 1.0315563678741455,
      "learning_rate": 0.00019588401096567437,
      "loss": 0.5175,
      "step": 1600
    },
    {
      "epoch": 0.06216456233831422,
      "grad_norm": 12.403678894042969,
      "learning_rate": 0.00019585826994607258,
      "loss": 0.523,
      "step": 1610
    },
    {
      "epoch": 0.06255067763234101,
      "grad_norm": 6.127188205718994,
      "learning_rate": 0.0001958325289264708,
      "loss": 0.5861,
      "step": 1620
    },
    {
      "epoch": 0.06293679292636781,
      "grad_norm": 6.398592948913574,
      "learning_rate": 0.00019580678790686898,
      "loss": 0.7471,
      "step": 1630
    },
    {
      "epoch": 0.06332290822039462,
      "grad_norm": 4.127200603485107,
      "learning_rate": 0.00019578104688726722,
      "loss": 0.4921,
      "step": 1640
    },
    {
      "epoch": 0.06370902351442141,
      "grad_norm": 4.601541042327881,
      "learning_rate": 0.0001957553058676654,
      "loss": 0.3682,
      "step": 1650
    },
    {
      "epoch": 0.06409513880844821,
      "grad_norm": 6.32781457901001,
      "learning_rate": 0.00019572956484806365,
      "loss": 0.3748,
      "step": 1660
    },
    {
      "epoch": 0.064481254102475,
      "grad_norm": 3.7280173301696777,
      "learning_rate": 0.00019570382382846186,
      "loss": 0.5912,
      "step": 1670
    },
    {
      "epoch": 0.0648673693965018,
      "grad_norm": 6.7821946144104,
      "learning_rate": 0.00019567808280886008,
      "loss": 0.4073,
      "step": 1680
    },
    {
      "epoch": 0.0652534846905286,
      "grad_norm": 1.4645791053771973,
      "learning_rate": 0.0001956523417892583,
      "loss": 0.7164,
      "step": 1690
    },
    {
      "epoch": 0.06563959998455539,
      "grad_norm": 2.367361545562744,
      "learning_rate": 0.00019562660076965648,
      "loss": 0.3859,
      "step": 1700
    },
    {
      "epoch": 0.06602571527858218,
      "grad_norm": 2.198493480682373,
      "learning_rate": 0.00019560085975005472,
      "loss": 0.4928,
      "step": 1710
    },
    {
      "epoch": 0.06641183057260898,
      "grad_norm": 1.882567048072815,
      "learning_rate": 0.00019557511873045293,
      "loss": 0.5861,
      "step": 1720
    },
    {
      "epoch": 0.06679794586663577,
      "grad_norm": 6.324089527130127,
      "learning_rate": 0.00019554937771085114,
      "loss": 0.6249,
      "step": 1730
    },
    {
      "epoch": 0.06718406116066257,
      "grad_norm": 4.283392906188965,
      "learning_rate": 0.00019552363669124936,
      "loss": 0.5403,
      "step": 1740
    },
    {
      "epoch": 0.06757017645468937,
      "grad_norm": 4.464428424835205,
      "learning_rate": 0.00019549789567164757,
      "loss": 0.5815,
      "step": 1750
    },
    {
      "epoch": 0.06795629174871616,
      "grad_norm": 0.32923218607902527,
      "learning_rate": 0.00019547215465204578,
      "loss": 0.3791,
      "step": 1760
    },
    {
      "epoch": 0.06834240704274297,
      "grad_norm": 5.255763053894043,
      "learning_rate": 0.00019544641363244397,
      "loss": 0.4252,
      "step": 1770
    },
    {
      "epoch": 0.06872852233676977,
      "grad_norm": 2.1615116596221924,
      "learning_rate": 0.0001954206726128422,
      "loss": 0.4457,
      "step": 1780
    },
    {
      "epoch": 0.06911463763079656,
      "grad_norm": 2.0209217071533203,
      "learning_rate": 0.00019539493159324042,
      "loss": 0.4377,
      "step": 1790
    },
    {
      "epoch": 0.06950075292482336,
      "grad_norm": 8.12317943572998,
      "learning_rate": 0.00019536919057363864,
      "loss": 0.4025,
      "step": 1800
    },
    {
      "epoch": 0.06988686821885015,
      "grad_norm": 1.7034660577774048,
      "learning_rate": 0.00019534344955403685,
      "loss": 0.2915,
      "step": 1810
    },
    {
      "epoch": 0.07027298351287695,
      "grad_norm": 4.640942096710205,
      "learning_rate": 0.00019531770853443506,
      "loss": 0.6588,
      "step": 1820
    },
    {
      "epoch": 0.07065909880690374,
      "grad_norm": 4.202883243560791,
      "learning_rate": 0.00019529196751483328,
      "loss": 0.4442,
      "step": 1830
    },
    {
      "epoch": 0.07104521410093054,
      "grad_norm": 3.26898193359375,
      "learning_rate": 0.00019526622649523146,
      "loss": 0.5065,
      "step": 1840
    },
    {
      "epoch": 0.07143132939495733,
      "grad_norm": 8.189995765686035,
      "learning_rate": 0.0001952404854756297,
      "loss": 0.5258,
      "step": 1850
    },
    {
      "epoch": 0.07181744468898413,
      "grad_norm": 3.2618284225463867,
      "learning_rate": 0.00019521474445602792,
      "loss": 0.5037,
      "step": 1860
    },
    {
      "epoch": 0.07220355998301092,
      "grad_norm": 2.168548583984375,
      "learning_rate": 0.00019518900343642613,
      "loss": 0.4887,
      "step": 1870
    },
    {
      "epoch": 0.07258967527703772,
      "grad_norm": 2.2029404640197754,
      "learning_rate": 0.00019516326241682434,
      "loss": 0.4646,
      "step": 1880
    },
    {
      "epoch": 0.07297579057106451,
      "grad_norm": 1.561713695526123,
      "learning_rate": 0.00019513752139722256,
      "loss": 0.432,
      "step": 1890
    },
    {
      "epoch": 0.07336190586509132,
      "grad_norm": 3.428372621536255,
      "learning_rate": 0.00019511178037762077,
      "loss": 0.4124,
      "step": 1900
    },
    {
      "epoch": 0.07374802115911812,
      "grad_norm": 5.6706671714782715,
      "learning_rate": 0.00019508603935801898,
      "loss": 0.4431,
      "step": 1910
    },
    {
      "epoch": 0.07413413645314491,
      "grad_norm": 8.872734069824219,
      "learning_rate": 0.0001950602983384172,
      "loss": 0.7001,
      "step": 1920
    },
    {
      "epoch": 0.07452025174717171,
      "grad_norm": 1.6821974515914917,
      "learning_rate": 0.0001950345573188154,
      "loss": 0.4204,
      "step": 1930
    },
    {
      "epoch": 0.0749063670411985,
      "grad_norm": 3.117480993270874,
      "learning_rate": 0.00019500881629921362,
      "loss": 0.3748,
      "step": 1940
    },
    {
      "epoch": 0.0752924823352253,
      "grad_norm": 0.8384984731674194,
      "learning_rate": 0.00019498307527961184,
      "loss": 0.2636,
      "step": 1950
    },
    {
      "epoch": 0.0756785976292521,
      "grad_norm": 2.8956708908081055,
      "learning_rate": 0.00019495733426001005,
      "loss": 0.4514,
      "step": 1960
    },
    {
      "epoch": 0.07606471292327889,
      "grad_norm": 11.233087539672852,
      "learning_rate": 0.00019493159324040826,
      "loss": 0.4002,
      "step": 1970
    },
    {
      "epoch": 0.07645082821730569,
      "grad_norm": 4.066893100738525,
      "learning_rate": 0.00019490585222080648,
      "loss": 0.4449,
      "step": 1980
    },
    {
      "epoch": 0.07683694351133248,
      "grad_norm": 4.854077339172363,
      "learning_rate": 0.0001948801112012047,
      "loss": 0.4961,
      "step": 1990
    },
    {
      "epoch": 0.07722305880535928,
      "grad_norm": 2.5722827911376953,
      "learning_rate": 0.0001948543701816029,
      "loss": 0.3743,
      "step": 2000
    },
    {
      "epoch": 0.07760917409938607,
      "grad_norm": 5.842077255249023,
      "learning_rate": 0.00019482862916200112,
      "loss": 0.2906,
      "step": 2010
    },
    {
      "epoch": 0.07799528939341287,
      "grad_norm": 6.163092136383057,
      "learning_rate": 0.00019480288814239933,
      "loss": 0.4374,
      "step": 2020
    },
    {
      "epoch": 0.07838140468743968,
      "grad_norm": 4.589334487915039,
      "learning_rate": 0.00019477714712279754,
      "loss": 0.484,
      "step": 2030
    },
    {
      "epoch": 0.07876751998146647,
      "grad_norm": 6.951212406158447,
      "learning_rate": 0.00019475140610319576,
      "loss": 0.5767,
      "step": 2040
    },
    {
      "epoch": 0.07915363527549327,
      "grad_norm": 3.3662521839141846,
      "learning_rate": 0.00019472566508359397,
      "loss": 0.5566,
      "step": 2050
    },
    {
      "epoch": 0.07953975056952006,
      "grad_norm": 1.6602391004562378,
      "learning_rate": 0.00019469992406399218,
      "loss": 0.1436,
      "step": 2060
    },
    {
      "epoch": 0.07992586586354686,
      "grad_norm": 6.451857089996338,
      "learning_rate": 0.0001946741830443904,
      "loss": 0.3778,
      "step": 2070
    },
    {
      "epoch": 0.08031198115757365,
      "grad_norm": 3.7249560356140137,
      "learning_rate": 0.0001946484420247886,
      "loss": 0.5391,
      "step": 2080
    },
    {
      "epoch": 0.08069809645160045,
      "grad_norm": 4.138098239898682,
      "learning_rate": 0.00019462270100518682,
      "loss": 0.3598,
      "step": 2090
    },
    {
      "epoch": 0.08108421174562724,
      "grad_norm": 3.224778175354004,
      "learning_rate": 0.00019459695998558504,
      "loss": 0.2967,
      "step": 2100
    },
    {
      "epoch": 0.08147032703965404,
      "grad_norm": 1.3951358795166016,
      "learning_rate": 0.00019457121896598325,
      "loss": 0.2698,
      "step": 2110
    },
    {
      "epoch": 0.08185644233368083,
      "grad_norm": 5.956802845001221,
      "learning_rate": 0.00019454547794638146,
      "loss": 0.451,
      "step": 2120
    },
    {
      "epoch": 0.08224255762770763,
      "grad_norm": 3.456360101699829,
      "learning_rate": 0.00019451973692677968,
      "loss": 0.3365,
      "step": 2130
    },
    {
      "epoch": 0.08262867292173443,
      "grad_norm": 2.9433653354644775,
      "learning_rate": 0.0001944939959071779,
      "loss": 0.4424,
      "step": 2140
    },
    {
      "epoch": 0.08301478821576122,
      "grad_norm": 3.136000871658325,
      "learning_rate": 0.0001944682548875761,
      "loss": 0.6224,
      "step": 2150
    },
    {
      "epoch": 0.08340090350978802,
      "grad_norm": 2.669084310531616,
      "learning_rate": 0.00019444251386797432,
      "loss": 0.4435,
      "step": 2160
    },
    {
      "epoch": 0.08378701880381483,
      "grad_norm": 4.573731899261475,
      "learning_rate": 0.00019441677284837253,
      "loss": 0.5555,
      "step": 2170
    },
    {
      "epoch": 0.08417313409784162,
      "grad_norm": 6.354156017303467,
      "learning_rate": 0.00019439103182877074,
      "loss": 0.4232,
      "step": 2180
    },
    {
      "epoch": 0.08455924939186842,
      "grad_norm": 2.993691921234131,
      "learning_rate": 0.00019436529080916895,
      "loss": 0.51,
      "step": 2190
    },
    {
      "epoch": 0.08494536468589521,
      "grad_norm": 3.6496782302856445,
      "learning_rate": 0.00019433954978956717,
      "loss": 0.4031,
      "step": 2200
    },
    {
      "epoch": 0.085331479979922,
      "grad_norm": 1.9039051532745361,
      "learning_rate": 0.00019431380876996538,
      "loss": 0.4407,
      "step": 2210
    },
    {
      "epoch": 0.0857175952739488,
      "grad_norm": 2.3211915493011475,
      "learning_rate": 0.00019428806775036362,
      "loss": 0.4057,
      "step": 2220
    },
    {
      "epoch": 0.0861037105679756,
      "grad_norm": 4.883905410766602,
      "learning_rate": 0.0001942623267307618,
      "loss": 0.6223,
      "step": 2230
    },
    {
      "epoch": 0.08648982586200239,
      "grad_norm": 2.164484977722168,
      "learning_rate": 0.00019423658571116005,
      "loss": 0.3141,
      "step": 2240
    },
    {
      "epoch": 0.08687594115602919,
      "grad_norm": 2.2078909873962402,
      "learning_rate": 0.00019421084469155823,
      "loss": 0.3523,
      "step": 2250
    },
    {
      "epoch": 0.08726205645005598,
      "grad_norm": 1.0987967252731323,
      "learning_rate": 0.00019418510367195645,
      "loss": 0.4013,
      "step": 2260
    },
    {
      "epoch": 0.08764817174408278,
      "grad_norm": 2.418327569961548,
      "learning_rate": 0.00019415936265235466,
      "loss": 0.581,
      "step": 2270
    },
    {
      "epoch": 0.08803428703810957,
      "grad_norm": 4.029652118682861,
      "learning_rate": 0.00019413362163275287,
      "loss": 0.5244,
      "step": 2280
    },
    {
      "epoch": 0.08842040233213637,
      "grad_norm": 3.4661777019500732,
      "learning_rate": 0.00019410788061315112,
      "loss": 0.4531,
      "step": 2290
    },
    {
      "epoch": 0.08880651762616318,
      "grad_norm": 1.0475856065750122,
      "learning_rate": 0.0001940821395935493,
      "loss": 0.4362,
      "step": 2300
    },
    {
      "epoch": 0.08919263292018997,
      "grad_norm": 4.021854400634766,
      "learning_rate": 0.00019405639857394754,
      "loss": 0.4532,
      "step": 2310
    },
    {
      "epoch": 0.08957874821421677,
      "grad_norm": 1.836438536643982,
      "learning_rate": 0.00019403065755434573,
      "loss": 0.6858,
      "step": 2320
    },
    {
      "epoch": 0.08996486350824356,
      "grad_norm": 2.5251567363739014,
      "learning_rate": 0.00019400491653474394,
      "loss": 0.3619,
      "step": 2330
    },
    {
      "epoch": 0.09035097880227036,
      "grad_norm": 3.067208766937256,
      "learning_rate": 0.00019397917551514215,
      "loss": 0.7376,
      "step": 2340
    },
    {
      "epoch": 0.09073709409629716,
      "grad_norm": 0.9124518036842346,
      "learning_rate": 0.00019395343449554037,
      "loss": 0.4193,
      "step": 2350
    },
    {
      "epoch": 0.09112320939032395,
      "grad_norm": 3.8170812129974365,
      "learning_rate": 0.0001939276934759386,
      "loss": 0.5393,
      "step": 2360
    },
    {
      "epoch": 0.09150932468435075,
      "grad_norm": 8.19250202178955,
      "learning_rate": 0.0001939019524563368,
      "loss": 0.424,
      "step": 2370
    },
    {
      "epoch": 0.09189543997837754,
      "grad_norm": 4.459112167358398,
      "learning_rate": 0.00019387621143673503,
      "loss": 0.3278,
      "step": 2380
    },
    {
      "epoch": 0.09228155527240434,
      "grad_norm": 5.578339576721191,
      "learning_rate": 0.00019385047041713322,
      "loss": 0.5223,
      "step": 2390
    },
    {
      "epoch": 0.09266767056643113,
      "grad_norm": 1.3707878589630127,
      "learning_rate": 0.00019382472939753143,
      "loss": 0.3004,
      "step": 2400
    },
    {
      "epoch": 0.09305378586045793,
      "grad_norm": 5.0041184425354,
      "learning_rate": 0.00019379898837792967,
      "loss": 0.4378,
      "step": 2410
    },
    {
      "epoch": 0.09343990115448472,
      "grad_norm": 5.668384552001953,
      "learning_rate": 0.00019377324735832786,
      "loss": 0.499,
      "step": 2420
    },
    {
      "epoch": 0.09382601644851153,
      "grad_norm": 5.605838775634766,
      "learning_rate": 0.0001937475063387261,
      "loss": 0.5642,
      "step": 2430
    },
    {
      "epoch": 0.09421213174253833,
      "grad_norm": 5.055904865264893,
      "learning_rate": 0.0001937217653191243,
      "loss": 0.6225,
      "step": 2440
    },
    {
      "epoch": 0.09459824703656512,
      "grad_norm": 3.1779348850250244,
      "learning_rate": 0.00019369602429952253,
      "loss": 0.3673,
      "step": 2450
    },
    {
      "epoch": 0.09498436233059192,
      "grad_norm": 2.540269136428833,
      "learning_rate": 0.00019367028327992071,
      "loss": 0.3499,
      "step": 2460
    },
    {
      "epoch": 0.09537047762461871,
      "grad_norm": 2.284114122390747,
      "learning_rate": 0.00019364454226031893,
      "loss": 0.5405,
      "step": 2470
    },
    {
      "epoch": 0.09575659291864551,
      "grad_norm": 6.752682685852051,
      "learning_rate": 0.00019361880124071717,
      "loss": 0.5594,
      "step": 2480
    },
    {
      "epoch": 0.0961427082126723,
      "grad_norm": 2.5659310817718506,
      "learning_rate": 0.00019359306022111535,
      "loss": 0.5872,
      "step": 2490
    },
    {
      "epoch": 0.0965288235066991,
      "grad_norm": 4.503110885620117,
      "learning_rate": 0.0001935673192015136,
      "loss": 0.5171,
      "step": 2500
    },
    {
      "epoch": 0.0969149388007259,
      "grad_norm": 1.7715080976486206,
      "learning_rate": 0.00019354157818191178,
      "loss": 0.6131,
      "step": 2510
    },
    {
      "epoch": 0.09730105409475269,
      "grad_norm": 4.479000568389893,
      "learning_rate": 0.00019351583716231002,
      "loss": 0.4396,
      "step": 2520
    },
    {
      "epoch": 0.09768716938877949,
      "grad_norm": 0.9424387812614441,
      "learning_rate": 0.00019349009614270823,
      "loss": 0.3166,
      "step": 2530
    },
    {
      "epoch": 0.09807328468280628,
      "grad_norm": 3.792689800262451,
      "learning_rate": 0.00019346435512310642,
      "loss": 0.2591,
      "step": 2540
    },
    {
      "epoch": 0.09845939997683308,
      "grad_norm": 2.9132003784179688,
      "learning_rate": 0.00019343861410350466,
      "loss": 0.3523,
      "step": 2550
    },
    {
      "epoch": 0.09884551527085988,
      "grad_norm": 1.6421749591827393,
      "learning_rate": 0.00019341287308390285,
      "loss": 0.2805,
      "step": 2560
    },
    {
      "epoch": 0.09923163056488668,
      "grad_norm": 1.9469813108444214,
      "learning_rate": 0.0001933871320643011,
      "loss": 0.3929,
      "step": 2570
    },
    {
      "epoch": 0.09961774585891348,
      "grad_norm": 3.081062078475952,
      "learning_rate": 0.00019336139104469927,
      "loss": 0.569,
      "step": 2580
    },
    {
      "epoch": 0.10000386115294027,
      "grad_norm": 4.728143215179443,
      "learning_rate": 0.00019333565002509751,
      "loss": 0.3443,
      "step": 2590
    },
    {
      "epoch": 0.10038997644696707,
      "grad_norm": 2.4117422103881836,
      "learning_rate": 0.00019330990900549573,
      "loss": 0.4492,
      "step": 2600
    },
    {
      "epoch": 0.10077609174099386,
      "grad_norm": 5.794168472290039,
      "learning_rate": 0.00019328416798589391,
      "loss": 0.5088,
      "step": 2610
    },
    {
      "epoch": 0.10116220703502066,
      "grad_norm": 1.0662094354629517,
      "learning_rate": 0.00019325842696629215,
      "loss": 0.4683,
      "step": 2620
    },
    {
      "epoch": 0.10154832232904745,
      "grad_norm": 2.13590931892395,
      "learning_rate": 0.00019323268594669034,
      "loss": 0.5833,
      "step": 2630
    },
    {
      "epoch": 0.10193443762307425,
      "grad_norm": 7.52834415435791,
      "learning_rate": 0.00019320694492708858,
      "loss": 0.54,
      "step": 2640
    },
    {
      "epoch": 0.10232055291710104,
      "grad_norm": 6.155847072601318,
      "learning_rate": 0.00019318120390748677,
      "loss": 0.6809,
      "step": 2650
    },
    {
      "epoch": 0.10270666821112784,
      "grad_norm": 6.527890205383301,
      "learning_rate": 0.000193155462887885,
      "loss": 0.5239,
      "step": 2660
    },
    {
      "epoch": 0.10309278350515463,
      "grad_norm": 3.3918986320495605,
      "learning_rate": 0.00019312972186828322,
      "loss": 0.4636,
      "step": 2670
    },
    {
      "epoch": 0.10347889879918143,
      "grad_norm": 2.0933191776275635,
      "learning_rate": 0.0001931039808486814,
      "loss": 0.5288,
      "step": 2680
    },
    {
      "epoch": 0.10386501409320824,
      "grad_norm": 2.4386465549468994,
      "learning_rate": 0.00019307823982907965,
      "loss": 0.5496,
      "step": 2690
    },
    {
      "epoch": 0.10425112938723503,
      "grad_norm": 2.885315418243408,
      "learning_rate": 0.00019305249880947783,
      "loss": 0.5928,
      "step": 2700
    },
    {
      "epoch": 0.10463724468126183,
      "grad_norm": 4.986598968505859,
      "learning_rate": 0.00019302675778987607,
      "loss": 0.3513,
      "step": 2710
    },
    {
      "epoch": 0.10502335997528862,
      "grad_norm": 2.7999277114868164,
      "learning_rate": 0.0001930010167702743,
      "loss": 0.4484,
      "step": 2720
    },
    {
      "epoch": 0.10540947526931542,
      "grad_norm": 1.6467676162719727,
      "learning_rate": 0.0001929752757506725,
      "loss": 0.4729,
      "step": 2730
    },
    {
      "epoch": 0.10579559056334222,
      "grad_norm": 2.168477773666382,
      "learning_rate": 0.00019294953473107071,
      "loss": 0.3579,
      "step": 2740
    },
    {
      "epoch": 0.10618170585736901,
      "grad_norm": 1.5439807176589966,
      "learning_rate": 0.00019292379371146893,
      "loss": 0.5715,
      "step": 2750
    },
    {
      "epoch": 0.1065678211513958,
      "grad_norm": 1.2175947427749634,
      "learning_rate": 0.00019289805269186714,
      "loss": 0.4059,
      "step": 2760
    },
    {
      "epoch": 0.1069539364454226,
      "grad_norm": 2.649782419204712,
      "learning_rate": 0.00019287231167226533,
      "loss": 0.7864,
      "step": 2770
    },
    {
      "epoch": 0.1073400517394494,
      "grad_norm": 3.698002815246582,
      "learning_rate": 0.00019284657065266357,
      "loss": 0.5107,
      "step": 2780
    },
    {
      "epoch": 0.10772616703347619,
      "grad_norm": 1.7418729066848755,
      "learning_rate": 0.00019282082963306178,
      "loss": 0.332,
      "step": 2790
    },
    {
      "epoch": 0.10811228232750299,
      "grad_norm": 2.2264151573181152,
      "learning_rate": 0.00019279508861346,
      "loss": 0.3944,
      "step": 2800
    },
    {
      "epoch": 0.10849839762152978,
      "grad_norm": 1.3407092094421387,
      "learning_rate": 0.0001927693475938582,
      "loss": 0.4603,
      "step": 2810
    },
    {
      "epoch": 0.10888451291555659,
      "grad_norm": 0.3719189763069153,
      "learning_rate": 0.00019274360657425642,
      "loss": 0.496,
      "step": 2820
    },
    {
      "epoch": 0.10927062820958339,
      "grad_norm": 4.728814125061035,
      "learning_rate": 0.00019271786555465463,
      "loss": 0.4079,
      "step": 2830
    },
    {
      "epoch": 0.10965674350361018,
      "grad_norm": 5.007620334625244,
      "learning_rate": 0.00019269212453505285,
      "loss": 0.5221,
      "step": 2840
    },
    {
      "epoch": 0.11004285879763698,
      "grad_norm": 2.7476320266723633,
      "learning_rate": 0.00019266638351545106,
      "loss": 0.3705,
      "step": 2850
    },
    {
      "epoch": 0.11042897409166377,
      "grad_norm": 2.2960126399993896,
      "learning_rate": 0.00019264064249584927,
      "loss": 0.6656,
      "step": 2860
    },
    {
      "epoch": 0.11081508938569057,
      "grad_norm": 0.9589812755584717,
      "learning_rate": 0.0001926149014762475,
      "loss": 0.6977,
      "step": 2870
    },
    {
      "epoch": 0.11120120467971736,
      "grad_norm": 2.274984121322632,
      "learning_rate": 0.0001925891604566457,
      "loss": 0.4237,
      "step": 2880
    },
    {
      "epoch": 0.11158731997374416,
      "grad_norm": 1.8849111795425415,
      "learning_rate": 0.00019256341943704391,
      "loss": 0.2357,
      "step": 2890
    },
    {
      "epoch": 0.11197343526777095,
      "grad_norm": 2.7264740467071533,
      "learning_rate": 0.00019253767841744213,
      "loss": 0.3422,
      "step": 2900
    },
    {
      "epoch": 0.11235955056179775,
      "grad_norm": 3.832827568054199,
      "learning_rate": 0.00019251193739784034,
      "loss": 0.3861,
      "step": 2910
    },
    {
      "epoch": 0.11274566585582455,
      "grad_norm": 2.3612313270568848,
      "learning_rate": 0.00019248619637823855,
      "loss": 0.3299,
      "step": 2920
    },
    {
      "epoch": 0.11313178114985134,
      "grad_norm": 2.2509396076202393,
      "learning_rate": 0.00019246045535863677,
      "loss": 0.6027,
      "step": 2930
    },
    {
      "epoch": 0.11351789644387814,
      "grad_norm": 2.7600464820861816,
      "learning_rate": 0.00019243471433903498,
      "loss": 0.3864,
      "step": 2940
    },
    {
      "epoch": 0.11390401173790494,
      "grad_norm": 5.956289768218994,
      "learning_rate": 0.0001924089733194332,
      "loss": 0.4669,
      "step": 2950
    },
    {
      "epoch": 0.11429012703193174,
      "grad_norm": 4.651761531829834,
      "learning_rate": 0.0001923832322998314,
      "loss": 0.5774,
      "step": 2960
    },
    {
      "epoch": 0.11467624232595854,
      "grad_norm": 1.1770590543746948,
      "learning_rate": 0.00019235749128022962,
      "loss": 0.3951,
      "step": 2970
    },
    {
      "epoch": 0.11506235761998533,
      "grad_norm": 0.8117956519126892,
      "learning_rate": 0.00019233175026062783,
      "loss": 0.3919,
      "step": 2980
    },
    {
      "epoch": 0.11544847291401213,
      "grad_norm": 1.318812608718872,
      "learning_rate": 0.00019230600924102605,
      "loss": 0.2905,
      "step": 2990
    },
    {
      "epoch": 0.11583458820803892,
      "grad_norm": 1.981382966041565,
      "learning_rate": 0.00019228026822142426,
      "loss": 0.5312,
      "step": 3000
    },
    {
      "epoch": 0.11622070350206572,
      "grad_norm": 1.9083003997802734,
      "learning_rate": 0.00019225452720182247,
      "loss": 0.3129,
      "step": 3010
    },
    {
      "epoch": 0.11660681879609251,
      "grad_norm": 2.7581653594970703,
      "learning_rate": 0.0001922287861822207,
      "loss": 0.3954,
      "step": 3020
    },
    {
      "epoch": 0.11699293409011931,
      "grad_norm": 1.065090537071228,
      "learning_rate": 0.0001922030451626189,
      "loss": 0.2722,
      "step": 3030
    },
    {
      "epoch": 0.1173790493841461,
      "grad_norm": 0.600864827632904,
      "learning_rate": 0.0001921773041430171,
      "loss": 0.493,
      "step": 3040
    },
    {
      "epoch": 0.1177651646781729,
      "grad_norm": 4.4449052810668945,
      "learning_rate": 0.00019215156312341533,
      "loss": 0.4982,
      "step": 3050
    },
    {
      "epoch": 0.1181512799721997,
      "grad_norm": 3.34476637840271,
      "learning_rate": 0.00019212582210381354,
      "loss": 0.3204,
      "step": 3060
    },
    {
      "epoch": 0.11853739526622649,
      "grad_norm": 1.7432445287704468,
      "learning_rate": 0.00019210008108421175,
      "loss": 0.6601,
      "step": 3070
    },
    {
      "epoch": 0.1189235105602533,
      "grad_norm": 1.908324956893921,
      "learning_rate": 0.00019207434006460997,
      "loss": 0.5947,
      "step": 3080
    },
    {
      "epoch": 0.1193096258542801,
      "grad_norm": 5.373056888580322,
      "learning_rate": 0.00019204859904500818,
      "loss": 0.5169,
      "step": 3090
    },
    {
      "epoch": 0.11969574114830689,
      "grad_norm": 0.861535370349884,
      "learning_rate": 0.0001920228580254064,
      "loss": 0.3829,
      "step": 3100
    },
    {
      "epoch": 0.12008185644233368,
      "grad_norm": 1.2700462341308594,
      "learning_rate": 0.0001919971170058046,
      "loss": 0.4475,
      "step": 3110
    },
    {
      "epoch": 0.12046797173636048,
      "grad_norm": 2.9959444999694824,
      "learning_rate": 0.00019197137598620282,
      "loss": 0.4704,
      "step": 3120
    },
    {
      "epoch": 0.12085408703038727,
      "grad_norm": 0.280109167098999,
      "learning_rate": 0.00019194563496660103,
      "loss": 0.3732,
      "step": 3130
    },
    {
      "epoch": 0.12124020232441407,
      "grad_norm": 0.9746024012565613,
      "learning_rate": 0.00019191989394699925,
      "loss": 0.4693,
      "step": 3140
    },
    {
      "epoch": 0.12162631761844087,
      "grad_norm": 1.7267721891403198,
      "learning_rate": 0.00019189415292739746,
      "loss": 0.4509,
      "step": 3150
    },
    {
      "epoch": 0.12201243291246766,
      "grad_norm": 2.1759033203125,
      "learning_rate": 0.00019186841190779567,
      "loss": 0.428,
      "step": 3160
    },
    {
      "epoch": 0.12239854820649446,
      "grad_norm": 1.270711064338684,
      "learning_rate": 0.0001918426708881939,
      "loss": 0.4262,
      "step": 3170
    },
    {
      "epoch": 0.12278466350052125,
      "grad_norm": 3.7549123764038086,
      "learning_rate": 0.0001918169298685921,
      "loss": 0.4758,
      "step": 3180
    },
    {
      "epoch": 0.12317077879454805,
      "grad_norm": 1.6550017595291138,
      "learning_rate": 0.0001917911888489903,
      "loss": 0.452,
      "step": 3190
    },
    {
      "epoch": 0.12355689408857484,
      "grad_norm": 3.7151713371276855,
      "learning_rate": 0.00019176544782938853,
      "loss": 0.4844,
      "step": 3200
    },
    {
      "epoch": 0.12394300938260165,
      "grad_norm": 0.5354440808296204,
      "learning_rate": 0.00019173970680978674,
      "loss": 0.4432,
      "step": 3210
    },
    {
      "epoch": 0.12432912467662845,
      "grad_norm": 3.2494261264801025,
      "learning_rate": 0.00019171396579018498,
      "loss": 0.587,
      "step": 3220
    },
    {
      "epoch": 0.12471523997065524,
      "grad_norm": 1.2129877805709839,
      "learning_rate": 0.00019168822477058317,
      "loss": 0.4662,
      "step": 3230
    },
    {
      "epoch": 0.12510135526468202,
      "grad_norm": 3.723402500152588,
      "learning_rate": 0.00019166248375098138,
      "loss": 0.5261,
      "step": 3240
    },
    {
      "epoch": 0.12548747055870882,
      "grad_norm": 1.596259593963623,
      "learning_rate": 0.0001916367427313796,
      "loss": 0.2802,
      "step": 3250
    },
    {
      "epoch": 0.12587358585273561,
      "grad_norm": 5.5710320472717285,
      "learning_rate": 0.0001916110017117778,
      "loss": 0.5246,
      "step": 3260
    },
    {
      "epoch": 0.1262597011467624,
      "grad_norm": 4.490183353424072,
      "learning_rate": 0.00019158526069217602,
      "loss": 0.4929,
      "step": 3270
    },
    {
      "epoch": 0.12664581644078923,
      "grad_norm": 2.482572555541992,
      "learning_rate": 0.00019155951967257423,
      "loss": 0.3677,
      "step": 3280
    },
    {
      "epoch": 0.12703193173481603,
      "grad_norm": 3.348520517349243,
      "learning_rate": 0.00019153377865297247,
      "loss": 0.6471,
      "step": 3290
    },
    {
      "epoch": 0.12741804702884282,
      "grad_norm": 7.735306262969971,
      "learning_rate": 0.00019150803763337066,
      "loss": 0.6057,
      "step": 3300
    },
    {
      "epoch": 0.12780416232286962,
      "grad_norm": 2.120649576187134,
      "learning_rate": 0.00019148229661376887,
      "loss": 0.5408,
      "step": 3310
    },
    {
      "epoch": 0.12819027761689641,
      "grad_norm": 10.259540557861328,
      "learning_rate": 0.00019145655559416709,
      "loss": 0.4753,
      "step": 3320
    },
    {
      "epoch": 0.1285763929109232,
      "grad_norm": 4.094576358795166,
      "learning_rate": 0.0001914308145745653,
      "loss": 0.3832,
      "step": 3330
    },
    {
      "epoch": 0.12896250820495,
      "grad_norm": 3.1248559951782227,
      "learning_rate": 0.00019140507355496354,
      "loss": 0.5631,
      "step": 3340
    },
    {
      "epoch": 0.1293486234989768,
      "grad_norm": 1.2975168228149414,
      "learning_rate": 0.00019137933253536173,
      "loss": 0.5158,
      "step": 3350
    },
    {
      "epoch": 0.1297347387930036,
      "grad_norm": 3.2515244483947754,
      "learning_rate": 0.00019135359151575997,
      "loss": 0.4176,
      "step": 3360
    },
    {
      "epoch": 0.1301208540870304,
      "grad_norm": 2.287757396697998,
      "learning_rate": 0.00019132785049615815,
      "loss": 0.5316,
      "step": 3370
    },
    {
      "epoch": 0.1305069693810572,
      "grad_norm": 8.668967247009277,
      "learning_rate": 0.00019130210947655637,
      "loss": 0.6653,
      "step": 3380
    },
    {
      "epoch": 0.13089308467508398,
      "grad_norm": 4.751536846160889,
      "learning_rate": 0.00019127636845695458,
      "loss": 0.4508,
      "step": 3390
    },
    {
      "epoch": 0.13127919996911078,
      "grad_norm": 3.240792751312256,
      "learning_rate": 0.0001912506274373528,
      "loss": 0.441,
      "step": 3400
    },
    {
      "epoch": 0.13166531526313757,
      "grad_norm": 2.146261215209961,
      "learning_rate": 0.00019122488641775103,
      "loss": 0.3394,
      "step": 3410
    },
    {
      "epoch": 0.13205143055716437,
      "grad_norm": 2.259693145751953,
      "learning_rate": 0.00019119914539814922,
      "loss": 0.4348,
      "step": 3420
    },
    {
      "epoch": 0.13243754585119116,
      "grad_norm": 1.8136098384857178,
      "learning_rate": 0.00019117340437854746,
      "loss": 0.4441,
      "step": 3430
    },
    {
      "epoch": 0.13282366114521796,
      "grad_norm": 1.7324503660202026,
      "learning_rate": 0.00019114766335894565,
      "loss": 0.4725,
      "step": 3440
    },
    {
      "epoch": 0.13320977643924475,
      "grad_norm": 4.709383487701416,
      "learning_rate": 0.00019112192233934389,
      "loss": 0.5383,
      "step": 3450
    },
    {
      "epoch": 0.13359589173327155,
      "grad_norm": 0.3468118906021118,
      "learning_rate": 0.00019109618131974207,
      "loss": 0.3228,
      "step": 3460
    },
    {
      "epoch": 0.13398200702729834,
      "grad_norm": 6.66448974609375,
      "learning_rate": 0.00019107044030014029,
      "loss": 0.4128,
      "step": 3470
    },
    {
      "epoch": 0.13436812232132514,
      "grad_norm": 1.6971935033798218,
      "learning_rate": 0.00019104469928053853,
      "loss": 0.5061,
      "step": 3480
    },
    {
      "epoch": 0.13475423761535194,
      "grad_norm": 0.9180198311805725,
      "learning_rate": 0.0001910189582609367,
      "loss": 0.4445,
      "step": 3490
    },
    {
      "epoch": 0.13514035290937873,
      "grad_norm": 3.1244235038757324,
      "learning_rate": 0.00019099321724133495,
      "loss": 0.4099,
      "step": 3500
    },
    {
      "epoch": 0.13552646820340553,
      "grad_norm": 2.7192864418029785,
      "learning_rate": 0.00019096747622173314,
      "loss": 0.5004,
      "step": 3510
    },
    {
      "epoch": 0.13591258349743232,
      "grad_norm": 2.0905699729919434,
      "learning_rate": 0.00019094173520213138,
      "loss": 0.4277,
      "step": 3520
    },
    {
      "epoch": 0.13629869879145912,
      "grad_norm": 3.3753092288970947,
      "learning_rate": 0.0001909159941825296,
      "loss": 0.4129,
      "step": 3530
    },
    {
      "epoch": 0.13668481408548594,
      "grad_norm": 4.199211120605469,
      "learning_rate": 0.00019089025316292778,
      "loss": 0.5734,
      "step": 3540
    },
    {
      "epoch": 0.13707092937951273,
      "grad_norm": 2.0411245822906494,
      "learning_rate": 0.00019086451214332602,
      "loss": 0.3018,
      "step": 3550
    },
    {
      "epoch": 0.13745704467353953,
      "grad_norm": 17.236717224121094,
      "learning_rate": 0.0001908387711237242,
      "loss": 0.4527,
      "step": 3560
    },
    {
      "epoch": 0.13784315996756633,
      "grad_norm": 1.4575644731521606,
      "learning_rate": 0.00019081303010412245,
      "loss": 0.3773,
      "step": 3570
    },
    {
      "epoch": 0.13822927526159312,
      "grad_norm": 3.926090717315674,
      "learning_rate": 0.00019078728908452063,
      "loss": 0.5316,
      "step": 3580
    },
    {
      "epoch": 0.13861539055561992,
      "grad_norm": 3.1841864585876465,
      "learning_rate": 0.00019076154806491887,
      "loss": 0.3705,
      "step": 3590
    },
    {
      "epoch": 0.1390015058496467,
      "grad_norm": 4.08506441116333,
      "learning_rate": 0.00019073580704531709,
      "loss": 0.4941,
      "step": 3600
    },
    {
      "epoch": 0.1393876211436735,
      "grad_norm": 3.063154458999634,
      "learning_rate": 0.00019071006602571527,
      "loss": 0.4435,
      "step": 3610
    },
    {
      "epoch": 0.1397737364377003,
      "grad_norm": 6.122230529785156,
      "learning_rate": 0.0001906843250061135,
      "loss": 0.5067,
      "step": 3620
    },
    {
      "epoch": 0.1401598517317271,
      "grad_norm": 3.3089540004730225,
      "learning_rate": 0.0001906585839865117,
      "loss": 0.4329,
      "step": 3630
    },
    {
      "epoch": 0.1405459670257539,
      "grad_norm": 1.7245008945465088,
      "learning_rate": 0.00019063284296690994,
      "loss": 0.4502,
      "step": 3640
    },
    {
      "epoch": 0.1409320823197807,
      "grad_norm": 1.7759568691253662,
      "learning_rate": 0.00019060710194730813,
      "loss": 0.2379,
      "step": 3650
    },
| { |
| "epoch": 0.14131819761380748, |
| "grad_norm": 0.432452529668808, |
| "learning_rate": 0.00019058136092770637, |
| "loss": 0.4277, |
| "step": 3660 |
| }, |
| { |
| "epoch": 0.14170431290783428, |
| "grad_norm": 3.311952829360962, |
| "learning_rate": 0.00019055561990810458, |
| "loss": 0.4558, |
| "step": 3670 |
| }, |
| { |
| "epoch": 0.14209042820186107, |
| "grad_norm": 1.9942964315414429, |
| "learning_rate": 0.00019052987888850277, |
| "loss": 0.3349, |
| "step": 3680 |
| }, |
| { |
| "epoch": 0.14247654349588787, |
| "grad_norm": 6.226424217224121, |
| "learning_rate": 0.000190504137868901, |
| "loss": 0.5809, |
| "step": 3690 |
| }, |
| { |
| "epoch": 0.14286265878991466, |
| "grad_norm": 6.223634719848633, |
| "learning_rate": 0.0001904783968492992, |
| "loss": 0.5788, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.14324877408394146, |
| "grad_norm": 0.7370914220809937, |
| "learning_rate": 0.00019045265582969743, |
| "loss": 0.4834, |
| "step": 3710 |
| }, |
| { |
| "epoch": 0.14363488937796826, |
| "grad_norm": 1.745880365371704, |
| "learning_rate": 0.00019042691481009565, |
| "loss": 0.6995, |
| "step": 3720 |
| }, |
| { |
| "epoch": 0.14402100467199505, |
| "grad_norm": 0.8839595913887024, |
| "learning_rate": 0.00019040117379049386, |
| "loss": 0.3526, |
| "step": 3730 |
| }, |
| { |
| "epoch": 0.14440711996602185, |
| "grad_norm": 1.1224008798599243, |
| "learning_rate": 0.00019037543277089207, |
| "loss": 0.3558, |
| "step": 3740 |
| }, |
| { |
| "epoch": 0.14479323526004864, |
| "grad_norm": 1.0473041534423828, |
| "learning_rate": 0.00019034969175129026, |
| "loss": 0.2465, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.14517935055407544, |
| "grad_norm": 3.83192777633667, |
| "learning_rate": 0.0001903239507316885, |
| "loss": 0.4832, |
| "step": 3760 |
| }, |
| { |
| "epoch": 0.14556546584810223, |
| "grad_norm": 3.323885440826416, |
| "learning_rate": 0.00019029820971208669, |
| "loss": 0.4924, |
| "step": 3770 |
| }, |
| { |
| "epoch": 0.14595158114212903, |
| "grad_norm": 3.2334187030792236, |
| "learning_rate": 0.00019027246869248493, |
| "loss": 0.5053, |
| "step": 3780 |
| }, |
| { |
| "epoch": 0.14633769643615582, |
| "grad_norm": 2.280498743057251, |
| "learning_rate": 0.00019024672767288314, |
| "loss": 0.554, |
| "step": 3790 |
| }, |
| { |
| "epoch": 0.14672381173018265, |
| "grad_norm": 4.546648979187012, |
| "learning_rate": 0.00019022098665328135, |
| "loss": 0.3999, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.14710992702420944, |
| "grad_norm": 0.6303244829177856, |
| "learning_rate": 0.00019019524563367957, |
| "loss": 0.4481, |
| "step": 3810 |
| }, |
| { |
| "epoch": 0.14749604231823624, |
| "grad_norm": 2.605196475982666, |
| "learning_rate": 0.00019016950461407775, |
| "loss": 0.3561, |
| "step": 3820 |
| }, |
| { |
| "epoch": 0.14788215761226303, |
| "grad_norm": 3.0562639236450195, |
| "learning_rate": 0.000190143763594476, |
| "loss": 0.5903, |
| "step": 3830 |
| }, |
| { |
| "epoch": 0.14826827290628983, |
| "grad_norm": 11.164155006408691, |
| "learning_rate": 0.0001901180225748742, |
| "loss": 0.4299, |
| "step": 3840 |
| }, |
| { |
| "epoch": 0.14865438820031662, |
| "grad_norm": 4.996811866760254, |
| "learning_rate": 0.00019009228155527242, |
| "loss": 0.4423, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.14904050349434342, |
| "grad_norm": 2.627272844314575, |
| "learning_rate": 0.00019006654053567063, |
| "loss": 0.4875, |
| "step": 3860 |
| }, |
| { |
| "epoch": 0.1494266187883702, |
| "grad_norm": 2.6532809734344482, |
| "learning_rate": 0.00019004079951606885, |
| "loss": 0.5221, |
| "step": 3870 |
| }, |
| { |
| "epoch": 0.149812734082397, |
| "grad_norm": 5.821976661682129, |
| "learning_rate": 0.00019001505849646706, |
| "loss": 0.4793, |
| "step": 3880 |
| }, |
| { |
| "epoch": 0.1501988493764238, |
| "grad_norm": 2.888029098510742, |
| "learning_rate": 0.00018998931747686524, |
| "loss": 0.5784, |
| "step": 3890 |
| }, |
| { |
| "epoch": 0.1505849646704506, |
| "grad_norm": 0.9147624969482422, |
| "learning_rate": 0.00018996357645726349, |
| "loss": 0.5533, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.1509710799644774, |
| "grad_norm": 2.6088199615478516, |
| "learning_rate": 0.0001899378354376617, |
| "loss": 0.5028, |
| "step": 3910 |
| }, |
| { |
| "epoch": 0.1513571952585042, |
| "grad_norm": 3.8208296298980713, |
| "learning_rate": 0.0001899120944180599, |
| "loss": 0.4934, |
| "step": 3920 |
| }, |
| { |
| "epoch": 0.15174331055253099, |
| "grad_norm": 2.8711328506469727, |
| "learning_rate": 0.00018988635339845813, |
| "loss": 0.4417, |
| "step": 3930 |
| }, |
| { |
| "epoch": 0.15212942584655778, |
| "grad_norm": 2.922855854034424, |
| "learning_rate": 0.00018986061237885634, |
| "loss": 0.5303, |
| "step": 3940 |
| }, |
| { |
| "epoch": 0.15251554114058458, |
| "grad_norm": 2.52575945854187, |
| "learning_rate": 0.00018983487135925455, |
| "loss": 0.397, |
| "step": 3950 |
| }, |
| { |
| "epoch": 0.15290165643461137, |
| "grad_norm": 3.3369996547698975, |
| "learning_rate": 0.00018980913033965277, |
| "loss": 0.4172, |
| "step": 3960 |
| }, |
| { |
| "epoch": 0.15328777172863817, |
| "grad_norm": 1.7678214311599731, |
| "learning_rate": 0.00018978338932005098, |
| "loss": 0.3122, |
| "step": 3970 |
| }, |
| { |
| "epoch": 0.15367388702266496, |
| "grad_norm": 3.3293211460113525, |
| "learning_rate": 0.0001897576483004492, |
| "loss": 0.6864, |
| "step": 3980 |
| }, |
| { |
| "epoch": 0.15406000231669176, |
| "grad_norm": 1.4911530017852783, |
| "learning_rate": 0.0001897319072808474, |
| "loss": 0.3888, |
| "step": 3990 |
| }, |
| { |
| "epoch": 0.15444611761071855, |
| "grad_norm": 1.4884055852890015, |
| "learning_rate": 0.00018970616626124562, |
| "loss": 0.3952, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.15483223290474535, |
| "grad_norm": 1.2745383977890015, |
| "learning_rate": 0.00018968042524164383, |
| "loss": 0.3647, |
| "step": 4010 |
| }, |
| { |
| "epoch": 0.15521834819877214, |
| "grad_norm": 7.799386024475098, |
| "learning_rate": 0.00018965468422204205, |
| "loss": 0.5554, |
| "step": 4020 |
| }, |
| { |
| "epoch": 0.15560446349279894, |
| "grad_norm": 2.4778294563293457, |
| "learning_rate": 0.00018962894320244026, |
| "loss": 0.662, |
| "step": 4030 |
| }, |
| { |
| "epoch": 0.15599057878682573, |
| "grad_norm": 0.8415629267692566, |
| "learning_rate": 0.00018960320218283847, |
| "loss": 0.4317, |
| "step": 4040 |
| }, |
| { |
| "epoch": 0.15637669408085253, |
| "grad_norm": 4.507715702056885, |
| "learning_rate": 0.00018957746116323669, |
| "loss": 0.4512, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.15676280937487935, |
| "grad_norm": 3.5790421962738037, |
| "learning_rate": 0.0001895517201436349, |
| "loss": 0.4022, |
| "step": 4060 |
| }, |
| { |
| "epoch": 0.15714892466890615, |
| "grad_norm": 3.7266156673431396, |
| "learning_rate": 0.0001895259791240331, |
| "loss": 0.3945, |
| "step": 4070 |
| }, |
| { |
| "epoch": 0.15753503996293294, |
| "grad_norm": 7.909580230712891, |
| "learning_rate": 0.00018950023810443133, |
| "loss": 0.3726, |
| "step": 4080 |
| }, |
| { |
| "epoch": 0.15792115525695974, |
| "grad_norm": 2.2439534664154053, |
| "learning_rate": 0.00018947449708482954, |
| "loss": 0.4157, |
| "step": 4090 |
| }, |
| { |
| "epoch": 0.15830727055098653, |
| "grad_norm": 1.6076972484588623, |
| "learning_rate": 0.00018944875606522775, |
| "loss": 0.2363, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.15869338584501333, |
| "grad_norm": 3.7495157718658447, |
| "learning_rate": 0.00018942301504562596, |
| "loss": 0.4908, |
| "step": 4110 |
| }, |
| { |
| "epoch": 0.15907950113904012, |
| "grad_norm": 0.2942291796207428, |
| "learning_rate": 0.00018939727402602418, |
| "loss": 0.4915, |
| "step": 4120 |
| }, |
| { |
| "epoch": 0.15946561643306692, |
| "grad_norm": 1.3951829671859741, |
| "learning_rate": 0.0001893715330064224, |
| "loss": 0.4585, |
| "step": 4130 |
| }, |
| { |
| "epoch": 0.15985173172709372, |
| "grad_norm": 0.4405671954154968, |
| "learning_rate": 0.0001893457919868206, |
| "loss": 0.2839, |
| "step": 4140 |
| }, |
| { |
| "epoch": 0.1602378470211205, |
| "grad_norm": 1.0917588472366333, |
| "learning_rate": 0.00018932005096721882, |
| "loss": 0.401, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.1606239623151473, |
| "grad_norm": 1.6183397769927979, |
| "learning_rate": 0.00018929430994761703, |
| "loss": 0.5555, |
| "step": 4160 |
| }, |
| { |
| "epoch": 0.1610100776091741, |
| "grad_norm": 2.0909583568573, |
| "learning_rate": 0.00018926856892801524, |
| "loss": 0.52, |
| "step": 4170 |
| }, |
| { |
| "epoch": 0.1613961929032009, |
| "grad_norm": 2.901456356048584, |
| "learning_rate": 0.00018924282790841346, |
| "loss": 0.603, |
| "step": 4180 |
| }, |
| { |
| "epoch": 0.1617823081972277, |
| "grad_norm": 7.230431079864502, |
| "learning_rate": 0.00018921708688881167, |
| "loss": 0.6189, |
| "step": 4190 |
| }, |
| { |
| "epoch": 0.1621684234912545, |
| "grad_norm": 6.773900508880615, |
| "learning_rate": 0.00018919134586920988, |
| "loss": 0.2494, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.16255453878528128, |
| "grad_norm": 0.8557988405227661, |
| "learning_rate": 0.0001891656048496081, |
| "loss": 0.2617, |
| "step": 4210 |
| }, |
| { |
| "epoch": 0.16294065407930808, |
| "grad_norm": 1.3747268915176392, |
| "learning_rate": 0.00018913986383000634, |
| "loss": 0.4189, |
| "step": 4220 |
| }, |
| { |
| "epoch": 0.16332676937333487, |
| "grad_norm": 4.072261810302734, |
| "learning_rate": 0.00018911412281040452, |
| "loss": 0.5473, |
| "step": 4230 |
| }, |
| { |
| "epoch": 0.16371288466736167, |
| "grad_norm": 2.7210185527801514, |
| "learning_rate": 0.00018908838179080274, |
| "loss": 0.3501, |
| "step": 4240 |
| }, |
| { |
| "epoch": 0.16409899996138846, |
| "grad_norm": 2.276454448699951, |
| "learning_rate": 0.00018906264077120095, |
| "loss": 0.3078, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.16448511525541526, |
| "grad_norm": 3.586536169052124, |
| "learning_rate": 0.00018903689975159916, |
| "loss": 0.3856, |
| "step": 4260 |
| }, |
| { |
| "epoch": 0.16487123054944205, |
| "grad_norm": 2.199673891067505, |
| "learning_rate": 0.00018901115873199738, |
| "loss": 0.3677, |
| "step": 4270 |
| }, |
| { |
| "epoch": 0.16525734584346885, |
| "grad_norm": 2.8410561084747314, |
| "learning_rate": 0.0001889854177123956, |
| "loss": 0.6101, |
| "step": 4280 |
| }, |
| { |
| "epoch": 0.16564346113749565, |
| "grad_norm": 3.9638853073120117, |
| "learning_rate": 0.00018895967669279383, |
| "loss": 0.5066, |
| "step": 4290 |
| }, |
| { |
| "epoch": 0.16602957643152244, |
| "grad_norm": 1.2070738077163696, |
| "learning_rate": 0.00018893393567319202, |
| "loss": 0.385, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.16641569172554924, |
| "grad_norm": 1.0531187057495117, |
| "learning_rate": 0.00018890819465359023, |
| "loss": 0.3608, |
| "step": 4310 |
| }, |
| { |
| "epoch": 0.16680180701957603, |
| "grad_norm": 1.1998246908187866, |
| "learning_rate": 0.00018888245363398844, |
| "loss": 0.4624, |
| "step": 4320 |
| }, |
| { |
| "epoch": 0.16718792231360285, |
| "grad_norm": 2.126063346862793, |
| "learning_rate": 0.00018885671261438666, |
| "loss": 0.6076, |
| "step": 4330 |
| }, |
| { |
| "epoch": 0.16757403760762965, |
| "grad_norm": 1.5854765176773071, |
| "learning_rate": 0.0001888309715947849, |
| "loss": 0.4817, |
| "step": 4340 |
| }, |
| { |
| "epoch": 0.16796015290165645, |
| "grad_norm": 6.630712509155273, |
| "learning_rate": 0.00018880523057518308, |
| "loss": 0.4098, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.16834626819568324, |
| "grad_norm": 2.060789108276367, |
| "learning_rate": 0.00018877948955558132, |
| "loss": 0.3523, |
| "step": 4360 |
| }, |
| { |
| "epoch": 0.16873238348971004, |
| "grad_norm": 2.2551252841949463, |
| "learning_rate": 0.0001887537485359795, |
| "loss": 0.3095, |
| "step": 4370 |
| }, |
| { |
| "epoch": 0.16911849878373683, |
| "grad_norm": 3.736640453338623, |
| "learning_rate": 0.00018872800751637772, |
| "loss": 0.3812, |
| "step": 4380 |
| }, |
| { |
| "epoch": 0.16950461407776363, |
| "grad_norm": 1.9971100091934204, |
| "learning_rate": 0.00018870226649677594, |
| "loss": 0.3422, |
| "step": 4390 |
| }, |
| { |
| "epoch": 0.16989072937179042, |
| "grad_norm": 3.6577255725860596, |
| "learning_rate": 0.00018867652547717415, |
| "loss": 0.7857, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.17027684466581722, |
| "grad_norm": 2.166538715362549, |
| "learning_rate": 0.0001886507844575724, |
| "loss": 0.5596, |
| "step": 4410 |
| }, |
| { |
| "epoch": 0.170662959959844, |
| "grad_norm": 2.0177736282348633, |
| "learning_rate": 0.00018862504343797058, |
| "loss": 0.3197, |
| "step": 4420 |
| }, |
| { |
| "epoch": 0.1710490752538708, |
| "grad_norm": 0.29447808861732483, |
| "learning_rate": 0.00018859930241836882, |
| "loss": 0.5284, |
| "step": 4430 |
| }, |
| { |
| "epoch": 0.1714351905478976, |
| "grad_norm": 2.17985200881958, |
| "learning_rate": 0.000188573561398767, |
| "loss": 0.5188, |
| "step": 4440 |
| }, |
| { |
| "epoch": 0.1718213058419244, |
| "grad_norm": 2.87449049949646, |
| "learning_rate": 0.00018854782037916522, |
| "loss": 0.554, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.1722074211359512, |
| "grad_norm": 1.8865265846252441, |
| "learning_rate": 0.00018852207935956343, |
| "loss": 0.4338, |
| "step": 4460 |
| }, |
| { |
| "epoch": 0.172593536429978, |
| "grad_norm": 2.042337417602539, |
| "learning_rate": 0.00018849633833996164, |
| "loss": 0.3924, |
| "step": 4470 |
| }, |
| { |
| "epoch": 0.17297965172400478, |
| "grad_norm": 1.4254354238510132, |
| "learning_rate": 0.00018847059732035988, |
| "loss": 0.2607, |
| "step": 4480 |
| }, |
| { |
| "epoch": 0.17336576701803158, |
| "grad_norm": 2.611560344696045, |
| "learning_rate": 0.00018844485630075807, |
| "loss": 0.4967, |
| "step": 4490 |
| }, |
| { |
| "epoch": 0.17375188231205838, |
| "grad_norm": 1.1008936166763306, |
| "learning_rate": 0.0001884191152811563, |
| "loss": 0.4109, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.17413799760608517, |
| "grad_norm": 0.8280178308486938, |
| "learning_rate": 0.0001883933742615545, |
| "loss": 0.6632, |
| "step": 4510 |
| }, |
| { |
| "epoch": 0.17452411290011197, |
| "grad_norm": 2.226020336151123, |
| "learning_rate": 0.0001883676332419527, |
| "loss": 0.4777, |
| "step": 4520 |
| }, |
| { |
| "epoch": 0.17491022819413876, |
| "grad_norm": 1.6062042713165283, |
| "learning_rate": 0.00018834189222235095, |
| "loss": 0.4671, |
| "step": 4530 |
| }, |
| { |
| "epoch": 0.17529634348816556, |
| "grad_norm": 3.9853012561798096, |
| "learning_rate": 0.00018831615120274914, |
| "loss": 0.4843, |
| "step": 4540 |
| }, |
| { |
| "epoch": 0.17568245878219235, |
| "grad_norm": 0.30268657207489014, |
| "learning_rate": 0.00018829041018314738, |
| "loss": 0.3922, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.17606857407621915, |
| "grad_norm": 6.283960342407227, |
| "learning_rate": 0.00018826466916354556, |
| "loss": 0.6106, |
| "step": 4560 |
| }, |
| { |
| "epoch": 0.17645468937024594, |
| "grad_norm": 1.4164658784866333, |
| "learning_rate": 0.0001882389281439438, |
| "loss": 0.3014, |
| "step": 4570 |
| }, |
| { |
| "epoch": 0.17684080466427274, |
| "grad_norm": 4.847668170928955, |
| "learning_rate": 0.000188213187124342, |
| "loss": 0.5216, |
| "step": 4580 |
| }, |
| { |
| "epoch": 0.17722691995829956, |
| "grad_norm": 3.683180332183838, |
| "learning_rate": 0.0001881874461047402, |
| "loss": 0.3268, |
| "step": 4590 |
| }, |
| { |
| "epoch": 0.17761303525232636, |
| "grad_norm": 1.053144097328186, |
| "learning_rate": 0.00018816170508513844, |
| "loss": 0.5229, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.17799915054635315, |
| "grad_norm": 0.29438719153404236, |
| "learning_rate": 0.00018813596406553663, |
| "loss": 0.4523, |
| "step": 4610 |
| }, |
| { |
| "epoch": 0.17838526584037995, |
| "grad_norm": 1.5682024955749512, |
| "learning_rate": 0.00018811022304593487, |
| "loss": 0.4367, |
| "step": 4620 |
| }, |
| { |
| "epoch": 0.17877138113440674, |
| "grad_norm": 1.462189793586731, |
| "learning_rate": 0.00018808448202633306, |
| "loss": 0.5086, |
| "step": 4630 |
| }, |
| { |
| "epoch": 0.17915749642843354, |
| "grad_norm": 0.7927210927009583, |
| "learning_rate": 0.0001880587410067313, |
| "loss": 0.4654, |
| "step": 4640 |
| }, |
| { |
| "epoch": 0.17954361172246033, |
| "grad_norm": 1.4543548822402954, |
| "learning_rate": 0.0001880329999871295, |
| "loss": 0.5005, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.17992972701648713, |
| "grad_norm": 1.5814868211746216, |
| "learning_rate": 0.00018800725896752772, |
| "loss": 0.4127, |
| "step": 4660 |
| }, |
| { |
| "epoch": 0.18031584231051392, |
| "grad_norm": 1.9244798421859741, |
| "learning_rate": 0.00018798151794792594, |
| "loss": 0.3796, |
| "step": 4670 |
| }, |
| { |
| "epoch": 0.18070195760454072, |
| "grad_norm": 1.8725996017456055, |
| "learning_rate": 0.00018795577692832412, |
| "loss": 0.4112, |
| "step": 4680 |
| }, |
| { |
| "epoch": 0.18108807289856751, |
| "grad_norm": 2.8138442039489746, |
| "learning_rate": 0.00018793003590872236, |
| "loss": 0.6117, |
| "step": 4690 |
| }, |
| { |
| "epoch": 0.1814741881925943, |
| "grad_norm": 3.4465060234069824, |
| "learning_rate": 0.00018790429488912055, |
| "loss": 0.4223, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.1818603034866211, |
| "grad_norm": 4.431785583496094, |
| "learning_rate": 0.0001878785538695188, |
| "loss": 0.54, |
| "step": 4710 |
| }, |
| { |
| "epoch": 0.1822464187806479, |
| "grad_norm": 6.951846599578857, |
| "learning_rate": 0.000187852812849917, |
| "loss": 0.3702, |
| "step": 4720 |
| }, |
| { |
| "epoch": 0.1826325340746747, |
| "grad_norm": 1.0188024044036865, |
| "learning_rate": 0.00018782707183031522, |
| "loss": 0.2715, |
| "step": 4730 |
| }, |
| { |
| "epoch": 0.1830186493687015, |
| "grad_norm": 0.3875834047794342, |
| "learning_rate": 0.00018780133081071343, |
| "loss": 0.4208, |
| "step": 4740 |
| }, |
| { |
| "epoch": 0.1834047646627283, |
| "grad_norm": 2.7475740909576416, |
| "learning_rate": 0.00018777558979111162, |
| "loss": 0.3613, |
| "step": 4750 |
| }, |
| { |
| "epoch": 0.18379087995675508, |
| "grad_norm": 2.553227186203003, |
| "learning_rate": 0.00018774984877150986, |
| "loss": 0.4781, |
| "step": 4760 |
| }, |
| { |
| "epoch": 0.18417699525078188, |
| "grad_norm": 2.005154609680176, |
| "learning_rate": 0.00018772410775190804, |
| "loss": 0.3805, |
| "step": 4770 |
| }, |
| { |
| "epoch": 0.18456311054480867, |
| "grad_norm": 0.7380127310752869, |
| "learning_rate": 0.00018769836673230628, |
| "loss": 0.3679, |
| "step": 4780 |
| }, |
| { |
| "epoch": 0.18494922583883547, |
| "grad_norm": 3.6547505855560303, |
| "learning_rate": 0.0001876726257127045, |
| "loss": 0.4502, |
| "step": 4790 |
| }, |
| { |
| "epoch": 0.18533534113286226, |
| "grad_norm": 2.232980728149414, |
| "learning_rate": 0.0001876468846931027, |
| "loss": 0.4628, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.18572145642688906, |
| "grad_norm": 6.521275043487549, |
| "learning_rate": 0.00018762114367350092, |
| "loss": 0.4765, |
| "step": 4810 |
| }, |
| { |
| "epoch": 0.18610757172091585, |
| "grad_norm": 1.6310979127883911, |
| "learning_rate": 0.0001875954026538991, |
| "loss": 0.4039, |
| "step": 4820 |
| }, |
| { |
| "epoch": 0.18649368701494265, |
| "grad_norm": 1.1469775438308716, |
| "learning_rate": 0.00018756966163429735, |
| "loss": 0.4195, |
| "step": 4830 |
| }, |
| { |
| "epoch": 0.18687980230896944, |
| "grad_norm": 0.7688332200050354, |
| "learning_rate": 0.00018754392061469556, |
| "loss": 0.264, |
| "step": 4840 |
| }, |
| { |
| "epoch": 0.18726591760299627, |
| "grad_norm": 3.3422155380249023, |
| "learning_rate": 0.00018751817959509378, |
| "loss": 0.5275, |
| "step": 4850 |
| }, |
| { |
| "epoch": 0.18765203289702306, |
| "grad_norm": 1.517876386642456, |
| "learning_rate": 0.000187492438575492, |
| "loss": 0.4567, |
| "step": 4860 |
| }, |
| { |
| "epoch": 0.18803814819104986, |
| "grad_norm": 1.2196050882339478, |
| "learning_rate": 0.0001874666975558902, |
| "loss": 0.4231, |
| "step": 4870 |
| }, |
| { |
| "epoch": 0.18842426348507665, |
| "grad_norm": 1.3325402736663818, |
| "learning_rate": 0.00018744095653628842, |
| "loss": 0.6325, |
| "step": 4880 |
| }, |
| { |
| "epoch": 0.18881037877910345, |
| "grad_norm": 6.098769664764404, |
| "learning_rate": 0.0001874152155166866, |
| "loss": 0.576, |
| "step": 4890 |
| }, |
| { |
| "epoch": 0.18919649407313024, |
| "grad_norm": 2.602363348007202, |
| "learning_rate": 0.00018738947449708484, |
| "loss": 0.3237, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.18958260936715704, |
| "grad_norm": 0.970106303691864, |
| "learning_rate": 0.00018736373347748306, |
| "loss": 0.409, |
| "step": 4910 |
| }, |
| { |
| "epoch": 0.18996872466118384, |
| "grad_norm": 3.2592012882232666, |
| "learning_rate": 0.00018733799245788127, |
| "loss": 0.408, |
| "step": 4920 |
| }, |
| { |
| "epoch": 0.19035483995521063, |
| "grad_norm": 0.31132128834724426, |
| "learning_rate": 0.00018731225143827948, |
| "loss": 0.2446, |
| "step": 4930 |
| }, |
| { |
| "epoch": 0.19074095524923743, |
| "grad_norm": 5.321741104125977, |
| "learning_rate": 0.0001872865104186777, |
| "loss": 0.4604, |
| "step": 4940 |
| }, |
| { |
| "epoch": 0.19112707054326422, |
| "grad_norm": 1.1165122985839844, |
| "learning_rate": 0.0001872607693990759, |
| "loss": 0.3605, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.19151318583729102, |
| "grad_norm": 0.8274110555648804, |
| "learning_rate": 0.0001872350283794741, |
| "loss": 0.2669, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.1918993011313178, |
| "grad_norm": 2.8668346405029297, |
| "learning_rate": 0.00018720928735987234, |
| "loss": 0.4055, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.1922854164253446, |
| "grad_norm": 3.411841630935669, |
| "learning_rate": 0.00018718354634027055, |
| "loss": 0.5989, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.1926715317193714, |
| "grad_norm": 0.18740829825401306, |
| "learning_rate": 0.00018715780532066876, |
| "loss": 0.3805, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.1930576470133982, |
| "grad_norm": 1.0823473930358887, |
| "learning_rate": 0.00018713206430106698, |
| "loss": 0.2854, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.193443762307425, |
| "grad_norm": 1.9816405773162842, |
| "learning_rate": 0.0001871063232814652, |
| "loss": 0.3771, |
| "step": 5010 |
| }, |
| { |
| "epoch": 0.1938298776014518, |
| "grad_norm": 5.267081260681152, |
| "learning_rate": 0.0001870805822618634, |
| "loss": 0.3085, |
| "step": 5020 |
| }, |
| { |
| "epoch": 0.19421599289547858, |
| "grad_norm": 5.706038475036621, |
| "learning_rate": 0.00018705484124226162, |
| "loss": 0.484, |
| "step": 5030 |
| }, |
| { |
| "epoch": 0.19460210818950538, |
| "grad_norm": 1.3357723951339722, |
| "learning_rate": 0.00018702910022265983, |
| "loss": 0.2161, |
| "step": 5040 |
| }, |
| { |
| "epoch": 0.19498822348353217, |
| "grad_norm": 1.0626447200775146, |
| "learning_rate": 0.00018700335920305804, |
| "loss": 0.3491, |
| "step": 5050 |
| }, |
| { |
| "epoch": 0.19537433877755897, |
| "grad_norm": 2.441228151321411, |
| "learning_rate": 0.00018697761818345626, |
| "loss": 0.3975, |
| "step": 5060 |
| }, |
| { |
| "epoch": 0.19576045407158577, |
| "grad_norm": 2.6739327907562256, |
| "learning_rate": 0.00018695187716385447, |
| "loss": 0.3418, |
| "step": 5070 |
| }, |
| { |
| "epoch": 0.19614656936561256, |
| "grad_norm": 2.3216919898986816, |
| "learning_rate": 0.00018692613614425268, |
| "loss": 0.6265, |
| "step": 5080 |
| }, |
| { |
| "epoch": 0.19653268465963936, |
| "grad_norm": 3.9119021892547607, |
| "learning_rate": 0.0001869003951246509, |
| "loss": 0.2982, |
| "step": 5090 |
| }, |
| { |
| "epoch": 0.19691879995366615, |
| "grad_norm": 5.744061470031738, |
| "learning_rate": 0.0001868746541050491, |
| "loss": 0.4048, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.19730491524769297, |
| "grad_norm": 8.512910842895508, |
| "learning_rate": 0.00018684891308544732, |
| "loss": 0.3598, |
| "step": 5110 |
| }, |
| { |
| "epoch": 0.19769103054171977, |
| "grad_norm": 1.6382296085357666, |
| "learning_rate": 0.00018682317206584554, |
| "loss": 0.2121, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.19807714583574657, |
| "grad_norm": 2.1593070030212402, |
| "learning_rate": 0.00018679743104624375, |
| "loss": 0.4914, |
| "step": 5130 |
| }, |
| { |
| "epoch": 0.19846326112977336, |
| "grad_norm": 3.067112445831299, |
| "learning_rate": 0.00018677169002664196, |
| "loss": 0.4171, |
| "step": 5140 |
| }, |
| { |
| "epoch": 0.19884937642380016, |
| "grad_norm": 1.9954415559768677, |
| "learning_rate": 0.00018674594900704018, |
| "loss": 0.5161, |
| "step": 5150 |
| }, |
| { |
| "epoch": 0.19923549171782695, |
| "grad_norm": 2.793346643447876, |
| "learning_rate": 0.0001867202079874384, |
| "loss": 0.4159, |
| "step": 5160 |
| }, |
| { |
| "epoch": 0.19962160701185375, |
| "grad_norm": 4.087403774261475, |
| "learning_rate": 0.0001866944669678366, |
| "loss": 0.3339, |
| "step": 5170 |
| }, |
| { |
| "epoch": 0.20000772230588054, |
| "grad_norm": 2.10153865814209, |
| "learning_rate": 0.00018666872594823482, |
| "loss": 0.4352, |
| "step": 5180 |
| }, |
| { |
| "epoch": 0.20039383759990734, |
| "grad_norm": 2.947117805480957, |
| "learning_rate": 0.00018664298492863303, |
| "loss": 0.305, |
| "step": 5190 |
| }, |
| { |
| "epoch": 0.20077995289393413, |
| "grad_norm": 1.2496302127838135, |
| "learning_rate": 0.00018661724390903124, |
| "loss": 0.4578, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.20116606818796093, |
| "grad_norm": 0.5246118903160095, |
| "learning_rate": 0.00018659150288942946, |
| "loss": 0.7531, |
| "step": 5210 |
| }, |
| { |
| "epoch": 0.20155218348198772, |
| "grad_norm": 4.099668502807617, |
| "learning_rate": 0.00018656576186982767, |
| "loss": 0.3809, |
| "step": 5220 |
| }, |
| { |
| "epoch": 0.20193829877601452, |
| "grad_norm": 4.237419128417969, |
| "learning_rate": 0.00018654002085022588, |
| "loss": 0.3169, |
| "step": 5230 |
| }, |
| { |
| "epoch": 0.20232441407004131, |
| "grad_norm": 1.6228466033935547, |
| "learning_rate": 0.0001865142798306241, |
| "loss": 0.5832, |
| "step": 5240 |
| }, |
| { |
| "epoch": 0.2027105293640681, |
| "grad_norm": 4.567386627197266, |
| "learning_rate": 0.0001864885388110223, |
| "loss": 0.2177, |
| "step": 5250 |
| }, |
| { |
| "epoch": 0.2030966446580949, |
| "grad_norm": 1.4991040229797363, |
| "learning_rate": 0.00018646279779142052, |
| "loss": 0.3851, |
| "step": 5260 |
| }, |
| { |
| "epoch": 0.2034827599521217, |
| "grad_norm": 2.127082586288452, |
| "learning_rate": 0.00018643705677181874, |
| "loss": 0.4721, |
| "step": 5270 |
| }, |
| { |
| "epoch": 0.2038688752461485, |
| "grad_norm": 2.9149303436279297, |
| "learning_rate": 0.00018641131575221695, |
| "loss": 0.2556, |
| "step": 5280 |
| }, |
| { |
| "epoch": 0.2042549905401753, |
| "grad_norm": 0.06375914812088013, |
| "learning_rate": 0.00018638557473261516, |
| "loss": 0.3599, |
| "step": 5290 |
| }, |
| { |
| "epoch": 0.20464110583420209, |
| "grad_norm": 3.338331699371338, |
| "learning_rate": 0.00018635983371301338, |
| "loss": 0.4062, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.20502722112822888, |
| "grad_norm": 4.006681442260742, |
| "learning_rate": 0.0001863340926934116, |
| "loss": 0.4538, |
| "step": 5310 |
| }, |
| { |
| "epoch": 0.20541333642225568, |
| "grad_norm": 1.1406009197235107, |
| "learning_rate": 0.0001863083516738098, |
| "loss": 0.8432, |
| "step": 5320 |
| }, |
| { |
| "epoch": 0.20579945171628247, |
| "grad_norm": 9.281437873840332, |
| "learning_rate": 0.00018628261065420802, |
| "loss": 0.4538, |
| "step": 5330 |
| }, |
| { |
| "epoch": 0.20618556701030927, |
| "grad_norm": 3.1884214878082275, |
| "learning_rate": 0.00018625686963460626, |
| "loss": 0.3361, |
| "step": 5340 |
| }, |
| { |
| "epoch": 0.20657168230433606, |
| "grad_norm": 1.4311977624893188, |
| "learning_rate": 0.00018623112861500444, |
| "loss": 0.5519, |
| "step": 5350 |
| }, |
| { |
| "epoch": 0.20695779759836286, |
| "grad_norm": 3.574361801147461, |
| "learning_rate": 0.00018620538759540266, |
| "loss": 0.518, |
| "step": 5360 |
| }, |
| { |
| "epoch": 0.20734391289238968, |
| "grad_norm": 3.0186073780059814, |
| "learning_rate": 0.00018617964657580087, |
| "loss": 0.4204, |
| "step": 5370 |
| }, |
| { |
| "epoch": 0.20773002818641648, |
| "grad_norm": 2.832859754562378, |
| "learning_rate": 0.00018615390555619908, |
| "loss": 0.5736, |
| "step": 5380 |
| }, |
| { |
| "epoch": 0.20811614348044327, |
| "grad_norm": 2.2258200645446777, |
| "learning_rate": 0.0001861281645365973, |
| "loss": 0.8194, |
| "step": 5390 |
| }, |
| { |
| "epoch": 0.20850225877447007, |
| "grad_norm": 1.0975148677825928, |
| "learning_rate": 0.0001861024235169955, |
| "loss": 0.5235, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.20888837406849686, |
| "grad_norm": 2.597329616546631, |
| "learning_rate": 0.00018607668249739375, |
| "loss": 0.2798, |
| "step": 5410 |
| }, |
| { |
| "epoch": 0.20927448936252366, |
| "grad_norm": 1.3780876398086548, |
| "learning_rate": 0.00018605094147779194, |
| "loss": 0.4046, |
| "step": 5420 |
| }, |
| { |
| "epoch": 0.20966060465655045, |
| "grad_norm": 2.409886598587036, |
| "learning_rate": 0.00018602520045819018, |
| "loss": 0.3243, |
| "step": 5430 |
| }, |
| { |
| "epoch": 0.21004671995057725, |
| "grad_norm": 1.0368077754974365, |
| "learning_rate": 0.00018599945943858836, |
| "loss": 0.4469, |
| "step": 5440 |
| }, |
| { |
| "epoch": 0.21043283524460404, |
| "grad_norm": 2.961658000946045, |
| "learning_rate": 0.00018597371841898658, |
| "loss": 0.5104, |
| "step": 5450 |
| }, |
| { |
| "epoch": 0.21081895053863084, |
| "grad_norm": 1.1599836349487305, |
| "learning_rate": 0.00018594797739938482, |
| "loss": 0.3422, |
| "step": 5460 |
| }, |
| { |
| "epoch": 0.21120506583265763, |
| "grad_norm": 3.293682336807251, |
| "learning_rate": 0.000185922236379783, |
| "loss": 0.3556, |
| "step": 5470 |
| }, |
| { |
| "epoch": 0.21159118112668443, |
| "grad_norm": 1.6923863887786865, |
| "learning_rate": 0.00018589649536018124, |
| "loss": 0.3084, |
| "step": 5480 |
| }, |
| { |
| "epoch": 0.21197729642071123, |
| "grad_norm": 3.7289531230926514, |
| "learning_rate": 0.00018587075434057943, |
| "loss": 0.4668, |
| "step": 5490 |
| }, |
| { |
| "epoch": 0.21236341171473802, |
| "grad_norm": 1.3744993209838867, |
| "learning_rate": 0.00018584501332097767, |
| "loss": 0.2984, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.21274952700876482, |
| "grad_norm": 1.4377775192260742, |
| "learning_rate": 0.00018581927230137586, |
| "loss": 0.2622, |
| "step": 5510 |
| }, |
| { |
| "epoch": 0.2131356423027916, |
| "grad_norm": 4.957859992980957, |
| "learning_rate": 0.00018579353128177407, |
| "loss": 0.5561, |
| "step": 5520 |
| }, |
| { |
| "epoch": 0.2135217575968184, |
| "grad_norm": 3.2645647525787354, |
| "learning_rate": 0.0001857677902621723, |
| "loss": 0.59, |
| "step": 5530 |
| }, |
| { |
| "epoch": 0.2139078728908452, |
| "grad_norm": 1.1365091800689697, |
| "learning_rate": 0.0001857420492425705, |
| "loss": 0.443, |
| "step": 5540 |
| }, |
| { |
| "epoch": 0.214293988184872, |
| "grad_norm": 3.187476396560669, |
| "learning_rate": 0.00018571630822296874, |
| "loss": 0.2612, |
| "step": 5550 |
| }, |
| { |
| "epoch": 0.2146801034788988, |
| "grad_norm": 2.6851940155029297, |
| "learning_rate": 0.00018569056720336692, |
| "loss": 0.4543, |
| "step": 5560 |
| }, |
| { |
| "epoch": 0.2150662187729256, |
| "grad_norm": 2.2613587379455566, |
| "learning_rate": 0.00018566482618376516, |
| "loss": 0.3185, |
| "step": 5570 |
| }, |
| { |
| "epoch": 0.21545233406695238, |
| "grad_norm": 1.292475700378418, |
| "learning_rate": 0.00018563908516416335, |
| "loss": 0.2794, |
| "step": 5580 |
| }, |
| { |
| "epoch": 0.21583844936097918, |
| "grad_norm": 2.0878446102142334, |
| "learning_rate": 0.00018561334414456156, |
| "loss": 0.3908, |
| "step": 5590 |
| }, |
| { |
| "epoch": 0.21622456465500597, |
| "grad_norm": 8.058819770812988, |
| "learning_rate": 0.0001855876031249598, |
| "loss": 0.6282, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.21661067994903277, |
| "grad_norm": 1.8231629133224487, |
| "learning_rate": 0.000185561862105358, |
| "loss": 0.4973, |
| "step": 5610 |
| }, |
| { |
| "epoch": 0.21699679524305956, |
| "grad_norm": 3.947242259979248, |
| "learning_rate": 0.00018553612108575623, |
| "loss": 0.4598, |
| "step": 5620 |
| }, |
| { |
| "epoch": 0.21738291053708636, |
| "grad_norm": 3.3258073329925537, |
| "learning_rate": 0.00018551038006615442, |
| "loss": 0.5266, |
| "step": 5630 |
| }, |
| { |
| "epoch": 0.21776902583111318, |
| "grad_norm": 2.301485300064087, |
| "learning_rate": 0.00018548463904655266, |
| "loss": 0.4339, |
| "step": 5640 |
| }, |
| { |
| "epoch": 0.21815514112513998, |
| "grad_norm": 4.4706878662109375, |
| "learning_rate": 0.00018545889802695087, |
| "loss": 0.5233, |
| "step": 5650 |
| }, |
| { |
| "epoch": 0.21854125641916677, |
| "grad_norm": 1.1203399896621704, |
| "learning_rate": 0.00018543315700734906, |
| "loss": 0.4547, |
| "step": 5660 |
| }, |
| { |
| "epoch": 0.21892737171319357, |
| "grad_norm": 0.3744584918022156, |
| "learning_rate": 0.0001854074159877473, |
| "loss": 0.2524, |
| "step": 5670 |
| }, |
| { |
| "epoch": 0.21931348700722036, |
| "grad_norm": 2.7888870239257812, |
| "learning_rate": 0.00018538167496814548, |
| "loss": 0.411, |
| "step": 5680 |
| }, |
| { |
| "epoch": 0.21969960230124716, |
| "grad_norm": 4.9972429275512695, |
| "learning_rate": 0.00018535593394854372, |
| "loss": 0.6359, |
| "step": 5690 |
| }, |
| { |
| "epoch": 0.22008571759527396, |
| "grad_norm": 1.1321420669555664, |
| "learning_rate": 0.0001853301929289419, |
| "loss": 0.4068, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.22047183288930075, |
| "grad_norm": 1.9291785955429077, |
| "learning_rate": 0.00018530445190934015, |
| "loss": 0.5428, |
| "step": 5710 |
| }, |
| { |
| "epoch": 0.22085794818332755, |
| "grad_norm": 0.8663263916969299, |
| "learning_rate": 0.00018527871088973836, |
| "loss": 0.4662, |
| "step": 5720 |
| }, |
| { |
| "epoch": 0.22124406347735434, |
| "grad_norm": 3.039782762527466, |
| "learning_rate": 0.00018525296987013655, |
| "loss": 0.3045, |
| "step": 5730 |
| }, |
| { |
| "epoch": 0.22163017877138114, |
| "grad_norm": 1.3552179336547852, |
| "learning_rate": 0.0001852272288505348, |
| "loss": 0.3411, |
| "step": 5740 |
| }, |
| { |
| "epoch": 0.22201629406540793, |
| "grad_norm": 1.4136948585510254, |
| "learning_rate": 0.00018520148783093298, |
| "loss": 0.5517, |
| "step": 5750 |
| }, |
| { |
| "epoch": 0.22240240935943473, |
| "grad_norm": 2.463942766189575, |
| "learning_rate": 0.00018517574681133122, |
| "loss": 0.4681, |
| "step": 5760 |
| }, |
| { |
| "epoch": 0.22278852465346152, |
| "grad_norm": 0.9063917994499207, |
| "learning_rate": 0.0001851500057917294, |
| "loss": 0.4537, |
| "step": 5770 |
| }, |
| { |
| "epoch": 0.22317463994748832, |
| "grad_norm": 2.352678060531616, |
| "learning_rate": 0.00018512426477212764, |
| "loss": 0.4245, |
| "step": 5780 |
| }, |
| { |
| "epoch": 0.2235607552415151, |
| "grad_norm": 2.0424869060516357, |
| "learning_rate": 0.00018509852375252586, |
| "loss": 0.2892, |
| "step": 5790 |
| }, |
| { |
| "epoch": 0.2239468705355419, |
| "grad_norm": 2.7604904174804688, |
| "learning_rate": 0.00018507278273292404, |
| "loss": 0.3606, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.2243329858295687, |
| "grad_norm": 2.827798366546631, |
| "learning_rate": 0.00018504704171332228, |
| "loss": 0.3212, |
| "step": 5810 |
| }, |
| { |
| "epoch": 0.2247191011235955, |
| "grad_norm": 3.1988680362701416, |
| "learning_rate": 0.00018502130069372047, |
| "loss": 0.5649, |
| "step": 5820 |
| }, |
| { |
| "epoch": 0.2251052164176223, |
| "grad_norm": 1.8216092586517334, |
| "learning_rate": 0.0001849955596741187, |
| "loss": 0.2871, |
| "step": 5830 |
| }, |
| { |
| "epoch": 0.2254913317116491, |
| "grad_norm": 2.7595627307891846, |
| "learning_rate": 0.00018496981865451692, |
| "loss": 0.665, |
| "step": 5840 |
| }, |
| { |
| "epoch": 0.22587744700567589, |
| "grad_norm": 1.2395098209381104, |
| "learning_rate": 0.00018494407763491514, |
| "loss": 0.2504, |
| "step": 5850 |
| }, |
| { |
| "epoch": 0.22626356229970268, |
| "grad_norm": 0.6991098523139954, |
| "learning_rate": 0.00018491833661531335, |
| "loss": 0.2263, |
| "step": 5860 |
| }, |
| { |
| "epoch": 0.22664967759372948, |
| "grad_norm": 11.053647994995117, |
| "learning_rate": 0.00018489259559571156, |
| "loss": 0.5919, |
| "step": 5870 |
| }, |
| { |
| "epoch": 0.22703579288775627, |
| "grad_norm": 2.8663880825042725, |
| "learning_rate": 0.00018486685457610978, |
| "loss": 0.3399, |
| "step": 5880 |
| }, |
| { |
| "epoch": 0.22742190818178307, |
| "grad_norm": 1.4995262622833252, |
| "learning_rate": 0.00018484111355650796, |
| "loss": 0.4474, |
| "step": 5890 |
| }, |
| { |
| "epoch": 0.2278080234758099, |
| "grad_norm": 3.275681972503662, |
| "learning_rate": 0.0001848153725369062, |
| "loss": 0.4347, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.22819413876983669, |
| "grad_norm": 14.772253036499023, |
| "learning_rate": 0.00018478963151730442, |
| "loss": 0.3705, |
| "step": 5910 |
| }, |
| { |
| "epoch": 0.22858025406386348, |
| "grad_norm": 3.184976816177368, |
| "learning_rate": 0.00018476389049770263, |
| "loss": 0.3866, |
| "step": 5920 |
| }, |
| { |
| "epoch": 0.22896636935789028, |
| "grad_norm": 2.310765504837036, |
| "learning_rate": 0.00018473814947810084, |
| "loss": 0.2717, |
| "step": 5930 |
| }, |
| { |
| "epoch": 0.22935248465191707, |
| "grad_norm": 2.061189889907837, |
| "learning_rate": 0.00018471240845849906, |
| "loss": 0.2054, |
| "step": 5940 |
| }, |
| { |
| "epoch": 0.22973859994594387, |
| "grad_norm": 10.815469741821289, |
| "learning_rate": 0.00018468666743889727, |
| "loss": 0.5868, |
| "step": 5950 |
| }, |
| { |
| "epoch": 0.23012471523997066, |
| "grad_norm": 1.7080497741699219, |
| "learning_rate": 0.00018466092641929548, |
| "loss": 0.236, |
| "step": 5960 |
| }, |
| { |
| "epoch": 0.23051083053399746, |
| "grad_norm": 7.389080047607422, |
| "learning_rate": 0.0001846351853996937, |
| "loss": 0.2752, |
| "step": 5970 |
| }, |
| { |
| "epoch": 0.23089694582802425, |
| "grad_norm": 2.9860422611236572, |
| "learning_rate": 0.0001846094443800919, |
| "loss": 0.3436, |
| "step": 5980 |
| }, |
| { |
| "epoch": 0.23128306112205105, |
| "grad_norm": 13.12328815460205, |
| "learning_rate": 0.00018458370336049012, |
| "loss": 0.3952, |
| "step": 5990 |
| }, |
| { |
| "epoch": 0.23166917641607784, |
| "grad_norm": 3.7130823135375977, |
| "learning_rate": 0.00018455796234088834, |
| "loss": 0.3658, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.23205529171010464, |
| "grad_norm": 1.8329843282699585, |
| "learning_rate": 0.00018453222132128655, |
| "loss": 0.4172, |
| "step": 6010 |
| }, |
| { |
| "epoch": 0.23244140700413143, |
| "grad_norm": 1.3583799600601196, |
| "learning_rate": 0.00018450648030168476, |
| "loss": 0.4005, |
| "step": 6020 |
| }, |
| { |
| "epoch": 0.23282752229815823, |
| "grad_norm": 3.1711816787719727, |
| "learning_rate": 0.00018448073928208297, |
| "loss": 0.3674, |
| "step": 6030 |
| }, |
| { |
| "epoch": 0.23321363759218502, |
| "grad_norm": 1.576937198638916, |
| "learning_rate": 0.0001844549982624812, |
| "loss": 0.3444, |
| "step": 6040 |
| }, |
| { |
| "epoch": 0.23359975288621182, |
| "grad_norm": 3.922267436981201, |
| "learning_rate": 0.0001844292572428794, |
| "loss": 0.5939, |
| "step": 6050 |
| }, |
| { |
| "epoch": 0.23398586818023862, |
| "grad_norm": 2.9851067066192627, |
| "learning_rate": 0.00018440351622327761, |
| "loss": 0.2387, |
| "step": 6060 |
| }, |
| { |
| "epoch": 0.2343719834742654, |
| "grad_norm": 2.1216888427734375, |
| "learning_rate": 0.00018437777520367583, |
| "loss": 0.3836, |
| "step": 6070 |
| }, |
| { |
| "epoch": 0.2347580987682922, |
| "grad_norm": 2.9788095951080322, |
| "learning_rate": 0.00018435203418407404, |
| "loss": 0.474, |
| "step": 6080 |
| }, |
| { |
| "epoch": 0.235144214062319, |
| "grad_norm": 1.0204919576644897, |
| "learning_rate": 0.00018432629316447225, |
| "loss": 0.2837, |
| "step": 6090 |
| }, |
| { |
| "epoch": 0.2355303293563458, |
| "grad_norm": 0.9091696739196777, |
| "learning_rate": 0.00018430055214487047, |
| "loss": 0.6203, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.2359164446503726, |
| "grad_norm": 0.25899162888526917, |
| "learning_rate": 0.00018427481112526868, |
| "loss": 0.4759, |
| "step": 6110 |
| }, |
| { |
| "epoch": 0.2363025599443994, |
| "grad_norm": 1.8625538349151611, |
| "learning_rate": 0.0001842490701056669, |
| "loss": 0.2992, |
| "step": 6120 |
| }, |
| { |
| "epoch": 0.23668867523842618, |
| "grad_norm": 1.586521863937378, |
| "learning_rate": 0.0001842233290860651, |
| "loss": 0.6122, |
| "step": 6130 |
| }, |
| { |
| "epoch": 0.23707479053245298, |
| "grad_norm": 2.387650966644287, |
| "learning_rate": 0.00018419758806646332, |
| "loss": 0.3276, |
| "step": 6140 |
| }, |
| { |
| "epoch": 0.23746090582647977, |
| "grad_norm": 4.840515613555908, |
| "learning_rate": 0.00018417184704686153, |
| "loss": 0.6295, |
| "step": 6150 |
| }, |
| { |
| "epoch": 0.2378470211205066, |
| "grad_norm": 1.70024836063385, |
| "learning_rate": 0.00018414610602725975, |
| "loss": 0.2047, |
| "step": 6160 |
| }, |
| { |
| "epoch": 0.2382331364145334, |
| "grad_norm": 2.791619062423706, |
| "learning_rate": 0.00018412036500765796, |
| "loss": 0.4364, |
| "step": 6170 |
| }, |
| { |
| "epoch": 0.2386192517085602, |
| "grad_norm": 3.710066318511963, |
| "learning_rate": 0.00018409462398805617, |
| "loss": 0.4564, |
| "step": 6180 |
| }, |
| { |
| "epoch": 0.23900536700258698, |
| "grad_norm": 2.564347982406616, |
| "learning_rate": 0.0001840688829684544, |
| "loss": 0.3156, |
| "step": 6190 |
| }, |
| { |
| "epoch": 0.23939148229661378, |
| "grad_norm": 2.3921267986297607, |
| "learning_rate": 0.0001840431419488526, |
| "loss": 0.3483, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.23977759759064057, |
| "grad_norm": 1.4785810708999634, |
| "learning_rate": 0.00018401740092925081, |
| "loss": 0.4338, |
| "step": 6210 |
| }, |
| { |
| "epoch": 0.24016371288466737, |
| "grad_norm": 3.624790906906128, |
| "learning_rate": 0.00018399165990964903, |
| "loss": 0.7156, |
| "step": 6220 |
| }, |
| { |
| "epoch": 0.24054982817869416, |
| "grad_norm": 3.942161798477173, |
| "learning_rate": 0.00018396591889004724, |
| "loss": 0.3932, |
| "step": 6230 |
| }, |
| { |
| "epoch": 0.24093594347272096, |
| "grad_norm": 3.2236740589141846, |
| "learning_rate": 0.00018394017787044545, |
| "loss": 0.3933, |
| "step": 6240 |
| }, |
| { |
| "epoch": 0.24132205876674775, |
| "grad_norm": 2.5040500164031982, |
| "learning_rate": 0.00018391443685084367, |
| "loss": 0.5711, |
| "step": 6250 |
| }, |
| { |
| "epoch": 0.24170817406077455, |
| "grad_norm": 1.9934203624725342, |
| "learning_rate": 0.00018388869583124188, |
| "loss": 0.3074, |
| "step": 6260 |
| }, |
| { |
| "epoch": 0.24209428935480135, |
| "grad_norm": 3.702509641647339, |
| "learning_rate": 0.0001838629548116401, |
| "loss": 0.3454, |
| "step": 6270 |
| }, |
| { |
| "epoch": 0.24248040464882814, |
| "grad_norm": 2.076802968978882, |
| "learning_rate": 0.0001838372137920383, |
| "loss": 0.3044, |
| "step": 6280 |
| }, |
| { |
| "epoch": 0.24286651994285494, |
| "grad_norm": 5.798679351806641, |
| "learning_rate": 0.00018381147277243652, |
| "loss": 0.3396, |
| "step": 6290 |
| }, |
| { |
| "epoch": 0.24325263523688173, |
| "grad_norm": 4.698869705200195, |
| "learning_rate": 0.00018378573175283473, |
| "loss": 0.3735, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.24363875053090853, |
| "grad_norm": 3.029979705810547, |
| "learning_rate": 0.00018375999073323295, |
| "loss": 0.3891, |
| "step": 6310 |
| }, |
| { |
| "epoch": 0.24402486582493532, |
| "grad_norm": 2.5507185459136963, |
| "learning_rate": 0.00018373424971363116, |
| "loss": 0.4854, |
| "step": 6320 |
| }, |
| { |
| "epoch": 0.24441098111896212, |
| "grad_norm": 3.2052571773529053, |
| "learning_rate": 0.00018370850869402937, |
| "loss": 0.6789, |
| "step": 6330 |
| }, |
| { |
| "epoch": 0.2447970964129889, |
| "grad_norm": 1.9265435934066772, |
| "learning_rate": 0.00018368276767442761, |
| "loss": 0.4505, |
| "step": 6340 |
| }, |
| { |
| "epoch": 0.2451832117070157, |
| "grad_norm": 0.8391959071159363, |
| "learning_rate": 0.0001836570266548258, |
| "loss": 0.3432, |
| "step": 6350 |
| }, |
| { |
| "epoch": 0.2455693270010425, |
| "grad_norm": 3.4653851985931396, |
| "learning_rate": 0.00018363128563522401, |
| "loss": 0.3571, |
| "step": 6360 |
| }, |
| { |
| "epoch": 0.2459554422950693, |
| "grad_norm": 2.3033368587493896, |
| "learning_rate": 0.00018360554461562223, |
| "loss": 0.3625, |
| "step": 6370 |
| }, |
| { |
| "epoch": 0.2463415575890961, |
| "grad_norm": 1.659408450126648, |
| "learning_rate": 0.00018357980359602044, |
| "loss": 0.5311, |
| "step": 6380 |
| }, |
| { |
| "epoch": 0.2467276728831229, |
| "grad_norm": 1.1839714050292969, |
| "learning_rate": 0.00018355406257641865, |
| "loss": 0.3905, |
| "step": 6390 |
| }, |
| { |
| "epoch": 0.24711378817714968, |
| "grad_norm": 0.49230822920799255, |
| "learning_rate": 0.00018352832155681687, |
| "loss": 0.4021, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.24749990347117648, |
| "grad_norm": 4.451594829559326, |
| "learning_rate": 0.0001835025805372151, |
| "loss": 0.4504, |
| "step": 6410 |
| }, |
| { |
| "epoch": 0.2478860187652033, |
| "grad_norm": 1.0058324337005615, |
| "learning_rate": 0.0001834768395176133, |
| "loss": 0.2636, |
| "step": 6420 |
| }, |
| { |
| "epoch": 0.2482721340592301, |
| "grad_norm": 2.7853894233703613, |
| "learning_rate": 0.0001834510984980115, |
| "loss": 0.47, |
| "step": 6430 |
| }, |
| { |
| "epoch": 0.2486582493532569, |
| "grad_norm": 2.730095148086548, |
| "learning_rate": 0.00018342535747840972, |
| "loss": 0.3941, |
| "step": 6440 |
| }, |
| { |
| "epoch": 0.2490443646472837, |
| "grad_norm": 2.4993178844451904, |
| "learning_rate": 0.00018339961645880793, |
| "loss": 0.5777, |
| "step": 6450 |
| }, |
| { |
| "epoch": 0.24943047994131048, |
| "grad_norm": 2.361525297164917, |
| "learning_rate": 0.00018337387543920617, |
| "loss": 0.3798, |
| "step": 6460 |
| }, |
| { |
| "epoch": 0.24981659523533728, |
| "grad_norm": 2.5558526515960693, |
| "learning_rate": 0.00018334813441960436, |
| "loss": 0.3113, |
| "step": 6470 |
| }, |
| { |
| "epoch": 0.25020271052936405, |
| "grad_norm": 0.8033503890037537, |
| "learning_rate": 0.0001833223934000026, |
| "loss": 0.5254, |
| "step": 6480 |
| }, |
| { |
| "epoch": 0.25058882582339087, |
| "grad_norm": 2.721090078353882, |
| "learning_rate": 0.0001832966523804008, |
| "loss": 0.393, |
| "step": 6490 |
| }, |
| { |
| "epoch": 0.25097494111741764, |
| "grad_norm": 1.7147916555404663, |
| "learning_rate": 0.000183270911360799, |
| "loss": 0.3225, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.25136105641144446, |
| "grad_norm": 2.388347864151001, |
| "learning_rate": 0.00018324517034119721, |
| "loss": 0.3519, |
| "step": 6510 |
| }, |
| { |
| "epoch": 0.25174717170547123, |
| "grad_norm": 2.470891237258911, |
| "learning_rate": 0.00018321942932159543, |
| "loss": 0.4384, |
| "step": 6520 |
| }, |
| { |
| "epoch": 0.25213328699949805, |
| "grad_norm": 1.4743351936340332, |
| "learning_rate": 0.00018319368830199367, |
| "loss": 0.2464, |
| "step": 6530 |
| }, |
| { |
| "epoch": 0.2525194022935248, |
| "grad_norm": 1.5889122486114502, |
| "learning_rate": 0.00018316794728239185, |
| "loss": 0.3149, |
| "step": 6540 |
| }, |
| { |
| "epoch": 0.25290551758755164, |
| "grad_norm": 4.900819778442383, |
| "learning_rate": 0.0001831422062627901, |
| "loss": 0.3978, |
| "step": 6550 |
| }, |
| { |
| "epoch": 0.25329163288157847, |
| "grad_norm": 5.22566556930542, |
| "learning_rate": 0.00018311646524318828, |
| "loss": 0.4473, |
| "step": 6560 |
| }, |
| { |
| "epoch": 0.25367774817560523, |
| "grad_norm": 4.7480363845825195, |
| "learning_rate": 0.0001830907242235865, |
| "loss": 0.3976, |
| "step": 6570 |
| }, |
| { |
| "epoch": 0.25406386346963206, |
| "grad_norm": 1.4711374044418335, |
| "learning_rate": 0.0001830649832039847, |
| "loss": 0.5183, |
| "step": 6580 |
| }, |
| { |
| "epoch": 0.2544499787636588, |
| "grad_norm": 2.237309217453003, |
| "learning_rate": 0.00018303924218438292, |
| "loss": 0.2171, |
| "step": 6590 |
| }, |
| { |
| "epoch": 0.25483609405768565, |
| "grad_norm": 4.107303619384766, |
| "learning_rate": 0.00018301350116478116, |
| "loss": 0.3918, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.2552222093517124, |
| "grad_norm": 4.7285003662109375, |
| "learning_rate": 0.00018298776014517935, |
| "loss": 0.2042, |
| "step": 6610 |
| }, |
| { |
| "epoch": 0.25560832464573924, |
| "grad_norm": 2.1333792209625244, |
| "learning_rate": 0.0001829620191255776, |
| "loss": 0.3502, |
| "step": 6620 |
| }, |
| { |
| "epoch": 0.255994439939766, |
| "grad_norm": 3.062173843383789, |
| "learning_rate": 0.00018293627810597577, |
| "loss": 0.3949, |
| "step": 6630 |
| }, |
| { |
| "epoch": 0.25638055523379283, |
| "grad_norm": 1.538854956626892, |
| "learning_rate": 0.00018291053708637401, |
| "loss": 0.4613, |
| "step": 6640 |
| }, |
| { |
| "epoch": 0.2567666705278196, |
| "grad_norm": 2.546586751937866, |
| "learning_rate": 0.00018288479606677223, |
| "loss": 0.5868, |
| "step": 6650 |
| }, |
| { |
| "epoch": 0.2571527858218464, |
| "grad_norm": 2.7282049655914307, |
| "learning_rate": 0.00018285905504717041, |
| "loss": 0.4186, |
| "step": 6660 |
| }, |
| { |
| "epoch": 0.2575389011158732, |
| "grad_norm": 3.204634189605713, |
| "learning_rate": 0.00018283331402756865, |
| "loss": 0.4072, |
| "step": 6670 |
| }, |
| { |
| "epoch": 0.2579250164099, |
| "grad_norm": 2.421846866607666, |
| "learning_rate": 0.00018280757300796684, |
| "loss": 0.306, |
| "step": 6680 |
| }, |
| { |
| "epoch": 0.2583111317039268, |
| "grad_norm": 4.243416786193848, |
| "learning_rate": 0.00018278183198836508, |
| "loss": 0.2631, |
| "step": 6690 |
| }, |
| { |
| "epoch": 0.2586972469979536, |
| "grad_norm": 1.0495362281799316, |
| "learning_rate": 0.00018275609096876327, |
| "loss": 0.3488, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.25908336229198037, |
| "grad_norm": 1.915279746055603, |
| "learning_rate": 0.0001827303499491615, |
| "loss": 0.2589, |
| "step": 6710 |
| }, |
| { |
| "epoch": 0.2594694775860072, |
| "grad_norm": 3.724299192428589, |
| "learning_rate": 0.00018270460892955972, |
| "loss": 0.5118, |
| "step": 6720 |
| }, |
| { |
| "epoch": 0.25985559288003396, |
| "grad_norm": 2.832204580307007, |
| "learning_rate": 0.0001826788679099579, |
| "loss": 0.2508, |
| "step": 6730 |
| }, |
| { |
| "epoch": 0.2602417081740608, |
| "grad_norm": 1.1942508220672607, |
| "learning_rate": 0.00018265312689035615, |
| "loss": 0.4328, |
| "step": 6740 |
| }, |
| { |
| "epoch": 0.26062782346808755, |
| "grad_norm": 1.0741711854934692, |
| "learning_rate": 0.00018262738587075433, |
| "loss": 0.3514, |
| "step": 6750 |
| }, |
| { |
| "epoch": 0.2610139387621144, |
| "grad_norm": 2.9918277263641357, |
| "learning_rate": 0.00018260164485115257, |
| "loss": 0.3528, |
| "step": 6760 |
| }, |
| { |
| "epoch": 0.26140005405614114, |
| "grad_norm": 1.3773655891418457, |
| "learning_rate": 0.0001825759038315508, |
| "loss": 0.365, |
| "step": 6770 |
| }, |
| { |
| "epoch": 0.26178616935016796, |
| "grad_norm": 3.5288615226745605, |
| "learning_rate": 0.000182550162811949, |
| "loss": 0.3645, |
| "step": 6780 |
| }, |
| { |
| "epoch": 0.26217228464419473, |
| "grad_norm": 1.2178785800933838, |
| "learning_rate": 0.00018252442179234721, |
| "loss": 0.3742, |
| "step": 6790 |
| }, |
| { |
| "epoch": 0.26255839993822155, |
| "grad_norm": 2.7981081008911133, |
| "learning_rate": 0.0001824986807727454, |
| "loss": 0.6174, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.2629445152322484, |
| "grad_norm": 1.6766215562820435, |
| "learning_rate": 0.00018247293975314364, |
| "loss": 0.3028, |
| "step": 6810 |
| }, |
| { |
| "epoch": 0.26333063052627514, |
| "grad_norm": 3.7797629833221436, |
| "learning_rate": 0.00018244719873354183, |
| "loss": 0.2633, |
| "step": 6820 |
| }, |
| { |
| "epoch": 0.26371674582030197, |
| "grad_norm": 7.794743537902832, |
| "learning_rate": 0.00018242145771394007, |
| "loss": 0.3586, |
| "step": 6830 |
| }, |
| { |
| "epoch": 0.26410286111432874, |
| "grad_norm": 0.5704814195632935, |
| "learning_rate": 0.00018239571669433828, |
| "loss": 0.3506, |
| "step": 6840 |
| }, |
| { |
| "epoch": 0.26448897640835556, |
| "grad_norm": 5.771059513092041, |
| "learning_rate": 0.0001823699756747365, |
| "loss": 0.3881, |
| "step": 6850 |
| }, |
| { |
| "epoch": 0.2648750917023823, |
| "grad_norm": 2.723592519760132, |
| "learning_rate": 0.0001823442346551347, |
| "loss": 0.3955, |
| "step": 6860 |
| }, |
| { |
| "epoch": 0.26526120699640915, |
| "grad_norm": 1.5448215007781982, |
| "learning_rate": 0.0001823184936355329, |
| "loss": 0.495, |
| "step": 6870 |
| }, |
| { |
| "epoch": 0.2656473222904359, |
| "grad_norm": 2.2980363368988037, |
| "learning_rate": 0.00018229275261593113, |
| "loss": 0.2695, |
| "step": 6880 |
| }, |
| { |
| "epoch": 0.26603343758446274, |
| "grad_norm": 1.959811806678772, |
| "learning_rate": 0.00018226701159632932, |
| "loss": 0.383, |
| "step": 6890 |
| }, |
| { |
| "epoch": 0.2664195528784895, |
| "grad_norm": 2.1491482257843018, |
| "learning_rate": 0.00018224127057672756, |
| "loss": 0.5655, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.26680566817251633, |
| "grad_norm": 6.472841262817383, |
| "learning_rate": 0.00018221552955712577, |
| "loss": 0.4757, |
| "step": 6910 |
| }, |
| { |
| "epoch": 0.2671917834665431, |
| "grad_norm": 7.878561496734619, |
| "learning_rate": 0.000182189788537524, |
| "loss": 0.3944, |
| "step": 6920 |
| }, |
| { |
| "epoch": 0.2675778987605699, |
| "grad_norm": 0.052701435983181, |
| "learning_rate": 0.0001821640475179222, |
| "loss": 0.382, |
| "step": 6930 |
| }, |
| { |
| "epoch": 0.2679640140545967, |
| "grad_norm": 2.294677972793579, |
| "learning_rate": 0.00018213830649832039, |
| "loss": 0.2932, |
| "step": 6940 |
| }, |
| { |
| "epoch": 0.2683501293486235, |
| "grad_norm": 1.6058757305145264, |
| "learning_rate": 0.00018211256547871863, |
| "loss": 0.4438, |
| "step": 6950 |
| }, |
| { |
| "epoch": 0.2687362446426503, |
| "grad_norm": 4.003495693206787, |
| "learning_rate": 0.00018208682445911684, |
| "loss": 0.5945, |
| "step": 6960 |
| }, |
| { |
| "epoch": 0.2691223599366771, |
| "grad_norm": 1.423017144203186, |
| "learning_rate": 0.00018206108343951505, |
| "loss": 0.4356, |
| "step": 6970 |
| }, |
| { |
| "epoch": 0.26950847523070387, |
| "grad_norm": 2.206341028213501, |
| "learning_rate": 0.00018203534241991327, |
| "loss": 0.344, |
| "step": 6980 |
| }, |
| { |
| "epoch": 0.2698945905247307, |
| "grad_norm": 0.6644784212112427, |
| "learning_rate": 0.00018200960140031148, |
| "loss": 0.4988, |
| "step": 6990 |
| }, |
| { |
| "epoch": 0.27028070581875746, |
| "grad_norm": 2.4569833278656006, |
| "learning_rate": 0.0001819838603807097, |
| "loss": 0.3689, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.2706668211127843, |
| "grad_norm": 1.554567575454712, |
| "learning_rate": 0.00018195811936110788, |
| "loss": 0.4684, |
| "step": 7010 |
| }, |
| { |
| "epoch": 0.27105293640681105, |
| "grad_norm": 3.2556328773498535, |
| "learning_rate": 0.00018193237834150612, |
| "loss": 0.611, |
| "step": 7020 |
| }, |
| { |
| "epoch": 0.2714390517008379, |
| "grad_norm": 2.9123427867889404, |
| "learning_rate": 0.00018190663732190433, |
| "loss": 0.4278, |
| "step": 7030 |
| }, |
| { |
| "epoch": 0.27182516699486464, |
| "grad_norm": 2.159273862838745, |
| "learning_rate": 0.00018188089630230255, |
| "loss": 0.2384, |
| "step": 7040 |
| }, |
| { |
| "epoch": 0.27221128228889147, |
| "grad_norm": 3.4977822303771973, |
| "learning_rate": 0.00018185515528270076, |
| "loss": 0.5459, |
| "step": 7050 |
| }, |
| { |
| "epoch": 0.27259739758291823, |
| "grad_norm": 1.1822031736373901, |
| "learning_rate": 0.00018182941426309897, |
| "loss": 0.4364, |
| "step": 7060 |
| }, |
| { |
| "epoch": 0.27298351287694506, |
| "grad_norm": 2.4467339515686035, |
| "learning_rate": 0.00018180367324349719, |
| "loss": 0.5198, |
| "step": 7070 |
| }, |
| { |
| "epoch": 0.2733696281709719, |
| "grad_norm": 1.0406467914581299, |
| "learning_rate": 0.0001817779322238954, |
| "loss": 0.2797, |
| "step": 7080 |
| }, |
| { |
| "epoch": 0.27375574346499865, |
| "grad_norm": 1.925830602645874, |
| "learning_rate": 0.0001817521912042936, |
| "loss": 0.4898, |
| "step": 7090 |
| }, |
| { |
| "epoch": 0.27414185875902547, |
| "grad_norm": 3.0385682582855225, |
| "learning_rate": 0.00018172645018469183, |
| "loss": 0.3867, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.27452797405305224, |
| "grad_norm": 1.5285695791244507, |
| "learning_rate": 0.00018170070916509004, |
| "loss": 0.4233, |
| "step": 7110 |
| }, |
| { |
| "epoch": 0.27491408934707906, |
| "grad_norm": 1.266693115234375, |
| "learning_rate": 0.00018167496814548825, |
| "loss": 0.4724, |
| "step": 7120 |
| }, |
| { |
| "epoch": 0.27530020464110583, |
| "grad_norm": 3.371323585510254, |
| "learning_rate": 0.00018164922712588647, |
| "loss": 0.533, |
| "step": 7130 |
| }, |
| { |
| "epoch": 0.27568631993513265, |
| "grad_norm": 2.662691116333008, |
| "learning_rate": 0.00018162348610628468, |
| "loss": 0.3134, |
| "step": 7140 |
| }, |
| { |
| "epoch": 0.2760724352291594, |
| "grad_norm": 1.8977057933807373, |
| "learning_rate": 0.0001815977450866829, |
| "loss": 0.3038, |
| "step": 7150 |
| }, |
| { |
| "epoch": 0.27645855052318624, |
| "grad_norm": 3.1027894020080566, |
| "learning_rate": 0.0001815720040670811, |
| "loss": 0.5074, |
| "step": 7160 |
| }, |
| { |
| "epoch": 0.276844665817213, |
| "grad_norm": 1.2112785577774048, |
| "learning_rate": 0.00018154626304747932, |
| "loss": 0.324, |
| "step": 7170 |
| }, |
| { |
| "epoch": 0.27723078111123983, |
| "grad_norm": 1.6500996351242065, |
| "learning_rate": 0.00018152052202787753, |
| "loss": 0.2856, |
| "step": 7180 |
| }, |
| { |
| "epoch": 0.2776168964052666, |
| "grad_norm": 3.215747833251953, |
| "learning_rate": 0.00018149478100827575, |
| "loss": 0.4522, |
| "step": 7190 |
| }, |
| { |
| "epoch": 0.2780030116992934, |
| "grad_norm": 4.8541059494018555, |
| "learning_rate": 0.00018146903998867396, |
| "loss": 0.4106, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.2783891269933202, |
| "grad_norm": 2.3697152137756348, |
| "learning_rate": 0.00018144329896907217, |
| "loss": 0.2673, |
| "step": 7210 |
| }, |
| { |
| "epoch": 0.278775242287347, |
| "grad_norm": 2.9693639278411865, |
| "learning_rate": 0.00018141755794947039, |
| "loss": 0.3949, |
| "step": 7220 |
| }, |
| { |
| "epoch": 0.2791613575813738, |
| "grad_norm": 2.691817283630371, |
| "learning_rate": 0.0001813918169298686, |
| "loss": 0.3427, |
| "step": 7230 |
| }, |
| { |
| "epoch": 0.2795474728754006, |
| "grad_norm": 5.197331428527832, |
| "learning_rate": 0.0001813660759102668, |
| "loss": 0.4331, |
| "step": 7240 |
| }, |
| { |
| "epoch": 0.27993358816942737, |
| "grad_norm": 1.5799933671951294, |
| "learning_rate": 0.00018134033489066503, |
| "loss": 0.3543, |
| "step": 7250 |
| }, |
| { |
| "epoch": 0.2803197034634542, |
| "grad_norm": 1.3614271879196167, |
| "learning_rate": 0.00018131459387106324, |
| "loss": 0.5289, |
| "step": 7260 |
| }, |
| { |
| "epoch": 0.28070581875748096, |
| "grad_norm": 2.2942802906036377, |
| "learning_rate": 0.00018128885285146145, |
| "loss": 0.4318, |
| "step": 7270 |
| }, |
| { |
| "epoch": 0.2810919340515078, |
| "grad_norm": 1.1805604696273804, |
| "learning_rate": 0.00018126311183185967, |
| "loss": 0.4754, |
| "step": 7280 |
| }, |
| { |
| "epoch": 0.28147804934553455, |
| "grad_norm": 0.5108867883682251, |
| "learning_rate": 0.00018123737081225788, |
| "loss": 0.4517, |
| "step": 7290 |
| }, |
| { |
| "epoch": 0.2818641646395614, |
| "grad_norm": 1.1736596822738647, |
| "learning_rate": 0.0001812116297926561, |
| "loss": 0.4538, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.28225027993358814, |
| "grad_norm": 5.497414588928223, |
| "learning_rate": 0.0001811858887730543, |
| "loss": 0.5116, |
| "step": 7310 |
| }, |
| { |
| "epoch": 0.28263639522761497, |
| "grad_norm": 1.1347368955612183, |
| "learning_rate": 0.00018116014775345252, |
| "loss": 0.3848, |
| "step": 7320 |
| }, |
| { |
| "epoch": 0.28302251052164173, |
| "grad_norm": 2.740715742111206, |
| "learning_rate": 0.00018113440673385073, |
| "loss": 0.3456, |
| "step": 7330 |
| }, |
| { |
| "epoch": 0.28340862581566856, |
| "grad_norm": 1.3853389024734497, |
| "learning_rate": 0.00018110866571424897, |
| "loss": 0.3398, |
| "step": 7340 |
| }, |
| { |
| "epoch": 0.2837947411096954, |
| "grad_norm": 7.493706703186035, |
| "learning_rate": 0.00018108292469464716, |
| "loss": 0.2726, |
| "step": 7350 |
| }, |
| { |
| "epoch": 0.28418085640372215, |
| "grad_norm": 1.81704843044281, |
| "learning_rate": 0.00018105718367504537, |
| "loss": 0.3818, |
| "step": 7360 |
| }, |
| { |
| "epoch": 0.28456697169774897, |
| "grad_norm": 2.4877755641937256, |
| "learning_rate": 0.00018103144265544359, |
| "loss": 0.3499, |
| "step": 7370 |
| }, |
| { |
| "epoch": 0.28495308699177574, |
| "grad_norm": 1.3704471588134766, |
| "learning_rate": 0.0001810057016358418, |
| "loss": 0.2346, |
| "step": 7380 |
| }, |
| { |
| "epoch": 0.28533920228580256, |
| "grad_norm": 2.664745569229126, |
| "learning_rate": 0.00018097996061624, |
| "loss": 0.4041, |
| "step": 7390 |
| }, |
| { |
| "epoch": 0.28572531757982933, |
| "grad_norm": 3.6539089679718018, |
| "learning_rate": 0.00018095421959663823, |
| "loss": 0.2885, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.28611143287385615, |
| "grad_norm": 0.8653857707977295, |
| "learning_rate": 0.00018092847857703647, |
| "loss": 0.3849, |
| "step": 7410 |
| }, |
| { |
| "epoch": 0.2864975481678829, |
| "grad_norm": 2.6319446563720703, |
| "learning_rate": 0.00018090273755743465, |
| "loss": 0.2728, |
| "step": 7420 |
| }, |
| { |
| "epoch": 0.28688366346190974, |
| "grad_norm": 2.3457818031311035, |
| "learning_rate": 0.00018087699653783287, |
| "loss": 0.446, |
| "step": 7430 |
| }, |
| { |
| "epoch": 0.2872697787559365, |
| "grad_norm": 0.8546158671379089, |
| "learning_rate": 0.00018085125551823108, |
| "loss": 0.2898, |
| "step": 7440 |
| }, |
| { |
| "epoch": 0.28765589404996333, |
| "grad_norm": 0.45937278866767883, |
| "learning_rate": 0.0001808255144986293, |
| "loss": 0.583, |
| "step": 7450 |
| }, |
| { |
| "epoch": 0.2880420093439901, |
| "grad_norm": 1.7129520177841187, |
| "learning_rate": 0.00018079977347902753, |
| "loss": 0.4908, |
| "step": 7460 |
| }, |
| { |
| "epoch": 0.2884281246380169, |
| "grad_norm": 4.106715679168701, |
| "learning_rate": 0.00018077403245942572, |
| "loss": 0.3373, |
| "step": 7470 |
| }, |
| { |
| "epoch": 0.2888142399320437, |
| "grad_norm": 3.8112800121307373, |
| "learning_rate": 0.00018074829143982396, |
| "loss": 0.392, |
| "step": 7480 |
| }, |
| { |
| "epoch": 0.2892003552260705, |
| "grad_norm": 0.5382593274116516, |
| "learning_rate": 0.00018072255042022215, |
| "loss": 0.2929, |
| "step": 7490 |
| }, |
| { |
| "epoch": 0.2895864705200973, |
| "grad_norm": 2.50888991355896, |
| "learning_rate": 0.00018069680940062036, |
| "loss": 0.3361, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.2899725858141241, |
| "grad_norm": 3.3544275760650635, |
| "learning_rate": 0.00018067106838101857, |
| "loss": 0.388, |
| "step": 7510 |
| }, |
| { |
| "epoch": 0.2903587011081509, |
| "grad_norm": 1.192386507987976, |
| "learning_rate": 0.00018064532736141679, |
| "loss": 0.4427, |
| "step": 7520 |
| }, |
| { |
| "epoch": 0.2907448164021777, |
| "grad_norm": 1.5527079105377197, |
| "learning_rate": 0.00018061958634181503, |
| "loss": 0.4023, |
| "step": 7530 |
| }, |
| { |
| "epoch": 0.29113093169620446, |
| "grad_norm": 0.67446368932724, |
| "learning_rate": 0.0001805938453222132, |
| "loss": 0.4949, |
| "step": 7540 |
| }, |
| { |
| "epoch": 0.2915170469902313, |
| "grad_norm": 1.6349838972091675, |
| "learning_rate": 0.00018056810430261145, |
| "loss": 0.3811, |
| "step": 7550 |
| }, |
| { |
| "epoch": 0.29190316228425806, |
| "grad_norm": 1.4848904609680176, |
| "learning_rate": 0.00018054236328300964, |
| "loss": 0.3851, |
| "step": 7560 |
| }, |
| { |
| "epoch": 0.2922892775782849, |
| "grad_norm": 0.9933151006698608, |
| "learning_rate": 0.00018051662226340785, |
| "loss": 0.4699, |
| "step": 7570 |
| }, |
| { |
| "epoch": 0.29267539287231165, |
| "grad_norm": 1.1026233434677124, |
| "learning_rate": 0.00018049088124380607, |
| "loss": 0.3287, |
| "step": 7580 |
| }, |
| { |
| "epoch": 0.29306150816633847, |
| "grad_norm": 1.232954740524292, |
| "learning_rate": 0.00018046514022420428, |
| "loss": 0.3722, |
| "step": 7590 |
| }, |
| { |
| "epoch": 0.2934476234603653, |
| "grad_norm": 3.8303146362304688, |
| "learning_rate": 0.00018043939920460252, |
| "loss": 0.2985, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.29383373875439206, |
| "grad_norm": 1.9358845949172974, |
| "learning_rate": 0.0001804136581850007, |
| "loss": 0.4361, |
| "step": 7610 |
| }, |
| { |
| "epoch": 0.2942198540484189, |
| "grad_norm": 1.8905962705612183, |
| "learning_rate": 0.00018038791716539895, |
| "loss": 0.2835, |
| "step": 7620 |
| }, |
| { |
| "epoch": 0.29460596934244565, |
| "grad_norm": 1.9965651035308838, |
| "learning_rate": 0.00018036217614579713, |
| "loss": 0.5387, |
| "step": 7630 |
| }, |
| { |
| "epoch": 0.2949920846364725, |
| "grad_norm": 4.204270839691162, |
| "learning_rate": 0.00018033643512619535, |
| "loss": 0.3498, |
| "step": 7640 |
| }, |
| { |
| "epoch": 0.29537819993049924, |
| "grad_norm": 1.4732340574264526, |
| "learning_rate": 0.00018031069410659359, |
| "loss": 0.315, |
| "step": 7650 |
| }, |
| { |
| "epoch": 0.29576431522452606, |
| "grad_norm": 1.0233594179153442, |
| "learning_rate": 0.00018028495308699177, |
| "loss": 0.1536, |
| "step": 7660 |
| }, |
| { |
| "epoch": 0.29615043051855283, |
| "grad_norm": 3.1531457901000977, |
| "learning_rate": 0.00018025921206739, |
| "loss": 0.3793, |
| "step": 7670 |
| }, |
| { |
| "epoch": 0.29653654581257965, |
| "grad_norm": 0.8080945014953613, |
| "learning_rate": 0.0001802334710477882, |
| "loss": 0.5589, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.2969226611066064, |
| "grad_norm": 3.1202728748321533, |
| "learning_rate": 0.00018020773002818644, |
| "loss": 0.4652, |
| "step": 7690 |
| }, |
| { |
| "epoch": 0.29730877640063325, |
| "grad_norm": 2.5934784412384033, |
| "learning_rate": 0.00018018198900858463, |
| "loss": 0.4921, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.29769489169466, |
| "grad_norm": 2.858642101287842, |
| "learning_rate": 0.00018015624798898284, |
| "loss": 0.2732, |
| "step": 7710 |
| }, |
| { |
| "epoch": 0.29808100698868684, |
| "grad_norm": 3.621229887008667, |
| "learning_rate": 0.00018013050696938108, |
| "loss": 0.5639, |
| "step": 7720 |
| }, |
| { |
| "epoch": 0.2984671222827136, |
| "grad_norm": 3.7943220138549805, |
| "learning_rate": 0.00018010476594977926, |
| "loss": 0.3177, |
| "step": 7730 |
| }, |
| { |
| "epoch": 0.2988532375767404, |
| "grad_norm": 1.6371623277664185, |
| "learning_rate": 0.0001800790249301775, |
| "loss": 0.4211, |
| "step": 7740 |
| }, |
| { |
| "epoch": 0.2992393528707672, |
| "grad_norm": 1.9557713270187378, |
| "learning_rate": 0.0001800532839105757, |
| "loss": 0.4351, |
| "step": 7750 |
| }, |
| { |
| "epoch": 0.299625468164794, |
| "grad_norm": 2.684964895248413, |
| "learning_rate": 0.00018002754289097393, |
| "loss": 0.39, |
| "step": 7760 |
| }, |
| { |
| "epoch": 0.3000115834588208, |
| "grad_norm": 1.7401316165924072, |
| "learning_rate": 0.00018000180187137215, |
| "loss": 0.2844, |
| "step": 7770 |
| }, |
| { |
| "epoch": 0.3003976987528476, |
| "grad_norm": 0.6305844187736511, |
| "learning_rate": 0.00017997606085177033, |
| "loss": 0.2472, |
| "step": 7780 |
| }, |
| { |
| "epoch": 0.3007838140468744, |
| "grad_norm": 2.2880289554595947, |
| "learning_rate": 0.00017995031983216857, |
| "loss": 0.3952, |
| "step": 7790 |
| }, |
| { |
| "epoch": 0.3011699293409012, |
| "grad_norm": 3.423980951309204, |
| "learning_rate": 0.00017992457881256676, |
| "loss": 0.4459, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.30155604463492797, |
| "grad_norm": 0.6920475363731384, |
| "learning_rate": 0.000179898837792965, |
| "loss": 0.2909, |
| "step": 7810 |
| }, |
| { |
| "epoch": 0.3019421599289548, |
| "grad_norm": 0.8905349373817444, |
| "learning_rate": 0.00017987309677336318, |
| "loss": 0.346, |
| "step": 7820 |
| }, |
| { |
| "epoch": 0.30232827522298156, |
| "grad_norm": 1.8836702108383179, |
| "learning_rate": 0.00017984735575376143, |
| "loss": 0.4038, |
| "step": 7830 |
| }, |
| { |
| "epoch": 0.3027143905170084, |
| "grad_norm": 2.6712753772735596, |
| "learning_rate": 0.00017982161473415964, |
| "loss": 0.3452, |
| "step": 7840 |
| }, |
| { |
| "epoch": 0.30310050581103515, |
| "grad_norm": 2.344122886657715, |
| "learning_rate": 0.00017979587371455785, |
| "loss": 0.5091, |
| "step": 7850 |
| }, |
| { |
| "epoch": 0.30348662110506197, |
| "grad_norm": 3.734415054321289, |
| "learning_rate": 0.00017977013269495607, |
| "loss": 0.3893, |
| "step": 7860 |
| }, |
| { |
| "epoch": 0.3038727363990888, |
| "grad_norm": 1.70572829246521, |
| "learning_rate": 0.00017974439167535425, |
| "loss": 0.4829, |
| "step": 7870 |
| }, |
| { |
| "epoch": 0.30425885169311556, |
| "grad_norm": 1.779189109802246, |
| "learning_rate": 0.0001797186506557525, |
| "loss": 0.5361, |
| "step": 7880 |
| }, |
| { |
| "epoch": 0.3046449669871424, |
| "grad_norm": 2.888803482055664, |
| "learning_rate": 0.00017969290963615068, |
| "loss": 0.4305, |
| "step": 7890 |
| }, |
| { |
| "epoch": 0.30503108228116915, |
| "grad_norm": 1.2247655391693115, |
| "learning_rate": 0.00017966716861654892, |
| "loss": 0.3817, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.305417197575196, |
| "grad_norm": 2.995152473449707, |
| "learning_rate": 0.00017964142759694713, |
| "loss": 0.4669, |
| "step": 7910 |
| }, |
| { |
| "epoch": 0.30580331286922274, |
| "grad_norm": 8.049060821533203, |
| "learning_rate": 0.00017961568657734535, |
| "loss": 0.6706, |
| "step": 7920 |
| }, |
| { |
| "epoch": 0.30618942816324957, |
| "grad_norm": 2.1181435585021973, |
| "learning_rate": 0.00017958994555774356, |
| "loss": 0.4353, |
| "step": 7930 |
| }, |
| { |
| "epoch": 0.30657554345727633, |
| "grad_norm": 8.394509315490723, |
| "learning_rate": 0.00017956420453814174, |
| "loss": 0.3497, |
| "step": 7940 |
| }, |
| { |
| "epoch": 0.30696165875130316, |
| "grad_norm": 2.5140750408172607, |
| "learning_rate": 0.00017953846351853998, |
| "loss": 0.5774, |
| "step": 7950 |
| }, |
| { |
| "epoch": 0.3073477740453299, |
| "grad_norm": 2.720942974090576, |
| "learning_rate": 0.0001795127224989382, |
| "loss": 0.4457, |
| "step": 7960 |
| }, |
| { |
| "epoch": 0.30773388933935675, |
| "grad_norm": 1.8155667781829834, |
| "learning_rate": 0.0001794869814793364, |
| "loss": 0.4155, |
| "step": 7970 |
| }, |
| { |
| "epoch": 0.3081200046333835, |
| "grad_norm": 1.9989752769470215, |
| "learning_rate": 0.00017946124045973462, |
| "loss": 0.3233, |
| "step": 7980 |
| }, |
| { |
| "epoch": 0.30850611992741034, |
| "grad_norm": 0.7483557462692261, |
| "learning_rate": 0.00017943549944013284, |
| "loss": 0.2932, |
| "step": 7990 |
| }, |
| { |
| "epoch": 0.3088922352214371, |
| "grad_norm": 0.5750642418861389, |
| "learning_rate": 0.00017940975842053105, |
| "loss": 0.401, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.30927835051546393, |
| "grad_norm": 1.2084500789642334, |
| "learning_rate": 0.00017938401740092924, |
| "loss": 0.3705, |
| "step": 8010 |
| }, |
| { |
| "epoch": 0.3096644658094907, |
| "grad_norm": 1.833434820175171, |
| "learning_rate": 0.00017935827638132748, |
| "loss": 0.3507, |
| "step": 8020 |
| }, |
| { |
| "epoch": 0.3100505811035175, |
| "grad_norm": 3.147508382797241, |
| "learning_rate": 0.0001793325353617257, |
| "loss": 0.3255, |
| "step": 8030 |
| }, |
| { |
| "epoch": 0.3104366963975443, |
| "grad_norm": 2.150932788848877, |
| "learning_rate": 0.0001793067943421239, |
| "loss": 0.3401, |
| "step": 8040 |
| }, |
| { |
| "epoch": 0.3108228116915711, |
| "grad_norm": 3.3340635299682617, |
| "learning_rate": 0.00017928105332252212, |
| "loss": 0.3606, |
| "step": 8050 |
| }, |
| { |
| "epoch": 0.3112089269855979, |
| "grad_norm": 5.173205375671387, |
| "learning_rate": 0.00017925531230292033, |
| "loss": 0.1695, |
| "step": 8060 |
| }, |
| { |
| "epoch": 0.3115950422796247, |
| "grad_norm": 1.0863877534866333, |
| "learning_rate": 0.00017922957128331854, |
| "loss": 0.3038, |
| "step": 8070 |
| }, |
| { |
| "epoch": 0.31198115757365147, |
| "grad_norm": 1.5977118015289307, |
| "learning_rate": 0.00017920383026371676, |
| "loss": 0.2291, |
| "step": 8080 |
| }, |
| { |
| "epoch": 0.3123672728676783, |
| "grad_norm": 4.040243625640869, |
| "learning_rate": 0.00017917808924411497, |
| "loss": 0.8538, |
| "step": 8090 |
| }, |
| { |
| "epoch": 0.31275338816170506, |
| "grad_norm": 1.5926854610443115, |
| "learning_rate": 0.00017915234822451318, |
| "loss": 0.4733, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.3131395034557319, |
| "grad_norm": 1.0959421396255493, |
| "learning_rate": 0.0001791266072049114, |
| "loss": 0.6076, |
| "step": 8110 |
| }, |
| { |
| "epoch": 0.3135256187497587, |
| "grad_norm": 2.786085367202759, |
| "learning_rate": 0.0001791008661853096, |
| "loss": 0.3229, |
| "step": 8120 |
| }, |
| { |
| "epoch": 0.3139117340437855, |
| "grad_norm": 2.2573914527893066, |
| "learning_rate": 0.00017907512516570782, |
| "loss": 0.3676, |
| "step": 8130 |
| }, |
| { |
| "epoch": 0.3142978493378123, |
| "grad_norm": 2.271852493286133, |
| "learning_rate": 0.00017904938414610604, |
| "loss": 0.6275, |
| "step": 8140 |
| }, |
| { |
| "epoch": 0.31468396463183906, |
| "grad_norm": 1.9762821197509766, |
| "learning_rate": 0.00017902364312650425, |
| "loss": 0.2232, |
| "step": 8150 |
| }, |
| { |
| "epoch": 0.3150700799258659, |
| "grad_norm": 2.9960873126983643, |
| "learning_rate": 0.00017899790210690246, |
| "loss": 0.4739, |
| "step": 8160 |
| }, |
| { |
| "epoch": 0.31545619521989265, |
| "grad_norm": 1.142216682434082, |
| "learning_rate": 0.00017897216108730068, |
| "loss": 0.5983, |
| "step": 8170 |
| }, |
| { |
| "epoch": 0.3158423105139195, |
| "grad_norm": 1.7127768993377686, |
| "learning_rate": 0.0001789464200676989, |
| "loss": 0.4131, |
| "step": 8180 |
| }, |
| { |
| "epoch": 0.31622842580794625, |
| "grad_norm": 1.579793095588684, |
| "learning_rate": 0.0001789206790480971, |
| "loss": 0.3119, |
| "step": 8190 |
| }, |
| { |
| "epoch": 0.31661454110197307, |
| "grad_norm": 0.9647886157035828, |
| "learning_rate": 0.00017889493802849532, |
| "loss": 0.398, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.31700065639599984, |
| "grad_norm": 3.435312032699585, |
| "learning_rate": 0.00017886919700889353, |
| "loss": 0.405, |
| "step": 8210 |
| }, |
| { |
| "epoch": 0.31738677169002666, |
| "grad_norm": 2.1500205993652344, |
| "learning_rate": 0.00017884345598929174, |
| "loss": 0.2519, |
| "step": 8220 |
| }, |
| { |
| "epoch": 0.3177728869840534, |
| "grad_norm": 1.3107216358184814, |
| "learning_rate": 0.00017881771496968996, |
| "loss": 0.2846, |
| "step": 8230 |
| }, |
| { |
| "epoch": 0.31815900227808025, |
| "grad_norm": 0.1899029165506363, |
| "learning_rate": 0.00017879197395008817, |
| "loss": 0.4597, |
| "step": 8240 |
| }, |
| { |
| "epoch": 0.318545117572107, |
| "grad_norm": 2.329299211502075, |
| "learning_rate": 0.00017876623293048638, |
| "loss": 0.6523, |
| "step": 8250 |
| }, |
| { |
| "epoch": 0.31893123286613384, |
| "grad_norm": 0.41523978114128113, |
| "learning_rate": 0.0001787404919108846, |
| "loss": 0.3266, |
| "step": 8260 |
| }, |
| { |
| "epoch": 0.3193173481601606, |
| "grad_norm": 0.7914639711380005, |
| "learning_rate": 0.0001787147508912828, |
| "loss": 0.4029, |
| "step": 8270 |
| }, |
| { |
| "epoch": 0.31970346345418743, |
| "grad_norm": 0.6159287691116333, |
| "learning_rate": 0.00017868900987168102, |
| "loss": 0.4426, |
| "step": 8280 |
| }, |
| { |
| "epoch": 0.3200895787482142, |
| "grad_norm": 1.3690640926361084, |
| "learning_rate": 0.00017866326885207924, |
| "loss": 0.2974, |
| "step": 8290 |
| }, |
| { |
| "epoch": 0.320475694042241, |
| "grad_norm": 0.8592869639396667, |
| "learning_rate": 0.00017863752783247745, |
| "loss": 0.232, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.3208618093362678, |
| "grad_norm": 0.43169018626213074, |
| "learning_rate": 0.00017861178681287566, |
| "loss": 0.4033, |
| "step": 8310 |
| }, |
| { |
| "epoch": 0.3212479246302946, |
| "grad_norm": 0.8405828475952148, |
| "learning_rate": 0.00017858604579327388, |
| "loss": 0.3339, |
| "step": 8320 |
| }, |
| { |
| "epoch": 0.3216340399243214, |
| "grad_norm": 2.3412604331970215, |
| "learning_rate": 0.0001785603047736721, |
| "loss": 0.2781, |
| "step": 8330 |
| }, |
| { |
| "epoch": 0.3220201552183482, |
| "grad_norm": 2.412045478820801, |
| "learning_rate": 0.0001785345637540703, |
| "loss": 0.4346, |
| "step": 8340 |
| }, |
| { |
| "epoch": 0.32240627051237497, |
| "grad_norm": 3.626305341720581, |
| "learning_rate": 0.00017850882273446852, |
| "loss": 0.327, |
| "step": 8350 |
| }, |
| { |
| "epoch": 0.3227923858064018, |
| "grad_norm": 0.5645825266838074, |
| "learning_rate": 0.00017848308171486673, |
| "loss": 0.234, |
| "step": 8360 |
| }, |
| { |
| "epoch": 0.32317850110042856, |
| "grad_norm": 4.27307653427124, |
| "learning_rate": 0.00017845734069526494, |
| "loss": 0.5493, |
| "step": 8370 |
| }, |
| { |
| "epoch": 0.3235646163944554, |
| "grad_norm": 0.4511154890060425, |
| "learning_rate": 0.00017843159967566316, |
| "loss": 0.3501, |
| "step": 8380 |
| }, |
| { |
| "epoch": 0.3239507316884822, |
| "grad_norm": 0.314996600151062, |
| "learning_rate": 0.00017840585865606137, |
| "loss": 0.3544, |
| "step": 8390 |
| }, |
| { |
| "epoch": 0.324336846982509, |
| "grad_norm": 1.6546530723571777, |
| "learning_rate": 0.00017838011763645958, |
| "loss": 0.2455, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.3247229622765358, |
| "grad_norm": 3.2812252044677734, |
| "learning_rate": 0.0001783543766168578, |
| "loss": 0.3333, |
| "step": 8410 |
| }, |
| { |
| "epoch": 0.32510907757056257, |
| "grad_norm": 3.5717616081237793, |
| "learning_rate": 0.000178328635597256, |
| "loss": 0.4679, |
| "step": 8420 |
| }, |
| { |
| "epoch": 0.3254951928645894, |
| "grad_norm": 1.12017023563385, |
| "learning_rate": 0.00017830289457765422, |
| "loss": 0.3481, |
| "step": 8430 |
| }, |
| { |
| "epoch": 0.32588130815861616, |
| "grad_norm": 1.869462490081787, |
| "learning_rate": 0.00017827715355805244, |
| "loss": 0.4566, |
| "step": 8440 |
| }, |
| { |
| "epoch": 0.326267423452643, |
| "grad_norm": 1.4613149166107178, |
| "learning_rate": 0.00017825141253845065, |
| "loss": 0.5456, |
| "step": 8450 |
| }, |
| { |
| "epoch": 0.32665353874666975, |
| "grad_norm": 0.6842670440673828, |
| "learning_rate": 0.0001782256715188489, |
| "loss": 0.2776, |
| "step": 8460 |
| }, |
| { |
| "epoch": 0.32703965404069657, |
| "grad_norm": 2.9485504627227783, |
| "learning_rate": 0.00017819993049924708, |
| "loss": 0.3204, |
| "step": 8470 |
| }, |
| { |
| "epoch": 0.32742576933472334, |
| "grad_norm": 3.2084853649139404, |
| "learning_rate": 0.0001781741894796453, |
| "loss": 0.3391, |
| "step": 8480 |
| }, |
| { |
| "epoch": 0.32781188462875016, |
| "grad_norm": 1.9732774496078491, |
| "learning_rate": 0.0001781484484600435, |
| "loss": 0.3283, |
| "step": 8490 |
| }, |
| { |
| "epoch": 0.32819799992277693, |
| "grad_norm": 0.6378610134124756, |
| "learning_rate": 0.00017812270744044172, |
| "loss": 0.4519, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.32858411521680375, |
| "grad_norm": 4.108947277069092, |
| "learning_rate": 0.00017809696642083993, |
| "loss": 0.4933, |
| "step": 8510 |
| }, |
| { |
| "epoch": 0.3289702305108305, |
| "grad_norm": 2.7623212337493896, |
| "learning_rate": 0.00017807122540123814, |
| "loss": 0.4197, |
| "step": 8520 |
| }, |
| { |
| "epoch": 0.32935634580485734, |
| "grad_norm": 1.8904645442962646, |
| "learning_rate": 0.00017804548438163638, |
| "loss": 0.325, |
| "step": 8530 |
| }, |
| { |
| "epoch": 0.3297424610988841, |
| "grad_norm": 0.5131659507751465, |
| "learning_rate": 0.00017801974336203457, |
| "loss": 0.2403, |
| "step": 8540 |
| }, |
| { |
| "epoch": 0.33012857639291093, |
| "grad_norm": 2.965916633605957, |
| "learning_rate": 0.0001779940023424328, |
| "loss": 0.4597, |
| "step": 8550 |
| }, |
| { |
| "epoch": 0.3305146916869377, |
| "grad_norm": 1.5409698486328125, |
| "learning_rate": 0.000177968261322831, |
| "loss": 0.4594, |
| "step": 8560 |
| }, |
| { |
| "epoch": 0.3309008069809645, |
| "grad_norm": 1.1746805906295776, |
| "learning_rate": 0.0001779425203032292, |
| "loss": 0.4581, |
| "step": 8570 |
| }, |
| { |
| "epoch": 0.3312869222749913, |
| "grad_norm": 4.493356227874756, |
| "learning_rate": 0.00017791677928362745, |
| "loss": 0.4699, |
| "step": 8580 |
| }, |
| { |
| "epoch": 0.3316730375690181, |
| "grad_norm": 3.506526470184326, |
| "learning_rate": 0.00017789103826402564, |
| "loss": 0.3974, |
| "step": 8590 |
| }, |
| { |
| "epoch": 0.3320591528630449, |
| "grad_norm": 2.3893234729766846, |
| "learning_rate": 0.00017786529724442388, |
| "loss": 0.2823, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.3324452681570717, |
| "grad_norm": 1.6228163242340088, |
| "learning_rate": 0.00017783955622482206, |
| "loss": 0.4199, |
| "step": 8610 |
| }, |
| { |
| "epoch": 0.3328313834510985, |
| "grad_norm": 3.2869131565093994, |
| "learning_rate": 0.0001778138152052203, |
| "loss": 0.3173, |
| "step": 8620 |
| }, |
| { |
| "epoch": 0.3332174987451253, |
| "grad_norm": 5.547116279602051, |
| "learning_rate": 0.0001777880741856185, |
| "loss": 0.4584, |
| "step": 8630 |
| }, |
| { |
| "epoch": 0.33360361403915206, |
| "grad_norm": 1.3338594436645508, |
| "learning_rate": 0.0001777623331660167, |
| "loss": 0.4235, |
| "step": 8640 |
| }, |
| { |
| "epoch": 0.3339897293331789, |
| "grad_norm": 1.9165093898773193, |
| "learning_rate": 0.00017773659214641494, |
| "loss": 0.2989, |
| "step": 8650 |
| }, |
| { |
| "epoch": 0.3343758446272057, |
| "grad_norm": 1.968935251235962, |
| "learning_rate": 0.00017771085112681313, |
| "loss": 0.4194, |
| "step": 8660 |
| }, |
| { |
| "epoch": 0.3347619599212325, |
| "grad_norm": 9.66997241973877, |
| "learning_rate": 0.00017768511010721137, |
| "loss": 0.5818, |
| "step": 8670 |
| }, |
| { |
| "epoch": 0.3351480752152593, |
| "grad_norm": 2.3636281490325928, |
| "learning_rate": 0.00017765936908760956, |
| "loss": 0.3317, |
| "step": 8680 |
| }, |
| { |
| "epoch": 0.33553419050928607, |
| "grad_norm": 3.3569977283477783, |
| "learning_rate": 0.0001776336280680078, |
| "loss": 0.4388, |
| "step": 8690 |
| }, |
| { |
| "epoch": 0.3359203058033129, |
| "grad_norm": 1.2452306747436523, |
| "learning_rate": 0.00017760788704840598, |
| "loss": 0.1368, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.33630642109733966, |
| "grad_norm": 0.0380173958837986, |
| "learning_rate": 0.0001775821460288042, |
| "loss": 0.3264, |
| "step": 8710 |
| }, |
| { |
| "epoch": 0.3366925363913665, |
| "grad_norm": 1.5271002054214478, |
| "learning_rate": 0.00017755640500920244, |
| "loss": 0.2943, |
| "step": 8720 |
| }, |
| { |
| "epoch": 0.33707865168539325, |
| "grad_norm": 0.9701687693595886, |
| "learning_rate": 0.00017753066398960062, |
| "loss": 0.353, |
| "step": 8730 |
| }, |
| { |
| "epoch": 0.33746476697942007, |
| "grad_norm": 1.9296154975891113, |
| "learning_rate": 0.00017750492296999886, |
| "loss": 0.3776, |
| "step": 8740 |
| }, |
| { |
| "epoch": 0.33785088227344684, |
| "grad_norm": 1.2136276960372925, |
| "learning_rate": 0.00017747918195039705, |
| "loss": 0.5126, |
| "step": 8750 |
| }, |
| { |
| "epoch": 0.33823699756747366, |
| "grad_norm": 1.7323212623596191, |
| "learning_rate": 0.0001774534409307953, |
| "loss": 0.3477, |
| "step": 8760 |
| }, |
| { |
| "epoch": 0.33862311286150043, |
| "grad_norm": 1.164534091949463, |
| "learning_rate": 0.0001774276999111935, |
| "loss": 0.4053, |
| "step": 8770 |
| }, |
| { |
| "epoch": 0.33900922815552725, |
| "grad_norm": 0.42989471554756165, |
| "learning_rate": 0.0001774019588915917, |
| "loss": 0.3026, |
| "step": 8780 |
| }, |
| { |
| "epoch": 0.339395343449554, |
| "grad_norm": 2.357590436935425, |
| "learning_rate": 0.00017737621787198993, |
| "loss": 0.3869, |
| "step": 8790 |
| }, |
| { |
| "epoch": 0.33978145874358084, |
| "grad_norm": 1.9374550580978394, |
| "learning_rate": 0.00017735047685238812, |
| "loss": 0.2975, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.3401675740376076, |
| "grad_norm": 4.8107428550720215, |
| "learning_rate": 0.00017732473583278636, |
| "loss": 0.3959, |
| "step": 8810 |
| }, |
| { |
| "epoch": 0.34055368933163443, |
| "grad_norm": 1.938700556755066, |
| "learning_rate": 0.00017729899481318454, |
| "loss": 0.3726, |
| "step": 8820 |
| }, |
| { |
| "epoch": 0.3409398046256612, |
| "grad_norm": 3.147167682647705, |
| "learning_rate": 0.00017727325379358278, |
| "loss": 0.1828, |
| "step": 8830 |
| }, |
| { |
| "epoch": 0.341325919919688, |
| "grad_norm": 1.8921313285827637, |
| "learning_rate": 0.000177247512773981, |
| "loss": 0.2038, |
| "step": 8840 |
| }, |
| { |
| "epoch": 0.3417120352137148, |
| "grad_norm": 0.9098349213600159, |
| "learning_rate": 0.00017722177175437918, |
| "loss": 0.2853, |
| "step": 8850 |
| }, |
| { |
| "epoch": 0.3420981505077416, |
| "grad_norm": 2.4006853103637695, |
| "learning_rate": 0.00017719603073477742, |
| "loss": 0.6054, |
| "step": 8860 |
| }, |
| { |
| "epoch": 0.3424842658017684, |
| "grad_norm": 1.9303867816925049, |
| "learning_rate": 0.0001771702897151756, |
| "loss": 0.3507, |
| "step": 8870 |
| }, |
| { |
| "epoch": 0.3428703810957952, |
| "grad_norm": 0.49361029267311096, |
| "learning_rate": 0.00017714454869557385, |
| "loss": 0.4661, |
| "step": 8880 |
| }, |
| { |
| "epoch": 0.343256496389822, |
| "grad_norm": 2.542618751525879, |
| "learning_rate": 0.00017711880767597204, |
| "loss": 0.6924, |
| "step": 8890 |
| }, |
| { |
| "epoch": 0.3436426116838488, |
| "grad_norm": 0.5868918895721436, |
| "learning_rate": 0.00017709306665637028, |
| "loss": 0.4507, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.3440287269778756, |
| "grad_norm": 2.4685137271881104, |
| "learning_rate": 0.0001770673256367685, |
| "loss": 0.4538, |
| "step": 8910 |
| }, |
| { |
| "epoch": 0.3444148422719024, |
| "grad_norm": 2.6662702560424805, |
| "learning_rate": 0.00017704158461716668, |
| "loss": 0.6181, |
| "step": 8920 |
| }, |
| { |
| "epoch": 0.3448009575659292, |
| "grad_norm": 1.705103874206543, |
| "learning_rate": 0.00017701584359756492, |
| "loss": 0.481, |
| "step": 8930 |
| }, |
| { |
| "epoch": 0.345187072859956, |
| "grad_norm": 2.0710952281951904, |
| "learning_rate": 0.0001769901025779631, |
| "loss": 0.4357, |
| "step": 8940 |
| }, |
| { |
| "epoch": 0.3455731881539828, |
| "grad_norm": 3.487117290496826, |
| "learning_rate": 0.00017696436155836134, |
| "loss": 0.4572, |
| "step": 8950 |
| }, |
| { |
| "epoch": 0.34595930344800957, |
| "grad_norm": 3.03472900390625, |
| "learning_rate": 0.00017693862053875956, |
| "loss": 0.4437, |
| "step": 8960 |
| }, |
| { |
| "epoch": 0.3463454187420364, |
| "grad_norm": 1.310692548751831, |
| "learning_rate": 0.00017691287951915777, |
| "loss": 0.4218, |
| "step": 8970 |
| }, |
| { |
| "epoch": 0.34673153403606316, |
| "grad_norm": 4.131219387054443, |
| "learning_rate": 0.00017688713849955598, |
| "loss": 0.445, |
| "step": 8980 |
| }, |
| { |
| "epoch": 0.34711764933009, |
| "grad_norm": 1.4199285507202148, |
| "learning_rate": 0.00017686139747995417, |
| "loss": 0.3093, |
| "step": 8990 |
| }, |
| { |
| "epoch": 0.34750376462411675, |
| "grad_norm": 1.9338914155960083, |
| "learning_rate": 0.0001768356564603524, |
| "loss": 0.275, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.3478898799181436, |
| "grad_norm": 3.425877332687378, |
| "learning_rate": 0.0001768099154407506, |
| "loss": 0.4621, |
| "step": 9010 |
| }, |
| { |
| "epoch": 0.34827599521217034, |
| "grad_norm": 3.6350486278533936, |
| "learning_rate": 0.00017678417442114884, |
| "loss": 0.3313, |
| "step": 9020 |
| }, |
| { |
| "epoch": 0.34866211050619716, |
| "grad_norm": 3.339202880859375, |
| "learning_rate": 0.00017675843340154705, |
| "loss": 0.5958, |
| "step": 9030 |
| }, |
| { |
| "epoch": 0.34904822580022393, |
| "grad_norm": 4.148682117462158, |
| "learning_rate": 0.00017673269238194526, |
| "loss": 0.2384, |
| "step": 9040 |
| }, |
| { |
| "epoch": 0.34943434109425076, |
| "grad_norm": 0.9697182178497314, |
| "learning_rate": 0.00017670695136234348, |
| "loss": 0.3119, |
| "step": 9050 |
| }, |
| { |
| "epoch": 0.3498204563882775, |
| "grad_norm": 0.53201824426651, |
| "learning_rate": 0.0001766812103427417, |
| "loss": 0.4339, |
| "step": 9060 |
| }, |
| { |
| "epoch": 0.35020657168230435, |
| "grad_norm": 0.9727185368537903, |
| "learning_rate": 0.0001766554693231399, |
| "loss": 0.3289, |
| "step": 9070 |
| }, |
| { |
| "epoch": 0.3505926869763311, |
| "grad_norm": 4.32904052734375, |
| "learning_rate": 0.00017662972830353812, |
| "loss": 0.4673, |
| "step": 9080 |
| }, |
| { |
| "epoch": 0.35097880227035794, |
| "grad_norm": 2.511558771133423, |
| "learning_rate": 0.00017660398728393633, |
| "loss": 0.2257, |
| "step": 9090 |
| }, |
| { |
| "epoch": 0.3513649175643847, |
| "grad_norm": 1.8378714323043823, |
| "learning_rate": 0.00017657824626433454, |
| "loss": 0.3977, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.3517510328584115, |
| "grad_norm": 1.3297137022018433, |
| "learning_rate": 0.00017655250524473276, |
| "loss": 0.3541, |
| "step": 9110 |
| }, |
| { |
| "epoch": 0.3521371481524383, |
| "grad_norm": 3.253089666366577, |
| "learning_rate": 0.00017652676422513097, |
| "loss": 0.6326, |
| "step": 9120 |
| }, |
| { |
| "epoch": 0.3525232634464651, |
| "grad_norm": 0.9691923260688782, |
| "learning_rate": 0.00017650102320552918, |
| "loss": 0.2206, |
| "step": 9130 |
| }, |
| { |
| "epoch": 0.3529093787404919, |
| "grad_norm": 1.570204496383667, |
| "learning_rate": 0.0001764752821859274, |
| "loss": 0.2769, |
| "step": 9140 |
| }, |
| { |
| "epoch": 0.3532954940345187, |
| "grad_norm": 1.9307161569595337, |
| "learning_rate": 0.0001764495411663256, |
| "loss": 0.3149, |
| "step": 9150 |
| }, |
| { |
| "epoch": 0.3536816093285455, |
| "grad_norm": 2.783297300338745, |
| "learning_rate": 0.00017642380014672382, |
| "loss": 0.3912, |
| "step": 9160 |
| }, |
| { |
| "epoch": 0.3540677246225723, |
| "grad_norm": 2.193371057510376, |
| "learning_rate": 0.00017639805912712204, |
| "loss": 0.3782, |
| "step": 9170 |
| }, |
| { |
| "epoch": 0.3544538399165991, |
| "grad_norm": 2.3460335731506348, |
| "learning_rate": 0.00017637231810752025, |
| "loss": 0.5051, |
| "step": 9180 |
| }, |
| { |
| "epoch": 0.3548399552106259, |
| "grad_norm": 2.4668326377868652, |
| "learning_rate": 0.00017634657708791846, |
| "loss": 0.2899, |
| "step": 9190 |
| }, |
| { |
| "epoch": 0.3552260705046527, |
| "grad_norm": 2.004683017730713, |
| "learning_rate": 0.00017632083606831668, |
| "loss": 0.3137, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.3556121857986795, |
| "grad_norm": 6.333971977233887, |
| "learning_rate": 0.0001762950950487149, |
| "loss": 0.5027, |
| "step": 9210 |
| }, |
| { |
| "epoch": 0.3559983010927063, |
| "grad_norm": 1.7840352058410645, |
| "learning_rate": 0.0001762693540291131, |
| "loss": 0.3988, |
| "step": 9220 |
| }, |
| { |
| "epoch": 0.35638441638673307, |
| "grad_norm": 0.9257024526596069, |
| "learning_rate": 0.00017624361300951132, |
| "loss": 0.3662, |
| "step": 9230 |
| }, |
| { |
| "epoch": 0.3567705316807599, |
| "grad_norm": 2.582887887954712, |
| "learning_rate": 0.00017621787198990953, |
| "loss": 0.2863, |
| "step": 9240 |
| }, |
| { |
| "epoch": 0.35715664697478666, |
| "grad_norm": 3.119943380355835, |
| "learning_rate": 0.00017619213097030774, |
| "loss": 0.4041, |
| "step": 9250 |
| }, |
| { |
| "epoch": 0.3575427622688135, |
| "grad_norm": 2.2561371326446533, |
| "learning_rate": 0.00017616638995070596, |
| "loss": 0.3969, |
| "step": 9260 |
| }, |
| { |
| "epoch": 0.35792887756284025, |
| "grad_norm": 2.104891538619995, |
| "learning_rate": 0.00017614064893110417, |
| "loss": 0.3216, |
| "step": 9270 |
| }, |
| { |
| "epoch": 0.3583149928568671, |
| "grad_norm": 1.6922805309295654, |
| "learning_rate": 0.00017611490791150238, |
| "loss": 0.3828, |
| "step": 9280 |
| }, |
| { |
| "epoch": 0.35870110815089384, |
| "grad_norm": 1.0928469896316528, |
| "learning_rate": 0.0001760891668919006, |
| "loss": 0.3225, |
| "step": 9290 |
| }, |
| { |
| "epoch": 0.35908722344492067, |
| "grad_norm": 2.4089863300323486, |
| "learning_rate": 0.0001760634258722988, |
| "loss": 0.4143, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.35947333873894743, |
| "grad_norm": 0.5562119483947754, |
| "learning_rate": 0.00017603768485269702, |
| "loss": 0.4597, |
| "step": 9310 |
| }, |
| { |
| "epoch": 0.35985945403297426, |
| "grad_norm": 1.3904486894607544, |
| "learning_rate": 0.00017601194383309524, |
| "loss": 0.4462, |
| "step": 9320 |
| }, |
| { |
| "epoch": 0.360245569327001, |
| "grad_norm": 2.1393306255340576, |
| "learning_rate": 0.00017598620281349345, |
| "loss": 0.2613, |
| "step": 9330 |
| }, |
| { |
| "epoch": 0.36063168462102785, |
| "grad_norm": 1.3657029867172241, |
| "learning_rate": 0.00017596046179389166, |
| "loss": 0.4968, |
| "step": 9340 |
| }, |
| { |
| "epoch": 0.3610177999150546, |
| "grad_norm": 2.424880027770996, |
| "learning_rate": 0.00017593472077428988, |
| "loss": 0.5982, |
| "step": 9350 |
| }, |
| { |
| "epoch": 0.36140391520908144, |
| "grad_norm": 6.178807735443115, |
| "learning_rate": 0.0001759089797546881, |
| "loss": 0.5355, |
| "step": 9360 |
| }, |
| { |
| "epoch": 0.3617900305031082, |
| "grad_norm": 1.5572419166564941, |
| "learning_rate": 0.0001758832387350863, |
| "loss": 0.4435, |
| "step": 9370 |
| }, |
| { |
| "epoch": 0.36217614579713503, |
| "grad_norm": 0.46649104356765747, |
| "learning_rate": 0.00017585749771548452, |
| "loss": 0.352, |
| "step": 9380 |
| }, |
| { |
| "epoch": 0.3625622610911618, |
| "grad_norm": 1.9611142873764038, |
| "learning_rate": 0.00017583175669588276, |
| "loss": 0.2684, |
| "step": 9390 |
| }, |
| { |
| "epoch": 0.3629483763851886, |
| "grad_norm": 1.7648595571517944, |
| "learning_rate": 0.00017580601567628094, |
| "loss": 0.3186, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.3633344916792154, |
| "grad_norm": 1.7970843315124512, |
| "learning_rate": 0.00017578027465667916, |
| "loss": 0.5339, |
| "step": 9410 |
| }, |
| { |
| "epoch": 0.3637206069732422, |
| "grad_norm": 3.084897994995117, |
| "learning_rate": 0.00017575453363707737, |
| "loss": 0.5143, |
| "step": 9420 |
| }, |
| { |
| "epoch": 0.36410672226726903, |
| "grad_norm": 1.440626621246338, |
| "learning_rate": 0.00017572879261747558, |
| "loss": 0.4067, |
| "step": 9430 |
| }, |
| { |
| "epoch": 0.3644928375612958, |
| "grad_norm": 0.44918450713157654, |
| "learning_rate": 0.0001757030515978738, |
| "loss": 0.2306, |
| "step": 9440 |
| }, |
| { |
| "epoch": 0.3648789528553226, |
| "grad_norm": 2.617272138595581, |
| "learning_rate": 0.000175677310578272, |
| "loss": 0.3166, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.3652650681493494, |
| "grad_norm": 2.575073719024658, |
| "learning_rate": 0.00017565156955867025, |
| "loss": 0.6645, |
| "step": 9460 |
| }, |
| { |
| "epoch": 0.3656511834433762, |
| "grad_norm": 0.9430664777755737, |
| "learning_rate": 0.00017562582853906844, |
| "loss": 0.2753, |
| "step": 9470 |
| }, |
| { |
| "epoch": 0.366037298737403, |
| "grad_norm": 1.9400445222854614, |
| "learning_rate": 0.00017560008751946665, |
| "loss": 0.4689, |
| "step": 9480 |
| }, |
| { |
| "epoch": 0.3664234140314298, |
| "grad_norm": 4.0443220138549805, |
| "learning_rate": 0.00017557434649986486, |
| "loss": 0.5373, |
| "step": 9490 |
| }, |
| { |
| "epoch": 0.3668095293254566, |
| "grad_norm": 3.4999184608459473, |
| "learning_rate": 0.00017554860548026308, |
| "loss": 0.3412, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.3671956446194834, |
| "grad_norm": 2.2023515701293945, |
| "learning_rate": 0.0001755228644606613, |
| "loss": 0.3385, |
| "step": 9510 |
| }, |
| { |
| "epoch": 0.36758175991351016, |
| "grad_norm": 1.213641881942749, |
| "learning_rate": 0.0001754971234410595, |
| "loss": 0.4785, |
| "step": 9520 |
| }, |
| { |
| "epoch": 0.367967875207537, |
| "grad_norm": 0.4178420603275299, |
| "learning_rate": 0.00017547138242145774, |
| "loss": 0.2605, |
| "step": 9530 |
| }, |
| { |
| "epoch": 0.36835399050156376, |
| "grad_norm": 2.676564931869507, |
| "learning_rate": 0.00017544564140185593, |
| "loss": 0.5297, |
| "step": 9540 |
| }, |
| { |
| "epoch": 0.3687401057955906, |
| "grad_norm": 0.8604353070259094, |
| "learning_rate": 0.00017541990038225414, |
| "loss": 0.3983, |
| "step": 9550 |
| }, |
| { |
| "epoch": 0.36912622108961735, |
| "grad_norm": 1.298893690109253, |
| "learning_rate": 0.00017539415936265236, |
| "loss": 0.3229, |
| "step": 9560 |
| }, |
| { |
| "epoch": 0.36951233638364417, |
| "grad_norm": 4.109025478363037, |
| "learning_rate": 0.00017536841834305057, |
| "loss": 0.519, |
| "step": 9570 |
| }, |
| { |
| "epoch": 0.36989845167767094, |
| "grad_norm": 3.440915584564209, |
| "learning_rate": 0.0001753426773234488, |
| "loss": 0.4061, |
| "step": 9580 |
| }, |
| { |
| "epoch": 0.37028456697169776, |
| "grad_norm": 0.1484186202287674, |
| "learning_rate": 0.000175316936303847, |
| "loss": 0.3443, |
| "step": 9590 |
| }, |
| { |
| "epoch": 0.3706706822657245, |
| "grad_norm": 3.114328145980835, |
| "learning_rate": 0.00017529119528424524, |
| "loss": 0.2451, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.37105679755975135, |
| "grad_norm": 1.8218796253204346, |
| "learning_rate": 0.00017526545426464342, |
| "loss": 0.2511, |
| "step": 9610 |
| }, |
| { |
| "epoch": 0.3714429128537781, |
| "grad_norm": 1.0732795000076294, |
| "learning_rate": 0.00017523971324504164, |
| "loss": 0.1581, |
| "step": 9620 |
| }, |
| { |
| "epoch": 0.37182902814780494, |
| "grad_norm": 1.0567959547042847, |
| "learning_rate": 0.00017521397222543985, |
| "loss": 0.1924, |
| "step": 9630 |
| }, |
| { |
| "epoch": 0.3722151434418317, |
| "grad_norm": 0.3467637896537781, |
| "learning_rate": 0.00017518823120583806, |
| "loss": 0.3571, |
| "step": 9640 |
| }, |
| { |
| "epoch": 0.37260125873585853, |
| "grad_norm": 2.6293838024139404, |
| "learning_rate": 0.0001751624901862363, |
| "loss": 0.3282, |
| "step": 9650 |
| }, |
| { |
| "epoch": 0.3729873740298853, |
| "grad_norm": 1.159696102142334, |
| "learning_rate": 0.0001751367491666345, |
| "loss": 0.2636, |
| "step": 9660 |
| }, |
| { |
| "epoch": 0.3733734893239121, |
| "grad_norm": 0.6884826421737671, |
| "learning_rate": 0.00017511100814703273, |
| "loss": 0.2842, |
| "step": 9670 |
| }, |
| { |
| "epoch": 0.3737596046179389, |
| "grad_norm": 3.789825201034546, |
| "learning_rate": 0.00017508526712743091, |
| "loss": 0.599, |
| "step": 9680 |
| }, |
| { |
| "epoch": 0.3741457199119657, |
| "grad_norm": 1.0705493688583374, |
| "learning_rate": 0.00017505952610782913, |
| "loss": 0.1746, |
| "step": 9690 |
| }, |
| { |
| "epoch": 0.37453183520599254, |
| "grad_norm": 1.8735803365707397, |
| "learning_rate": 0.00017503378508822734, |
| "loss": 0.3259, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.3749179505000193, |
| "grad_norm": 1.2987112998962402, |
| "learning_rate": 0.00017500804406862555, |
| "loss": 0.5738, |
| "step": 9710 |
| }, |
| { |
| "epoch": 0.3753040657940461, |
| "grad_norm": 1.5362507104873657, |
| "learning_rate": 0.0001749823030490238, |
| "loss": 0.3815, |
| "step": 9720 |
| }, |
| { |
| "epoch": 0.3756901810880729, |
| "grad_norm": 0.1640123724937439, |
| "learning_rate": 0.00017495656202942198, |
| "loss": 0.3672, |
| "step": 9730 |
| }, |
| { |
| "epoch": 0.3760762963820997, |
| "grad_norm": 0.6714594960212708, |
| "learning_rate": 0.00017493082100982022, |
| "loss": 0.2849, |
| "step": 9740 |
| }, |
| { |
| "epoch": 0.3764624116761265, |
| "grad_norm": 4.330246448516846, |
| "learning_rate": 0.0001749050799902184, |
| "loss": 0.4023, |
| "step": 9750 |
| }, |
| { |
| "epoch": 0.3768485269701533, |
| "grad_norm": 0.8616659641265869, |
| "learning_rate": 0.00017487933897061665, |
| "loss": 0.4434, |
| "step": 9760 |
| }, |
| { |
| "epoch": 0.3772346422641801, |
| "grad_norm": 2.6581578254699707, |
| "learning_rate": 0.00017485359795101486, |
| "loss": 0.4854, |
| "step": 9770 |
| }, |
| { |
| "epoch": 0.3776207575582069, |
| "grad_norm": 1.8269850015640259, |
| "learning_rate": 0.00017482785693141305, |
| "loss": 0.6033, |
| "step": 9780 |
| }, |
| { |
| "epoch": 0.37800687285223367, |
| "grad_norm": 2.256073236465454, |
| "learning_rate": 0.0001748021159118113, |
| "loss": 0.5317, |
| "step": 9790 |
| }, |
| { |
| "epoch": 0.3783929881462605, |
| "grad_norm": 0.8793076872825623, |
| "learning_rate": 0.00017477637489220947, |
| "loss": 0.3883, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.37877910344028726, |
| "grad_norm": 1.71831214427948, |
| "learning_rate": 0.00017475063387260772, |
| "loss": 0.2473, |
| "step": 9810 |
| }, |
| { |
| "epoch": 0.3791652187343141, |
| "grad_norm": 3.4802069664001465, |
| "learning_rate": 0.0001747248928530059, |
| "loss": 0.4847, |
| "step": 9820 |
| }, |
| { |
| "epoch": 0.37955133402834085, |
| "grad_norm": 5.419053077697754, |
| "learning_rate": 0.00017469915183340414, |
| "loss": 0.3668, |
| "step": 9830 |
| }, |
| { |
| "epoch": 0.37993744932236767, |
| "grad_norm": 1.567060112953186, |
| "learning_rate": 0.00017467341081380236, |
| "loss": 0.3342, |
| "step": 9840 |
| }, |
| { |
| "epoch": 0.38032356461639444, |
| "grad_norm": 3.0100274085998535, |
| "learning_rate": 0.00017464766979420054, |
| "loss": 0.476, |
| "step": 9850 |
| }, |
| { |
| "epoch": 0.38070967991042126, |
| "grad_norm": 0.7659344673156738, |
| "learning_rate": 0.00017462192877459878, |
| "loss": 0.2608, |
| "step": 9860 |
| }, |
| { |
| "epoch": 0.38109579520444803, |
| "grad_norm": 3.9540984630584717, |
| "learning_rate": 0.00017459618775499697, |
| "loss": 0.763, |
| "step": 9870 |
| }, |
| { |
| "epoch": 0.38148191049847485, |
| "grad_norm": 0.8768689036369324, |
| "learning_rate": 0.0001745704467353952, |
| "loss": 0.3365, |
| "step": 9880 |
| }, |
| { |
| "epoch": 0.3818680257925016, |
| "grad_norm": 0.9985928535461426, |
| "learning_rate": 0.00017454470571579342, |
| "loss": 0.3116, |
| "step": 9890 |
| }, |
| { |
| "epoch": 0.38225414108652844, |
| "grad_norm": 2.0326671600341797, |
| "learning_rate": 0.00017451896469619163, |
| "loss": 0.289, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.3826402563805552, |
| "grad_norm": 3.2696290016174316, |
| "learning_rate": 0.00017449322367658985, |
| "loss": 0.4097, |
| "step": 9910 |
| }, |
| { |
| "epoch": 0.38302637167458203, |
| "grad_norm": 3.048860788345337, |
| "learning_rate": 0.00017446748265698803, |
| "loss": 0.5181, |
| "step": 9920 |
| }, |
| { |
| "epoch": 0.3834124869686088, |
| "grad_norm": 1.7899913787841797, |
| "learning_rate": 0.00017444174163738627, |
| "loss": 0.2166, |
| "step": 9930 |
| }, |
| { |
| "epoch": 0.3837986022626356, |
| "grad_norm": 3.6762959957122803, |
| "learning_rate": 0.00017441600061778446, |
| "loss": 0.4971, |
| "step": 9940 |
| }, |
| { |
| "epoch": 0.3841847175566624, |
| "grad_norm": 0.9108519554138184, |
| "learning_rate": 0.0001743902595981827, |
| "loss": 0.4974, |
| "step": 9950 |
| }, |
| { |
| "epoch": 0.3845708328506892, |
| "grad_norm": 4.062527656555176, |
| "learning_rate": 0.00017436451857858091, |
| "loss": 0.4448, |
| "step": 9960 |
| }, |
| { |
| "epoch": 0.38495694814471604, |
| "grad_norm": 3.230902671813965, |
| "learning_rate": 0.00017433877755897913, |
| "loss": 0.2977, |
| "step": 9970 |
| }, |
| { |
| "epoch": 0.3853430634387428, |
| "grad_norm": 3.8190758228302, |
| "learning_rate": 0.00017431303653937734, |
| "loss": 0.4887, |
| "step": 9980 |
| }, |
| { |
| "epoch": 0.38572917873276963, |
| "grad_norm": 0.9079695343971252, |
| "learning_rate": 0.00017428729551977553, |
| "loss": 0.271, |
| "step": 9990 |
| }, |
| { |
| "epoch": 0.3861152940267964, |
| "grad_norm": 3.3730807304382324, |
| "learning_rate": 0.00017426155450017377, |
| "loss": 0.3782, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.3865014093208232, |
| "grad_norm": 1.07533860206604, |
| "learning_rate": 0.00017423581348057195, |
| "loss": 0.3905, |
| "step": 10010 |
| }, |
| { |
| "epoch": 0.38688752461485, |
| "grad_norm": 1.3856415748596191, |
| "learning_rate": 0.0001742100724609702, |
| "loss": 0.3757, |
| "step": 10020 |
| }, |
| { |
| "epoch": 0.3872736399088768, |
| "grad_norm": 5.751671314239502, |
| "learning_rate": 0.0001741843314413684, |
| "loss": 0.6657, |
| "step": 10030 |
| }, |
| { |
| "epoch": 0.3876597552029036, |
| "grad_norm": 0.6837680339813232, |
| "learning_rate": 0.00017415859042176662, |
| "loss": 0.2318, |
| "step": 10040 |
| }, |
| { |
| "epoch": 0.3880458704969304, |
| "grad_norm": 2.770787239074707, |
| "learning_rate": 0.00017413284940216483, |
| "loss": 0.3706, |
| "step": 10050 |
| }, |
| { |
| "epoch": 0.38843198579095717, |
| "grad_norm": 2.3058855533599854, |
| "learning_rate": 0.00017410710838256302, |
| "loss": 0.1641, |
| "step": 10060 |
| }, |
| { |
| "epoch": 0.388818101084984, |
| "grad_norm": 1.894718885421753, |
| "learning_rate": 0.00017408136736296126, |
| "loss": 0.4752, |
| "step": 10070 |
| }, |
| { |
| "epoch": 0.38920421637901076, |
| "grad_norm": 1.8346868753433228, |
| "learning_rate": 0.00017405562634335947, |
| "loss": 0.5007, |
| "step": 10080 |
| }, |
| { |
| "epoch": 0.3895903316730376, |
| "grad_norm": 5.277680397033691, |
| "learning_rate": 0.0001740298853237577, |
| "loss": 0.4399, |
| "step": 10090 |
| }, |
| { |
| "epoch": 0.38997644696706435, |
| "grad_norm": 1.306093692779541, |
| "learning_rate": 0.0001740041443041559, |
| "loss": 0.371, |
| "step": 10100 |
| }, |
| { |
| "epoch": 0.3903625622610912, |
| "grad_norm": 3.0306456089019775, |
| "learning_rate": 0.00017397840328455411, |
| "loss": 0.2515, |
| "step": 10110 |
| }, |
| { |
| "epoch": 0.39074867755511794, |
| "grad_norm": 0.7951543927192688, |
| "learning_rate": 0.00017395266226495233, |
| "loss": 0.3775, |
| "step": 10120 |
| }, |
| { |
| "epoch": 0.39113479284914476, |
| "grad_norm": 5.185150146484375, |
| "learning_rate": 0.00017392692124535051, |
| "loss": 0.3591, |
| "step": 10130 |
| }, |
| { |
| "epoch": 0.39152090814317153, |
| "grad_norm": 1.1718593835830688, |
| "learning_rate": 0.00017390118022574875, |
| "loss": 0.5484, |
| "step": 10140 |
| }, |
| { |
| "epoch": 0.39190702343719835, |
| "grad_norm": 1.6352128982543945, |
| "learning_rate": 0.00017387543920614697, |
| "loss": 0.2817, |
| "step": 10150 |
| }, |
| { |
| "epoch": 0.3922931387312251, |
| "grad_norm": 2.4863786697387695, |
| "learning_rate": 0.00017384969818654518, |
| "loss": 0.4027, |
| "step": 10160 |
| }, |
| { |
| "epoch": 0.39267925402525194, |
| "grad_norm": 2.069805383682251, |
| "learning_rate": 0.0001738239571669434, |
| "loss": 0.3559, |
| "step": 10170 |
| }, |
| { |
| "epoch": 0.3930653693192787, |
| "grad_norm": 1.671980619430542, |
| "learning_rate": 0.0001737982161473416, |
| "loss": 0.4405, |
| "step": 10180 |
| }, |
| { |
| "epoch": 0.39345148461330554, |
| "grad_norm": 4.298947334289551, |
| "learning_rate": 0.00017377247512773982, |
| "loss": 0.3005, |
| "step": 10190 |
| }, |
| { |
| "epoch": 0.3938375999073323, |
| "grad_norm": 0.4142851233482361, |
| "learning_rate": 0.000173746734108138, |
| "loss": 0.4248, |
| "step": 10200 |
| }, |
| { |
| "epoch": 0.3942237152013591, |
| "grad_norm": 3.5962865352630615, |
| "learning_rate": 0.00017372099308853625, |
| "loss": 0.27, |
| "step": 10210 |
| }, |
| { |
| "epoch": 0.39460983049538595, |
| "grad_norm": 2.20154070854187, |
| "learning_rate": 0.00017369525206893446, |
| "loss": 0.2858, |
| "step": 10220 |
| }, |
| { |
| "epoch": 0.3949959457894127, |
| "grad_norm": 0.2400553673505783, |
| "learning_rate": 0.00017366951104933267, |
| "loss": 0.2806, |
| "step": 10230 |
| }, |
| { |
| "epoch": 0.39538206108343954, |
| "grad_norm": 1.817741870880127, |
| "learning_rate": 0.0001736437700297309, |
| "loss": 0.3647, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.3957681763774663, |
| "grad_norm": 4.890044689178467, |
| "learning_rate": 0.0001736180290101291, |
| "loss": 0.4435, |
| "step": 10250 |
| }, |
| { |
| "epoch": 0.39615429167149313, |
| "grad_norm": 0.3407624065876007, |
| "learning_rate": 0.00017359228799052731, |
| "loss": 0.4857, |
| "step": 10260 |
| }, |
| { |
| "epoch": 0.3965404069655199, |
| "grad_norm": 2.4883463382720947, |
| "learning_rate": 0.00017356654697092553, |
| "loss": 0.2667, |
| "step": 10270 |
| }, |
| { |
| "epoch": 0.3969265222595467, |
| "grad_norm": 2.343823194503784, |
| "learning_rate": 0.00017354080595132374, |
| "loss": 0.3711, |
| "step": 10280 |
| }, |
| { |
| "epoch": 0.3973126375535735, |
| "grad_norm": 0.2056214064359665, |
| "learning_rate": 0.00017351506493172195, |
| "loss": 0.2695, |
| "step": 10290 |
| }, |
| { |
| "epoch": 0.3976987528476003, |
| "grad_norm": 0.20321065187454224, |
| "learning_rate": 0.00017348932391212017, |
| "loss": 0.3079, |
| "step": 10300 |
| }, |
| { |
| "epoch": 0.3980848681416271, |
| "grad_norm": 0.7993821501731873, |
| "learning_rate": 0.00017346358289251838, |
| "loss": 0.3599, |
| "step": 10310 |
| }, |
| { |
| "epoch": 0.3984709834356539, |
| "grad_norm": 2.0987348556518555, |
| "learning_rate": 0.0001734378418729166, |
| "loss": 0.3259, |
| "step": 10320 |
| }, |
| { |
| "epoch": 0.39885709872968067, |
| "grad_norm": 2.474246025085449, |
| "learning_rate": 0.0001734121008533148, |
| "loss": 0.3398, |
| "step": 10330 |
| }, |
| { |
| "epoch": 0.3992432140237075, |
| "grad_norm": 2.341064214706421, |
| "learning_rate": 0.00017338635983371302, |
| "loss": 0.5264, |
| "step": 10340 |
| }, |
| { |
| "epoch": 0.39962932931773426, |
| "grad_norm": 1.587437629699707, |
| "learning_rate": 0.00017336061881411123, |
| "loss": 0.4228, |
| "step": 10350 |
| }, |
| { |
| "epoch": 0.4000154446117611, |
| "grad_norm": 0.6692029237747192, |
| "learning_rate": 0.00017333487779450945, |
| "loss": 0.3576, |
| "step": 10360 |
| }, |
| { |
| "epoch": 0.40040155990578785, |
| "grad_norm": 2.088212251663208, |
| "learning_rate": 0.00017330913677490766, |
| "loss": 0.3096, |
| "step": 10370 |
| }, |
| { |
| "epoch": 0.4007876751998147, |
| "grad_norm": 1.5051954984664917, |
| "learning_rate": 0.00017328339575530587, |
| "loss": 0.3753, |
| "step": 10380 |
| }, |
| { |
| "epoch": 0.40117379049384144, |
| "grad_norm": 2.02595591545105, |
| "learning_rate": 0.0001732576547357041, |
| "loss": 0.3339, |
| "step": 10390 |
| }, |
| { |
| "epoch": 0.40155990578786827, |
| "grad_norm": 1.3062909841537476, |
| "learning_rate": 0.0001732319137161023, |
| "loss": 0.4301, |
| "step": 10400 |
| }, |
| { |
| "epoch": 0.40194602108189503, |
| "grad_norm": 2.5890421867370605, |
| "learning_rate": 0.00017320617269650051, |
| "loss": 0.3047, |
| "step": 10410 |
| }, |
| { |
| "epoch": 0.40233213637592186, |
| "grad_norm": 1.5994844436645508, |
| "learning_rate": 0.00017318043167689873, |
| "loss": 0.4158, |
| "step": 10420 |
| }, |
| { |
| "epoch": 0.4027182516699486, |
| "grad_norm": 0.5470211505889893, |
| "learning_rate": 0.00017315469065729694, |
| "loss": 0.4513, |
| "step": 10430 |
| }, |
| { |
| "epoch": 0.40310436696397545, |
| "grad_norm": 2.216935634613037, |
| "learning_rate": 0.00017312894963769515, |
| "loss": 0.5123, |
| "step": 10440 |
| }, |
| { |
| "epoch": 0.4034904822580022, |
| "grad_norm": 2.354724645614624, |
| "learning_rate": 0.00017310320861809337, |
| "loss": 0.2804, |
| "step": 10450 |
| }, |
| { |
| "epoch": 0.40387659755202904, |
| "grad_norm": 4.514159202575684, |
| "learning_rate": 0.00017307746759849158, |
| "loss": 0.3317, |
| "step": 10460 |
| }, |
| { |
| "epoch": 0.4042627128460558, |
| "grad_norm": 0.9874318242073059, |
| "learning_rate": 0.0001730517265788898, |
| "loss": 0.1948, |
| "step": 10470 |
| }, |
| { |
| "epoch": 0.40464882814008263, |
| "grad_norm": 2.0725696086883545, |
| "learning_rate": 0.000173025985559288, |
| "loss": 0.3627, |
| "step": 10480 |
| }, |
| { |
| "epoch": 0.40503494343410945, |
| "grad_norm": 2.4061577320098877, |
| "learning_rate": 0.00017300024453968622, |
| "loss": 0.3074, |
| "step": 10490 |
| }, |
| { |
| "epoch": 0.4054210587281362, |
| "grad_norm": 1.3369660377502441, |
| "learning_rate": 0.00017297450352008443, |
| "loss": 0.533, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.40580717402216304, |
| "grad_norm": 1.2730306386947632, |
| "learning_rate": 0.00017294876250048265, |
| "loss": 0.4688, |
| "step": 10510 |
| }, |
| { |
| "epoch": 0.4061932893161898, |
| "grad_norm": 0.6753021478652954, |
| "learning_rate": 0.00017292302148088086, |
| "loss": 0.4427, |
| "step": 10520 |
| }, |
| { |
| "epoch": 0.40657940461021663, |
| "grad_norm": 1.7279945611953735, |
| "learning_rate": 0.0001728972804612791, |
| "loss": 0.4921, |
| "step": 10530 |
| }, |
| { |
| "epoch": 0.4069655199042434, |
| "grad_norm": 0.9288708567619324, |
| "learning_rate": 0.0001728715394416773, |
| "loss": 0.363, |
| "step": 10540 |
| }, |
| { |
| "epoch": 0.4073516351982702, |
| "grad_norm": 0.5325084924697876, |
| "learning_rate": 0.0001728457984220755, |
| "loss": 0.4095, |
| "step": 10550 |
| }, |
| { |
| "epoch": 0.407737750492297, |
| "grad_norm": 1.2030489444732666, |
| "learning_rate": 0.0001728200574024737, |
| "loss": 0.3499, |
| "step": 10560 |
| }, |
| { |
| "epoch": 0.4081238657863238, |
| "grad_norm": 3.8157269954681396, |
| "learning_rate": 0.00017279431638287193, |
| "loss": 0.1622, |
| "step": 10570 |
| }, |
| { |
| "epoch": 0.4085099810803506, |
| "grad_norm": 0.6373336911201477, |
| "learning_rate": 0.00017276857536327017, |
| "loss": 0.4657, |
| "step": 10580 |
| }, |
| { |
| "epoch": 0.4088960963743774, |
| "grad_norm": 2.2850074768066406, |
| "learning_rate": 0.00017274283434366835, |
| "loss": 0.3585, |
| "step": 10590 |
| }, |
| { |
| "epoch": 0.40928221166840417, |
| "grad_norm": 0.8831659555435181, |
| "learning_rate": 0.0001727170933240666, |
| "loss": 0.293, |
| "step": 10600 |
| }, |
| { |
| "epoch": 0.409668326962431, |
| "grad_norm": 5.1165995597839355, |
| "learning_rate": 0.00017269135230446478, |
| "loss": 0.6539, |
| "step": 10610 |
| }, |
| { |
| "epoch": 0.41005444225645776, |
| "grad_norm": 4.901204586029053, |
| "learning_rate": 0.000172665611284863, |
| "loss": 0.4628, |
| "step": 10620 |
| }, |
| { |
| "epoch": 0.4104405575504846, |
| "grad_norm": 2.1492419242858887, |
| "learning_rate": 0.0001726398702652612, |
| "loss": 0.277, |
| "step": 10630 |
| }, |
| { |
| "epoch": 0.41082667284451135, |
| "grad_norm": 3.56510853767395, |
| "learning_rate": 0.00017261412924565942, |
| "loss": 0.4696, |
| "step": 10640 |
| }, |
| { |
| "epoch": 0.4112127881385382, |
| "grad_norm": 2.054769992828369, |
| "learning_rate": 0.00017258838822605766, |
| "loss": 0.4093, |
| "step": 10650 |
| }, |
| { |
| "epoch": 0.41159890343256494, |
| "grad_norm": 2.133474826812744, |
| "learning_rate": 0.00017256264720645585, |
| "loss": 0.3604, |
| "step": 10660 |
| }, |
| { |
| "epoch": 0.41198501872659177, |
| "grad_norm": 2.5062367916107178, |
| "learning_rate": 0.0001725369061868541, |
| "loss": 0.3916, |
| "step": 10670 |
| }, |
| { |
| "epoch": 0.41237113402061853, |
| "grad_norm": 0.431570827960968, |
| "learning_rate": 0.00017251116516725227, |
| "loss": 0.4048, |
| "step": 10680 |
| }, |
| { |
| "epoch": 0.41275724931464536, |
| "grad_norm": 1.2092580795288086, |
| "learning_rate": 0.0001724854241476505, |
| "loss": 0.602, |
| "step": 10690 |
| }, |
| { |
| "epoch": 0.4131433646086721, |
| "grad_norm": 2.712398052215576, |
| "learning_rate": 0.00017245968312804873, |
| "loss": 0.4172, |
| "step": 10700 |
| }, |
| { |
| "epoch": 0.41352947990269895, |
| "grad_norm": 3.914670467376709, |
| "learning_rate": 0.0001724339421084469, |
| "loss": 0.3843, |
| "step": 10710 |
| }, |
| { |
| "epoch": 0.4139155951967257, |
| "grad_norm": 1.7062132358551025, |
| "learning_rate": 0.00017240820108884515, |
| "loss": 0.343, |
| "step": 10720 |
| }, |
| { |
| "epoch": 0.41430171049075254, |
| "grad_norm": 0.5837095379829407, |
| "learning_rate": 0.00017238246006924334, |
| "loss": 0.3872, |
| "step": 10730 |
| }, |
| { |
| "epoch": 0.41468782578477936, |
| "grad_norm": 1.098900556564331, |
| "learning_rate": 0.00017235671904964158, |
| "loss": 0.2062, |
| "step": 10740 |
| }, |
| { |
| "epoch": 0.41507394107880613, |
| "grad_norm": 1.2533438205718994, |
| "learning_rate": 0.00017233097803003977, |
| "loss": 0.141, |
| "step": 10750 |
| }, |
| { |
| "epoch": 0.41546005637283295, |
| "grad_norm": 0.8688085079193115, |
| "learning_rate": 0.00017230523701043798, |
| "loss": 0.3686, |
| "step": 10760 |
| }, |
| { |
| "epoch": 0.4158461716668597, |
| "grad_norm": 1.868402361869812, |
| "learning_rate": 0.00017227949599083622, |
| "loss": 0.449, |
| "step": 10770 |
| }, |
| { |
| "epoch": 0.41623228696088654, |
| "grad_norm": 0.7168850898742676, |
| "learning_rate": 0.0001722537549712344, |
| "loss": 0.2317, |
| "step": 10780 |
| }, |
| { |
| "epoch": 0.4166184022549133, |
| "grad_norm": 3.1062309741973877, |
| "learning_rate": 0.00017222801395163265, |
| "loss": 0.4655, |
| "step": 10790 |
| }, |
| { |
| "epoch": 0.41700451754894013, |
| "grad_norm": 2.7296605110168457, |
| "learning_rate": 0.00017220227293203083, |
| "loss": 0.3934, |
| "step": 10800 |
| }, |
| { |
| "epoch": 0.4173906328429669, |
| "grad_norm": 2.3148224353790283, |
| "learning_rate": 0.00017217653191242907, |
| "loss": 0.2367, |
| "step": 10810 |
| }, |
| { |
| "epoch": 0.4177767481369937, |
| "grad_norm": 0.7049677968025208, |
| "learning_rate": 0.00017215079089282726, |
| "loss": 0.3157, |
| "step": 10820 |
| }, |
| { |
| "epoch": 0.4181628634310205, |
| "grad_norm": 3.3960344791412354, |
| "learning_rate": 0.00017212504987322547, |
| "loss": 0.4945, |
| "step": 10830 |
| }, |
| { |
| "epoch": 0.4185489787250473, |
| "grad_norm": 2.606316566467285, |
| "learning_rate": 0.0001720993088536237, |
| "loss": 0.4056, |
| "step": 10840 |
| }, |
| { |
| "epoch": 0.4189350940190741, |
| "grad_norm": 1.7469319105148315, |
| "learning_rate": 0.0001720735678340219, |
| "loss": 0.4176, |
| "step": 10850 |
| }, |
| { |
| "epoch": 0.4193212093131009, |
| "grad_norm": 0.8538552522659302, |
| "learning_rate": 0.00017204782681442014, |
| "loss": 0.3025, |
| "step": 10860 |
| }, |
| { |
| "epoch": 0.4197073246071277, |
| "grad_norm": 1.9576159715652466, |
| "learning_rate": 0.00017202208579481833, |
| "loss": 0.5626, |
| "step": 10870 |
| }, |
| { |
| "epoch": 0.4200934399011545, |
| "grad_norm": 0.8435356616973877, |
| "learning_rate": 0.00017199634477521657, |
| "loss": 0.2397, |
| "step": 10880 |
| }, |
| { |
| "epoch": 0.42047955519518126, |
| "grad_norm": 1.3026552200317383, |
| "learning_rate": 0.00017197060375561478, |
| "loss": 0.4793, |
| "step": 10890 |
| }, |
| { |
| "epoch": 0.4208656704892081, |
| "grad_norm": 1.8935116529464722, |
| "learning_rate": 0.00017194486273601297, |
| "loss": 0.2459, |
| "step": 10900 |
| }, |
| { |
| "epoch": 0.42125178578323486, |
| "grad_norm": 0.7297415137290955, |
| "learning_rate": 0.0001719191217164112, |
| "loss": 0.4115, |
| "step": 10910 |
| }, |
| { |
| "epoch": 0.4216379010772617, |
| "grad_norm": 2.730445146560669, |
| "learning_rate": 0.0001718933806968094, |
| "loss": 0.3467, |
| "step": 10920 |
| }, |
| { |
| "epoch": 0.42202401637128845, |
| "grad_norm": 1.5462249517440796, |
| "learning_rate": 0.00017186763967720763, |
| "loss": 0.2319, |
| "step": 10930 |
| }, |
| { |
| "epoch": 0.42241013166531527, |
| "grad_norm": 2.173388957977295, |
| "learning_rate": 0.00017184189865760582, |
| "loss": 0.3664, |
| "step": 10940 |
| }, |
| { |
| "epoch": 0.42279624695934204, |
| "grad_norm": 0.9086957573890686, |
| "learning_rate": 0.00017181615763800406, |
| "loss": 0.3928, |
| "step": 10950 |
| }, |
| { |
| "epoch": 0.42318236225336886, |
| "grad_norm": 1.6344754695892334, |
| "learning_rate": 0.00017179041661840227, |
| "loss": 0.32, |
| "step": 10960 |
| }, |
| { |
| "epoch": 0.4235684775473956, |
| "grad_norm": 3.7620887756347656, |
| "learning_rate": 0.00017176467559880049, |
| "loss": 0.3998, |
| "step": 10970 |
| }, |
| { |
| "epoch": 0.42395459284142245, |
| "grad_norm": 2.3914058208465576, |
| "learning_rate": 0.0001717389345791987, |
| "loss": 0.3003, |
| "step": 10980 |
| }, |
| { |
| "epoch": 0.4243407081354492, |
| "grad_norm": 1.1183325052261353, |
| "learning_rate": 0.00017171319355959689, |
| "loss": 0.2408, |
| "step": 10990 |
| }, |
| { |
| "epoch": 0.42472682342947604, |
| "grad_norm": 1.5570834875106812, |
| "learning_rate": 0.00017168745253999513, |
| "loss": 0.2638, |
| "step": 11000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 77697, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6698536796160000.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |