{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 504,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003968253968253968,
      "grad_norm": 480.6939697265625,
      "learning_rate": 3.846153846153847e-07,
      "loss": 1.9332,
      "step": 1
    },
    {
      "epoch": 0.007936507936507936,
      "grad_norm": 485.58526611328125,
      "learning_rate": 7.692307692307694e-07,
      "loss": 1.9959,
      "step": 2
    },
    {
      "epoch": 0.011904761904761904,
      "grad_norm": 502.3266296386719,
      "learning_rate": 1.153846153846154e-06,
      "loss": 2.0371,
      "step": 3
    },
    {
      "epoch": 0.015873015873015872,
      "grad_norm": 626.9771118164062,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 2.1148,
      "step": 4
    },
    {
      "epoch": 0.01984126984126984,
      "grad_norm": 491.1722412109375,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 2.186,
      "step": 5
    },
    {
      "epoch": 0.023809523809523808,
      "grad_norm": 363.6409606933594,
      "learning_rate": 2.307692307692308e-06,
      "loss": 1.9466,
      "step": 6
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 224.33517456054688,
      "learning_rate": 2.6923076923076923e-06,
      "loss": 1.8087,
      "step": 7
    },
    {
      "epoch": 0.031746031746031744,
      "grad_norm": 65.55188751220703,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 1.6339,
      "step": 8
    },
    {
      "epoch": 0.03571428571428571,
      "grad_norm": 107.53528594970703,
      "learning_rate": 3.4615384615384617e-06,
      "loss": 1.5849,
      "step": 9
    },
    {
      "epoch": 0.03968253968253968,
      "grad_norm": 93.20413208007812,
      "learning_rate": 3.846153846153847e-06,
      "loss": 1.5984,
      "step": 10
    },
    {
      "epoch": 0.04365079365079365,
      "grad_norm": 25.42405891418457,
      "learning_rate": 4.230769230769231e-06,
      "loss": 1.4529,
      "step": 11
    },
    {
      "epoch": 0.047619047619047616,
      "grad_norm": 99.05838775634766,
      "learning_rate": 4.615384615384616e-06,
      "loss": 1.3944,
      "step": 12
    },
    {
      "epoch": 0.051587301587301584,
      "grad_norm": 16.312206268310547,
      "learning_rate": 5e-06,
      "loss": 1.3128,
      "step": 13
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 14.814692497253418,
      "learning_rate": 5.384615384615385e-06,
      "loss": 1.3119,
      "step": 14
    },
    {
      "epoch": 0.05952380952380952,
      "grad_norm": 20.357574462890625,
      "learning_rate": 5.769230769230769e-06,
      "loss": 1.1976,
      "step": 15
    },
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 15.262699127197266,
      "learning_rate": 6.153846153846155e-06,
      "loss": 1.1386,
      "step": 16
    },
    {
      "epoch": 0.06746031746031746,
      "grad_norm": 14.989243507385254,
      "learning_rate": 6.538461538461539e-06,
      "loss": 1.0688,
      "step": 17
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 9.59119701385498,
      "learning_rate": 6.923076923076923e-06,
      "loss": 1.0538,
      "step": 18
    },
    {
      "epoch": 0.07539682539682539,
      "grad_norm": 8.450692176818848,
      "learning_rate": 7.307692307692308e-06,
      "loss": 0.9889,
      "step": 19
    },
    {
      "epoch": 0.07936507936507936,
      "grad_norm": 9.870237350463867,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.9955,
      "step": 20
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 5.6950788497924805,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.9486,
      "step": 21
    },
    {
      "epoch": 0.0873015873015873,
      "grad_norm": 7.79123067855835,
      "learning_rate": 8.461538461538462e-06,
      "loss": 1.0479,
      "step": 22
    },
    {
      "epoch": 0.09126984126984126,
      "grad_norm": 4.994154453277588,
      "learning_rate": 8.846153846153847e-06,
      "loss": 1.1122,
      "step": 23
    },
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 4.5346879959106445,
      "learning_rate": 9.230769230769232e-06,
      "loss": 1.0289,
      "step": 24
    },
    {
      "epoch": 0.0992063492063492,
      "grad_norm": 4.251474380493164,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.9206,
      "step": 25
    },
    {
      "epoch": 0.10317460317460317,
      "grad_norm": 2.8367598056793213,
      "learning_rate": 1e-05,
      "loss": 0.6243,
      "step": 26
    },
    {
      "epoch": 0.10714285714285714,
      "grad_norm": 2.6357839107513428,
      "learning_rate": 9.999892010284378e-06,
      "loss": 0.6254,
      "step": 27
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 2.7344064712524414,
      "learning_rate": 9.999568045802216e-06,
      "loss": 0.6485,
      "step": 28
    },
    {
      "epoch": 0.11507936507936507,
      "grad_norm": 2.1355581283569336,
      "learning_rate": 9.999028120547456e-06,
      "loss": 0.5566,
      "step": 29
    },
    {
      "epoch": 0.11904761904761904,
      "grad_norm": 2.1411855220794678,
      "learning_rate": 9.99827225784264e-06,
      "loss": 0.6042,
      "step": 30
    },
    {
      "epoch": 0.12301587301587301,
      "grad_norm": 2.0088932514190674,
      "learning_rate": 9.99730049033793e-06,
      "loss": 0.5592,
      "step": 31
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 2.1030216217041016,
      "learning_rate": 9.996112860009689e-06,
      "loss": 0.5772,
      "step": 32
    },
    {
      "epoch": 0.13095238095238096,
      "grad_norm": 2.0733652114868164,
      "learning_rate": 9.994709418158652e-06,
      "loss": 0.5338,
      "step": 33
    },
    {
      "epoch": 0.1349206349206349,
      "grad_norm": 2.132509469985962,
      "learning_rate": 9.993090225407743e-06,
      "loss": 0.6218,
      "step": 34
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 1.952929139137268,
      "learning_rate": 9.991255351699422e-06,
      "loss": 0.5433,
      "step": 35
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 2.200437307357788,
      "learning_rate": 9.98920487629269e-06,
      "loss": 0.6256,
      "step": 36
    },
    {
      "epoch": 0.14682539682539683,
      "grad_norm": 2.1493747234344482,
      "learning_rate": 9.986938887759643e-06,
      "loss": 0.6211,
      "step": 37
    },
    {
      "epoch": 0.15079365079365079,
      "grad_norm": 2.1535778045654297,
      "learning_rate": 9.98445748398167e-06,
      "loss": 0.6083,
      "step": 38
    },
    {
      "epoch": 0.15476190476190477,
      "grad_norm": 2.30021071434021,
      "learning_rate": 9.981760772145201e-06,
      "loss": 0.6339,
      "step": 39
    },
    {
      "epoch": 0.15873015873015872,
      "grad_norm": 2.382420301437378,
      "learning_rate": 9.978848868737099e-06,
      "loss": 0.6369,
      "step": 40
    },
    {
      "epoch": 0.1626984126984127,
      "grad_norm": 2.483940362930298,
      "learning_rate": 9.975721899539607e-06,
      "loss": 0.6274,
      "step": 41
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 2.7135977745056152,
      "learning_rate": 9.972379999624935e-06,
      "loss": 0.6848,
      "step": 42
    },
    {
      "epoch": 0.17063492063492064,
      "grad_norm": 2.600355625152588,
      "learning_rate": 9.968823313349412e-06,
      "loss": 0.5952,
      "step": 43
    },
    {
      "epoch": 0.1746031746031746,
      "grad_norm": 2.6748218536376953,
      "learning_rate": 9.96505199434725e-06,
      "loss": 0.6108,
      "step": 44
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 2.8031914234161377,
      "learning_rate": 9.961066205523917e-06,
      "loss": 0.6465,
      "step": 45
    },
    {
      "epoch": 0.18253968253968253,
      "grad_norm": 2.8012661933898926,
      "learning_rate": 9.956866119049095e-06,
      "loss": 0.6535,
      "step": 46
    },
    {
      "epoch": 0.1865079365079365,
      "grad_norm": 2.715590715408325,
      "learning_rate": 9.952451916349242e-06,
      "loss": 0.5868,
      "step": 47
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 2.4733760356903076,
      "learning_rate": 9.947823788099754e-06,
      "loss": 0.5289,
      "step": 48
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 2.615929126739502,
      "learning_rate": 9.942981934216731e-06,
      "loss": 0.5471,
      "step": 49
    },
    {
      "epoch": 0.1984126984126984,
      "grad_norm": 3.093095302581787,
      "learning_rate": 9.937926563848345e-06,
      "loss": 0.5972,
      "step": 50
    },
    {
      "epoch": 0.20238095238095238,
      "grad_norm": 2.6352145671844482,
      "learning_rate": 9.9326578953658e-06,
      "loss": 0.4981,
      "step": 51
    },
    {
      "epoch": 0.20634920634920634,
      "grad_norm": 2.5553276538848877,
      "learning_rate": 9.9271761563539e-06,
      "loss": 0.4797,
      "step": 52
    },
    {
      "epoch": 0.21031746031746032,
      "grad_norm": 2.4648044109344482,
      "learning_rate": 9.921481583601218e-06,
      "loss": 0.5402,
      "step": 53
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 2.549119472503662,
      "learning_rate": 9.915574423089872e-06,
      "loss": 0.4704,
      "step": 54
    },
    {
      "epoch": 0.21825396825396826,
      "grad_norm": 2.5682594776153564,
      "learning_rate": 9.909454929984894e-06,
      "loss": 0.4773,
      "step": 55
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.702852487564087,
      "learning_rate": 9.903123368623216e-06,
      "loss": 0.457,
      "step": 56
    },
    {
      "epoch": 0.2261904761904762,
      "grad_norm": 2.838491916656494,
      "learning_rate": 9.896580012502238e-06,
      "loss": 0.5493,
      "step": 57
    },
    {
      "epoch": 0.23015873015873015,
      "grad_norm": 2.9078750610351562,
      "learning_rate": 9.889825144268029e-06,
      "loss": 0.4775,
      "step": 58
    },
    {
      "epoch": 0.23412698412698413,
      "grad_norm": 2.8761308193206787,
      "learning_rate": 9.882859055703109e-06,
      "loss": 0.457,
      "step": 59
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 2.8357808589935303,
      "learning_rate": 9.875682047713847e-06,
      "loss": 0.4793,
      "step": 60
    },
    {
      "epoch": 0.24206349206349206,
      "grad_norm": 3.1367876529693604,
      "learning_rate": 9.868294430317464e-06,
      "loss": 0.5198,
      "step": 61
    },
    {
      "epoch": 0.24603174603174602,
      "grad_norm": 3.1271591186523438,
      "learning_rate": 9.860696522628638e-06,
      "loss": 0.5188,
      "step": 62
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.2067031860351562,
      "learning_rate": 9.852888652845729e-06,
      "loss": 0.5269,
      "step": 63
    },
    {
      "epoch": 0.25396825396825395,
      "grad_norm": 3.4634389877319336,
      "learning_rate": 9.84487115823659e-06,
      "loss": 0.5052,
      "step": 64
    },
    {
      "epoch": 0.25793650793650796,
      "grad_norm": 3.568326711654663,
      "learning_rate": 9.836644385124006e-06,
      "loss": 0.4946,
      "step": 65
    },
    {
      "epoch": 0.2619047619047619,
      "grad_norm": 3.6015052795410156,
      "learning_rate": 9.828208688870736e-06,
      "loss": 0.5278,
      "step": 66
    },
    {
      "epoch": 0.26587301587301587,
      "grad_norm": 3.848020553588867,
      "learning_rate": 9.81956443386415e-06,
      "loss": 0.5644,
      "step": 67
    },
    {
      "epoch": 0.2698412698412698,
      "grad_norm": 3.9349775314331055,
      "learning_rate": 9.810711993500506e-06,
      "loss": 0.4741,
      "step": 68
    },
    {
      "epoch": 0.27380952380952384,
      "grad_norm": 4.226278305053711,
      "learning_rate": 9.801651750168815e-06,
      "loss": 0.4848,
      "step": 69
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 4.300480365753174,
      "learning_rate": 9.792384095234312e-06,
      "loss": 0.4714,
      "step": 70
    },
    {
      "epoch": 0.28174603174603174,
      "grad_norm": 4.545813083648682,
      "learning_rate": 9.782909429021568e-06,
      "loss": 0.4671,
      "step": 71
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 3.9301016330718994,
      "learning_rate": 9.773228160797187e-06,
      "loss": 0.4722,
      "step": 72
    },
    {
      "epoch": 0.2896825396825397,
      "grad_norm": 3.616159439086914,
      "learning_rate": 9.76334070875213e-06,
      "loss": 0.4701,
      "step": 73
    },
    {
      "epoch": 0.29365079365079366,
      "grad_norm": 3.7994768619537354,
      "learning_rate": 9.753247499983649e-06,
      "loss": 0.4218,
      "step": 74
    },
    {
      "epoch": 0.2976190476190476,
      "grad_norm": 4.179572105407715,
      "learning_rate": 9.742948970476845e-06,
      "loss": 0.4193,
      "step": 75
    },
    {
      "epoch": 0.30158730158730157,
      "grad_norm": 3.17275071144104,
      "learning_rate": 9.732445565085823e-06,
      "loss": 0.3507,
      "step": 76
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 3.49688720703125,
      "learning_rate": 9.721737737514492e-06,
      "loss": 0.3275,
      "step": 77
    },
    {
      "epoch": 0.30952380952380953,
      "grad_norm": 3.5590732097625732,
      "learning_rate": 9.71082595029695e-06,
      "loss": 0.3758,
      "step": 78
    },
    {
      "epoch": 0.3134920634920635,
      "grad_norm": 3.6648528575897217,
      "learning_rate": 9.699710674777519e-06,
      "loss": 0.3371,
      "step": 79
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 3.7211573123931885,
      "learning_rate": 9.688392391090374e-06,
      "loss": 0.3903,
      "step": 80
    },
    {
      "epoch": 0.32142857142857145,
      "grad_norm": 3.65028715133667,
      "learning_rate": 9.676871588138812e-06,
      "loss": 0.3434,
      "step": 81
    },
    {
      "epoch": 0.3253968253968254,
      "grad_norm": 3.7291440963745117,
      "learning_rate": 9.665148763574123e-06,
      "loss": 0.3484,
      "step": 82
    },
    {
      "epoch": 0.32936507936507936,
      "grad_norm": 3.76412296295166,
      "learning_rate": 9.653224423774107e-06,
      "loss": 0.3287,
      "step": 83
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 3.787276029586792,
      "learning_rate": 9.64109908382119e-06,
      "loss": 0.3141,
      "step": 84
    },
    {
      "epoch": 0.3373015873015873,
      "grad_norm": 3.8091228008270264,
      "learning_rate": 9.628773267480177e-06,
      "loss": 0.3818,
      "step": 85
    },
    {
      "epoch": 0.3412698412698413,
      "grad_norm": 3.8210535049438477,
      "learning_rate": 9.616247507175624e-06,
      "loss": 0.3543,
      "step": 86
    },
    {
      "epoch": 0.34523809523809523,
      "grad_norm": 3.869622230529785,
      "learning_rate": 9.603522343968852e-06,
      "loss": 0.3727,
      "step": 87
    },
    {
      "epoch": 0.3492063492063492,
      "grad_norm": 3.8349802494049072,
      "learning_rate": 9.590598327534563e-06,
      "loss": 0.3453,
      "step": 88
    },
    {
      "epoch": 0.3531746031746032,
      "grad_norm": 3.91221022605896,
      "learning_rate": 9.577476016137105e-06,
      "loss": 0.3196,
      "step": 89
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 3.895413398742676,
      "learning_rate": 9.56415597660634e-06,
      "loss": 0.3514,
      "step": 90
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 4.032989501953125,
      "learning_rate": 9.550638784313187e-06,
      "loss": 0.3396,
      "step": 91
    },
    {
      "epoch": 0.36507936507936506,
      "grad_norm": 4.055191516876221,
      "learning_rate": 9.536925023144742e-06,
      "loss": 0.3044,
      "step": 92
    },
    {
      "epoch": 0.36904761904761907,
      "grad_norm": 4.113070487976074,
      "learning_rate": 9.523015285479076e-06,
      "loss": 0.3536,
      "step": 93
    },
    {
      "epoch": 0.373015873015873,
      "grad_norm": 4.107305526733398,
      "learning_rate": 9.508910172159635e-06,
      "loss": 0.3166,
      "step": 94
    },
    {
      "epoch": 0.376984126984127,
      "grad_norm": 3.969236135482788,
      "learning_rate": 9.494610292469287e-06,
      "loss": 0.3001,
      "step": 95
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 4.075791358947754,
      "learning_rate": 9.48011626410401e-06,
      "loss": 0.3347,
      "step": 96
    },
    {
      "epoch": 0.38492063492063494,
      "grad_norm": 3.4200732707977295,
      "learning_rate": 9.465428713146206e-06,
      "loss": 0.3097,
      "step": 97
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 3.148775339126587,
      "learning_rate": 9.450548274037652e-06,
      "loss": 0.3032,
      "step": 98
    },
    {
      "epoch": 0.39285714285714285,
      "grad_norm": 3.1194303035736084,
      "learning_rate": 9.435475589552107e-06,
      "loss": 0.2867,
      "step": 99
    },
    {
      "epoch": 0.3968253968253968,
      "grad_norm": 3.5353922843933105,
      "learning_rate": 9.420211310767534e-06,
      "loss": 0.3074,
      "step": 100
    },
    {
      "epoch": 0.4007936507936508,
      "grad_norm": 2.467332363128662,
      "learning_rate": 9.40475609703798e-06,
      "loss": 0.2468,
      "step": 101
    },
    {
      "epoch": 0.40476190476190477,
      "grad_norm": 2.804072141647339,
      "learning_rate": 9.389110615965102e-06,
      "loss": 0.2623,
      "step": 102
    },
    {
      "epoch": 0.4087301587301587,
      "grad_norm": 2.6840615272521973,
      "learning_rate": 9.37327554336932e-06,
      "loss": 0.1996,
      "step": 103
    },
    {
      "epoch": 0.4126984126984127,
      "grad_norm": 2.739384412765503,
      "learning_rate": 9.35725156326063e-06,
      "loss": 0.2387,
      "step": 104
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 2.587768316268921,
      "learning_rate": 9.341039367809056e-06,
      "loss": 0.2555,
      "step": 105
    },
    {
      "epoch": 0.42063492063492064,
      "grad_norm": 2.5864408016204834,
      "learning_rate": 9.324639657314742e-06,
      "loss": 0.2634,
      "step": 106
    },
    {
      "epoch": 0.4246031746031746,
      "grad_norm": 2.4532299041748047,
      "learning_rate": 9.308053140177722e-06,
      "loss": 0.2237,
      "step": 107
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 2.4923110008239746,
      "learning_rate": 9.291280532867301e-06,
      "loss": 0.2183,
      "step": 108
    },
    {
      "epoch": 0.43253968253968256,
      "grad_norm": 2.3666458129882812,
      "learning_rate": 9.27432255989112e-06,
      "loss": 0.2509,
      "step": 109
    },
    {
      "epoch": 0.4365079365079365,
      "grad_norm": 2.2944254875183105,
      "learning_rate": 9.257179953763846e-06,
      "loss": 0.2376,
      "step": 110
    },
    {
      "epoch": 0.44047619047619047,
      "grad_norm": 2.174125909805298,
      "learning_rate": 9.239853454975548e-06,
      "loss": 0.2304,
      "step": 111
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.0383496284484863,
      "learning_rate": 9.222343811959694e-06,
      "loss": 0.2482,
      "step": 112
    },
    {
      "epoch": 0.44841269841269843,
      "grad_norm": 2.0460424423217773,
      "learning_rate": 9.204651781060832e-06,
      "loss": 0.1998,
      "step": 113
    },
    {
      "epoch": 0.4523809523809524,
      "grad_norm": 1.945266842842102,
      "learning_rate": 9.186778126501916e-06,
      "loss": 0.2179,
      "step": 114
    },
    {
      "epoch": 0.45634920634920634,
      "grad_norm": 1.8012604713439941,
      "learning_rate": 9.168723620351298e-06,
      "loss": 0.2255,
      "step": 115
    },
    {
      "epoch": 0.4603174603174603,
      "grad_norm": 1.6935973167419434,
      "learning_rate": 9.150489042489368e-06,
      "loss": 0.1993,
      "step": 116
    },
    {
      "epoch": 0.4642857142857143,
      "grad_norm": 1.698236346244812,
      "learning_rate": 9.13207518057488e-06,
      "loss": 0.229,
      "step": 117
    },
    {
      "epoch": 0.46825396825396826,
      "grad_norm": 1.4979168176651,
      "learning_rate": 9.113482830010918e-06,
      "loss": 0.1925,
      "step": 118
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 1.5552384853363037,
      "learning_rate": 9.094712793910541e-06,
      "loss": 0.2142,
      "step": 119
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 1.4717791080474854,
      "learning_rate": 9.075765883062093e-06,
      "loss": 0.2088,
      "step": 120
    },
    {
      "epoch": 0.4801587301587302,
      "grad_norm": 1.4267648458480835,
      "learning_rate": 9.056642915894182e-06,
      "loss": 0.207,
      "step": 121
    },
    {
      "epoch": 0.48412698412698413,
      "grad_norm": 1.3177474737167358,
      "learning_rate": 9.037344718440321e-06,
      "loss": 0.2254,
      "step": 122
    },
    {
      "epoch": 0.4880952380952381,
      "grad_norm": 1.243403673171997,
      "learning_rate": 9.017872124303255e-06,
      "loss": 0.2271,
      "step": 123
    },
    {
      "epoch": 0.49206349206349204,
      "grad_norm": 1.3508583307266235,
      "learning_rate": 8.99822597461894e-06,
      "loss": 0.183,
      "step": 124
    },
    {
      "epoch": 0.49603174603174605,
      "grad_norm": 1.3412503004074097,
      "learning_rate": 8.978407118020226e-06,
      "loss": 0.1905,
      "step": 125
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.2498844861984253,
      "learning_rate": 8.958416410600188e-06,
      "loss": 0.2132,
      "step": 126
    },
    {
      "epoch": 0.503968253968254,
      "grad_norm": 1.0604239702224731,
      "learning_rate": 8.938254715875152e-06,
      "loss": 0.1791,
      "step": 127
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 1.0373965501785278,
      "learning_rate": 8.917922904747385e-06,
      "loss": 0.1659,
      "step": 128
    },
    {
      "epoch": 0.5119047619047619,
      "grad_norm": 1.2815697193145752,
      "learning_rate": 8.897421855467491e-06,
      "loss": 0.2173,
      "step": 129
    },
    {
      "epoch": 0.5158730158730159,
      "grad_norm": 1.3789927959442139,
      "learning_rate": 8.876752453596462e-06,
      "loss": 0.2577,
      "step": 130
    },
    {
      "epoch": 0.5198412698412699,
      "grad_norm": 1.2774096727371216,
      "learning_rate": 8.85591559196743e-06,
      "loss": 0.2371,
      "step": 131
    },
    {
      "epoch": 0.5238095238095238,
      "grad_norm": 1.1230323314666748,
      "learning_rate": 8.834912170647102e-06,
      "loss": 0.2083,
      "step": 132
    },
    {
      "epoch": 0.5277777777777778,
      "grad_norm": 1.0352874994277954,
      "learning_rate": 8.813743096896872e-06,
      "loss": 0.1897,
      "step": 133
    },
    {
      "epoch": 0.5317460317460317,
      "grad_norm": 1.0328984260559082,
      "learning_rate": 8.792409285133644e-06,
      "loss": 0.1948,
      "step": 134
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 1.0041017532348633,
      "learning_rate": 8.770911656890325e-06,
      "loss": 0.2194,
      "step": 135
    },
    {
      "epoch": 0.5396825396825397,
      "grad_norm": 1.1615849733352661,
      "learning_rate": 8.749251140776016e-06,
      "loss": 0.2055,
      "step": 136
    },
    {
      "epoch": 0.5436507936507936,
      "grad_norm": 1.0535426139831543,
      "learning_rate": 8.727428672435911e-06,
      "loss": 0.2035,
      "step": 137
    },
    {
      "epoch": 0.5476190476190477,
      "grad_norm": 1.1168123483657837,
      "learning_rate": 8.705445194510868e-06,
      "loss": 0.1957,
      "step": 138
    },
    {
      "epoch": 0.5515873015873016,
      "grad_norm": 1.1942194700241089,
      "learning_rate": 8.6833016565967e-06,
      "loss": 0.2302,
      "step": 139
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.1593937873840332,
      "learning_rate": 8.660999015203152e-06,
      "loss": 0.1917,
      "step": 140
    },
    {
      "epoch": 0.5595238095238095,
      "grad_norm": 1.19351065158844,
      "learning_rate": 8.638538233712581e-06,
      "loss": 0.2032,
      "step": 141
    },
    {
      "epoch": 0.5634920634920635,
      "grad_norm": 1.2246677875518799,
      "learning_rate": 8.615920282338355e-06,
      "loss": 0.2091,
      "step": 142
    },
    {
      "epoch": 0.5674603174603174,
      "grad_norm": 1.3597017526626587,
      "learning_rate": 8.593146138082925e-06,
      "loss": 0.2555,
      "step": 143
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 1.2056429386138916,
      "learning_rate": 8.570216784695637e-06,
      "loss": 0.1968,
      "step": 144
    },
    {
      "epoch": 0.5753968253968254,
      "grad_norm": 1.2337775230407715,
      "learning_rate": 8.54713321263023e-06,
      "loss": 0.1903,
      "step": 145
    },
    {
      "epoch": 0.5793650793650794,
      "grad_norm": 1.2018787860870361,
      "learning_rate": 8.52389641900206e-06,
      "loss": 0.1913,
      "step": 146
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 1.3740088939666748,
      "learning_rate": 8.50050740754502e-06,
      "loss": 0.2399,
      "step": 147
    },
    {
      "epoch": 0.5873015873015873,
      "grad_norm": 1.295432209968567,
      "learning_rate": 8.476967188568187e-06,
      "loss": 0.1896,
      "step": 148
    },
    {
      "epoch": 0.5912698412698413,
      "grad_norm": 1.19464910030365,
      "learning_rate": 8.453276778912186e-06,
      "loss": 0.2082,
      "step": 149
    },
    {
      "epoch": 0.5952380952380952,
      "grad_norm": 1.139990210533142,
      "learning_rate": 8.429437201905254e-06,
      "loss": 0.1717,
      "step": 150
    },
    {
      "epoch": 0.5992063492063492,
      "grad_norm": 1.0607414245605469,
      "learning_rate": 8.405449487319049e-06,
      "loss": 0.1722,
      "step": 151
    },
    {
      "epoch": 0.6031746031746031,
      "grad_norm": 1.1491377353668213,
      "learning_rate": 8.38131467132416e-06,
      "loss": 0.1612,
      "step": 152
    },
    {
      "epoch": 0.6071428571428571,
      "grad_norm": 1.1927894353866577,
      "learning_rate": 8.357033796445356e-06,
      "loss": 0.1813,
      "step": 153
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 1.134826421737671,
      "learning_rate": 8.332607911516545e-06,
      "loss": 0.1515,
      "step": 154
    },
    {
      "epoch": 0.6150793650793651,
      "grad_norm": 1.366958737373352,
      "learning_rate": 8.308038071635475e-06,
      "loss": 0.2749,
      "step": 155
    },
    {
      "epoch": 0.6190476190476191,
      "grad_norm": 1.1841325759887695,
      "learning_rate": 8.283325338118154e-06,
      "loss": 0.1741,
      "step": 156
    },
    {
      "epoch": 0.623015873015873,
      "grad_norm": 1.2794548273086548,
      "learning_rate": 8.258470778453005e-06,
      "loss": 0.2085,
      "step": 157
    },
    {
      "epoch": 0.626984126984127,
      "grad_norm": 1.3475635051727295,
      "learning_rate": 8.233475466254766e-06,
      "loss": 0.1747,
      "step": 158
    },
    {
      "epoch": 0.6309523809523809,
      "grad_norm": 1.4117634296417236,
      "learning_rate": 8.208340481218094e-06,
      "loss": 0.1848,
      "step": 159
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 1.281284213066101,
      "learning_rate": 8.183066909070946e-06,
      "loss": 0.1632,
      "step": 160
    },
    {
      "epoch": 0.6388888888888888,
      "grad_norm": 1.350193977355957,
      "learning_rate": 8.15765584152767e-06,
      "loss": 0.1752,
      "step": 161
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 1.2969999313354492,
      "learning_rate": 8.132108376241849e-06,
      "loss": 0.1508,
      "step": 162
    },
    {
      "epoch": 0.6468253968253969,
      "grad_norm": 1.3409817218780518,
      "learning_rate": 8.106425616758886e-06,
      "loss": 0.2002,
      "step": 163
    },
    {
      "epoch": 0.6507936507936508,
      "grad_norm": 1.4258953332901,
      "learning_rate": 8.08060867246834e-06,
      "loss": 0.2164,
      "step": 164
    },
    {
      "epoch": 0.6547619047619048,
      "grad_norm": 1.4764114618301392,
      "learning_rate": 8.054658658555998e-06,
      "loss": 0.2046,
      "step": 165
    },
    {
      "epoch": 0.6587301587301587,
      "grad_norm": 1.3869717121124268,
      "learning_rate": 8.028576695955711e-06,
      "loss": 0.2042,
      "step": 166
    },
    {
      "epoch": 0.6626984126984127,
      "grad_norm": 1.4131327867507935,
      "learning_rate": 8.002363911300966e-06,
      "loss": 0.1778,
      "step": 167
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.467511534690857,
      "learning_rate": 7.976021436876232e-06,
      "loss": 0.1637,
      "step": 168
    },
    {
      "epoch": 0.6706349206349206,
      "grad_norm": 1.5328935384750366,
      "learning_rate": 7.949550410568033e-06,
      "loss": 0.1737,
      "step": 169
    },
    {
      "epoch": 0.6746031746031746,
      "grad_norm": 1.4699673652648926,
      "learning_rate": 7.92295197581581e-06,
      "loss": 0.1521,
      "step": 170
    },
    {
      "epoch": 0.6785714285714286,
      "grad_norm": 1.5993800163269043,
      "learning_rate": 7.89622728156253e-06,
      "loss": 0.1663,
      "step": 171
    },
    {
      "epoch": 0.6825396825396826,
      "grad_norm": 1.4890751838684082,
      "learning_rate": 7.869377482205042e-06,
      "loss": 0.2153,
      "step": 172
    },
    {
      "epoch": 0.6865079365079365,
      "grad_norm": 1.1941518783569336,
      "learning_rate": 7.842403737544226e-06,
      "loss": 0.1638,
      "step": 173
    },
    {
      "epoch": 0.6904761904761905,
      "grad_norm": 1.2471840381622314,
      "learning_rate": 7.815307212734888e-06,
      "loss": 0.1356,
      "step": 174
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 1.277548909187317,
      "learning_rate": 7.788089078235432e-06,
      "loss": 0.1715,
      "step": 175
    },
    {
      "epoch": 0.6984126984126984,
      "grad_norm": 1.2489705085754395,
      "learning_rate": 7.7607505097573e-06,
      "loss": 0.1787,
      "step": 176
    },
    {
      "epoch": 0.7023809523809523,
      "grad_norm": 1.1460860967636108,
      "learning_rate": 7.733292688214182e-06,
      "loss": 0.1403,
      "step": 177
    },
    {
      "epoch": 0.7063492063492064,
      "grad_norm": 1.1960135698318481,
      "learning_rate": 7.705716799671019e-06,
      "loss": 0.1723,
      "step": 178
    },
    {
      "epoch": 0.7103174603174603,
      "grad_norm": 1.2745386362075806,
      "learning_rate": 7.678024035292757e-06,
      "loss": 0.1464,
      "step": 179
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.1538077592849731,
      "learning_rate": 7.650215591292888e-06,
      "loss": 0.1363,
      "step": 180
    },
    {
      "epoch": 0.7182539682539683,
      "grad_norm": 1.1868458986282349,
      "learning_rate": 7.622292668881805e-06,
      "loss": 0.1686,
      "step": 181
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 1.137481451034546,
      "learning_rate": 7.594256474214883e-06,
      "loss": 0.1437,
      "step": 182
    },
    {
      "epoch": 0.7261904761904762,
      "grad_norm": 1.206611156463623,
      "learning_rate": 7.566108218340399e-06,
      "loss": 0.1287,
      "step": 183
    },
    {
      "epoch": 0.7301587301587301,
      "grad_norm": 1.2275463342666626,
      "learning_rate": 7.537849117147212e-06,
      "loss": 0.1682,
      "step": 184
    },
    {
      "epoch": 0.7341269841269841,
      "grad_norm": 1.156783103942871,
      "learning_rate": 7.509480391312243e-06,
      "loss": 0.114,
      "step": 185
    },
    {
      "epoch": 0.7380952380952381,
      "grad_norm": 1.312717318534851,
      "learning_rate": 7.481003266247745e-06,
      "loss": 0.1836,
      "step": 186
    },
    {
      "epoch": 0.7420634920634921,
      "grad_norm": 1.3326442241668701,
      "learning_rate": 7.452418972048372e-06,
      "loss": 0.1698,
      "step": 187
    },
    {
      "epoch": 0.746031746031746,
      "grad_norm": 1.1632299423217773,
      "learning_rate": 7.4237287434380485e-06,
      "loss": 0.1727,
      "step": 188
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.304856777191162,
      "learning_rate": 7.394933819716625e-06,
      "loss": 0.1814,
      "step": 189
    },
    {
      "epoch": 0.753968253968254,
      "grad_norm": 1.054636001586914,
      "learning_rate": 7.366035444706346e-06,
      "loss": 0.1315,
      "step": 190
    },
    {
      "epoch": 0.7579365079365079,
      "grad_norm": 1.4124561548233032,
      "learning_rate": 7.337034866698138e-06,
      "loss": 0.1527,
      "step": 191
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.2737467288970947,
      "learning_rate": 7.307933338397667e-06,
      "loss": 0.2051,
      "step": 192
    },
    {
      "epoch": 0.7658730158730159,
      "grad_norm": 1.2019504308700562,
      "learning_rate": 7.278732116871239e-06,
      "loss": 0.1599,
      "step": 193
    },
    {
      "epoch": 0.7698412698412699,
      "grad_norm": 1.1740680932998657,
      "learning_rate": 7.249432463491498e-06,
      "loss": 0.1309,
      "step": 194
    },
    {
      "epoch": 0.7738095238095238,
      "grad_norm": 1.3344628810882568,
      "learning_rate": 7.220035643882938e-06,
      "loss": 0.1452,
      "step": 195
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.1748477220535278,
      "learning_rate": 7.190542927867234e-06,
      "loss": 0.1264,
      "step": 196
    },
    {
      "epoch": 0.7817460317460317,
      "grad_norm": 1.344280481338501,
      "learning_rate": 7.160955589408395e-06,
      "loss": 0.2139,
      "step": 197
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 1.1319468021392822,
      "learning_rate": 7.131274906557725e-06,
      "loss": 0.1705,
      "step": 198
    },
    {
      "epoch": 0.7896825396825397,
      "grad_norm": 1.0703827142715454,
      "learning_rate": 7.101502161398626e-06,
      "loss": 0.139,
      "step": 199
    },
    {
      "epoch": 0.7936507936507936,
      "grad_norm": 1.0375055074691772,
      "learning_rate": 7.0716386399912075e-06,
      "loss": 0.1329,
      "step": 200
    },
    {
      "epoch": 0.7976190476190477,
      "grad_norm": 1.0811136960983276,
      "learning_rate": 7.041685632316748e-06,
      "loss": 0.1423,
      "step": 201
    },
    {
      "epoch": 0.8015873015873016,
      "grad_norm": 0.9610537886619568,
      "learning_rate": 7.0116444322219575e-06,
      "loss": 0.1137,
      "step": 202
    },
    {
      "epoch": 0.8055555555555556,
      "grad_norm": 1.199352741241455,
      "learning_rate": 6.981516337363099e-06,
      "loss": 0.1073,
      "step": 203
    },
    {
      "epoch": 0.8095238095238095,
      "grad_norm": 0.952040433883667,
      "learning_rate": 6.95130264914993e-06,
      "loss": 0.123,
      "step": 204
    },
    {
      "epoch": 0.8134920634920635,
      "grad_norm": 0.9396257400512695,
      "learning_rate": 6.9210046726894885e-06,
      "loss": 0.1092,
      "step": 205
    },
    {
      "epoch": 0.8174603174603174,
      "grad_norm": 1.0251622200012207,
      "learning_rate": 6.890623716729724e-06,
      "loss": 0.1484,
      "step": 206
    },
    {
      "epoch": 0.8214285714285714,
      "grad_norm": 1.2053864002227783,
      "learning_rate": 6.860161093602949e-06,
      "loss": 0.1455,
      "step": 207
    },
    {
      "epoch": 0.8253968253968254,
      "grad_norm": 0.9859071373939514,
      "learning_rate": 6.829618119169169e-06,
      "loss": 0.1455,
      "step": 208
    },
    {
      "epoch": 0.8293650793650794,
      "grad_norm": 1.0955175161361694,
      "learning_rate": 6.798996112759233e-06,
      "loss": 0.1515,
      "step": 209
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.956035315990448,
      "learning_rate": 6.768296397117848e-06,
      "loss": 0.1657,
      "step": 210
    },
    {
      "epoch": 0.8373015873015873,
      "grad_norm": 1.0417743921279907,
      "learning_rate": 6.737520298346438e-06,
      "loss": 0.1245,
      "step": 211
    },
    {
      "epoch": 0.8412698412698413,
      "grad_norm": 0.9253073930740356,
      "learning_rate": 6.706669145845863e-06,
      "loss": 0.1117,
      "step": 212
    },
    {
      "epoch": 0.8452380952380952,
      "grad_norm": 1.183728814125061,
      "learning_rate": 6.6757442722590015e-06,
      "loss": 0.1802,
      "step": 213
    },
    {
      "epoch": 0.8492063492063492,
      "grad_norm": 1.0416171550750732,
      "learning_rate": 6.6447470134131685e-06,
      "loss": 0.1274,
      "step": 214
    },
    {
      "epoch": 0.8531746031746031,
      "grad_norm": 0.9164001941680908,
      "learning_rate": 6.613678708262439e-06,
      "loss": 0.1032,
      "step": 215
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 1.1456245183944702,
      "learning_rate": 6.5825406988297815e-06,
      "loss": 0.1471,
      "step": 216
    },
    {
      "epoch": 0.8611111111111112,
      "grad_norm": 1.0662381649017334,
      "learning_rate": 6.551334330149114e-06,
      "loss": 0.1478,
      "step": 217
    },
    {
      "epoch": 0.8650793650793651,
      "grad_norm": 0.9697524905204773,
      "learning_rate": 6.520060950207186e-06,
      "loss": 0.1473,
      "step": 218
    },
    {
      "epoch": 0.8690476190476191,
      "grad_norm": 1.0304609537124634,
      "learning_rate": 6.488721909885359e-06,
      "loss": 0.1768,
      "step": 219
    },
    {
      "epoch": 0.873015873015873,
      "grad_norm": 0.8136041164398193,
      "learning_rate": 6.457318562901257e-06,
      "loss": 0.097,
      "step": 220
    },
    {
      "epoch": 0.876984126984127,
      "grad_norm": 1.09493088722229,
      "learning_rate": 6.425852265750282e-06,
      "loss": 0.1575,
      "step": 221
    },
    {
      "epoch": 0.8809523809523809,
      "grad_norm": 1.1294385194778442,
      "learning_rate": 6.394324377647028e-06,
      "loss": 0.161,
      "step": 222
    },
    {
      "epoch": 0.8849206349206349,
      "grad_norm": 1.1903278827667236,
      "learning_rate": 6.362736260466561e-06,
      "loss": 0.1358,
      "step": 223
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.0960198640823364,
      "learning_rate": 6.331089278685599e-06,
      "loss": 0.1835,
      "step": 224
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 1.0908770561218262,
      "learning_rate": 6.299384799323568e-06,
      "loss": 0.1171,
      "step": 225
    },
    {
      "epoch": 0.8968253968253969,
      "grad_norm": 0.971628725528717,
      "learning_rate": 6.267624191883551e-06,
      "loss": 0.1335,
      "step": 226
    },
    {
      "epoch": 0.9007936507936508,
      "grad_norm": 0.8757287263870239,
      "learning_rate": 6.235808828293135e-06,
      "loss": 0.1229,
      "step": 227
    },
    {
      "epoch": 0.9047619047619048,
      "grad_norm": 0.989013135433197,
      "learning_rate": 6.203940082845144e-06,
      "loss": 0.1633,
      "step": 228
    },
    {
      "epoch": 0.9087301587301587,
      "grad_norm": 0.7591527104377747,
      "learning_rate": 6.172019332138285e-06,
      "loss": 0.1041,
      "step": 229
    },
    {
      "epoch": 0.9126984126984127,
      "grad_norm": 0.8997424244880676,
      "learning_rate": 6.140047955017672e-06,
      "loss": 0.1541,
      "step": 230
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.7937338352203369,
      "learning_rate": 6.108027332515276e-06,
      "loss": 0.0954,
      "step": 231
    },
    {
      "epoch": 0.9206349206349206,
      "grad_norm": 0.9379100203514099,
      "learning_rate": 6.075958847790262e-06,
      "loss": 0.1284,
      "step": 232
    },
    {
      "epoch": 0.9246031746031746,
      "grad_norm": 0.8153742551803589,
      "learning_rate": 6.043843886069251e-06,
      "loss": 0.119,
      "step": 233
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.9349873661994934,
      "learning_rate": 6.011683834586474e-06,
      "loss": 0.1286,
      "step": 234
    },
    {
      "epoch": 0.9325396825396826,
      "grad_norm": 0.9875138998031616,
      "learning_rate": 5.979480082523858e-06,
      "loss": 0.1601,
      "step": 235
    },
    {
      "epoch": 0.9365079365079365,
      "grad_norm": 0.9074680209159851,
      "learning_rate": 5.947234020951015e-06,
      "loss": 0.133,
      "step": 236
    },
    {
      "epoch": 0.9404761904761905,
      "grad_norm": 0.9646974802017212,
      "learning_rate": 5.914947042765149e-06,
      "loss": 0.1515,
      "step": 237
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 0.9159960746765137,
      "learning_rate": 5.882620542630901e-06,
      "loss": 0.1399,
      "step": 238
    },
    {
      "epoch": 0.9484126984126984,
      "grad_norm": 0.9413661956787109,
      "learning_rate": 5.850255916920093e-06,
      "loss": 0.1329,
      "step": 239
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.8890795707702637,
      "learning_rate": 5.817854563651415e-06,
      "loss": 0.1414,
      "step": 240
    },
    {
      "epoch": 0.9563492063492064,
      "grad_norm": 0.9007329940795898,
      "learning_rate": 5.785417882430035e-06,
      "loss": 0.1165,
      "step": 241
    },
    {
      "epoch": 0.9603174603174603,
      "grad_norm": 1.0007832050323486,
      "learning_rate": 5.752947274387147e-06,
      "loss": 0.131,
      "step": 242
    },
    {
      "epoch": 0.9642857142857143,
      "grad_norm": 0.8886591196060181,
      "learning_rate": 5.720444142119445e-06,
      "loss": 0.1492,
      "step": 243
    },
    {
      "epoch": 0.9682539682539683,
      "grad_norm": 0.8927666544914246,
      "learning_rate": 5.687909889628529e-06,
      "loss": 0.1153,
      "step": 244
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 0.9265084862709045,
      "learning_rate": 5.6553459222602714e-06,
      "loss": 0.1504,
      "step": 245
    },
    {
      "epoch": 0.9761904761904762,
      "grad_norm": 0.8157972693443298,
      "learning_rate": 5.622753646644102e-06,
      "loss": 0.0995,
      "step": 246
    },
    {
      "epoch": 0.9801587301587301,
      "grad_norm": 0.9079906344413757,
      "learning_rate": 5.59013447063225e-06,
      "loss": 0.1582,
      "step": 247
    },
    {
      "epoch": 0.9841269841269841,
      "grad_norm": 0.9579424858093262,
      "learning_rate": 5.557489803238934e-06,
      "loss": 0.1196,
      "step": 248
    },
    {
      "epoch": 0.9880952380952381,
      "grad_norm": 0.8123847842216492,
      "learning_rate": 5.524821054579491e-06,
      "loss": 0.1295,
      "step": 249
    },
    {
      "epoch": 0.9920634920634921,
      "grad_norm": 0.8786181807518005,
      "learning_rate": 5.492129635809473e-06,
      "loss": 0.1142,
      "step": 250
    },
    {
      "epoch": 0.996031746031746,
      "grad_norm": 0.9151430130004883,
      "learning_rate": 5.459416959063688e-06,
      "loss": 0.1337,
      "step": 251
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9185081720352173,
      "learning_rate": 5.426684437395196e-06,
      "loss": 0.171,
      "step": 252
    },
    {
      "epoch": 1.003968253968254,
      "grad_norm": 0.8412165641784668,
      "learning_rate": 5.393933484714284e-06,
      "loss": 0.1082,
      "step": 253
    },
    {
      "epoch": 1.007936507936508,
      "grad_norm": 0.8413107991218567,
      "learning_rate": 5.361165515727374e-06,
      "loss": 0.1153,
      "step": 254
    },
    {
      "epoch": 1.0119047619047619,
      "grad_norm": 0.8458530902862549,
      "learning_rate": 5.328381945875933e-06,
      "loss": 0.123,
      "step": 255
    },
    {
      "epoch": 1.0158730158730158,
      "grad_norm": 0.9277226328849792,
      "learning_rate": 5.295584191275308e-06,
      "loss": 0.121,
      "step": 256
    },
    {
      "epoch": 1.0198412698412698,
      "grad_norm": 0.9794259667396545,
      "learning_rate": 5.26277366865358e-06,
      "loss": 0.1504,
      "step": 257
    },
    {
      "epoch": 1.0238095238095237,
      "grad_norm": 0.8696649670600891,
      "learning_rate": 5.229951795290353e-06,
      "loss": 0.1193,
      "step": 258
    },
    {
      "epoch": 1.0277777777777777,
      "grad_norm": 0.7507579326629639,
      "learning_rate": 5.197119988955534e-06,
      "loss": 0.08,
      "step": 259
    },
    {
      "epoch": 1.0317460317460316,
      "grad_norm": 0.74959397315979,
      "learning_rate": 5.164279667848094e-06,
      "loss": 0.0953,
      "step": 260
    },
    {
      "epoch": 1.0357142857142858,
      "grad_norm": 0.7617223262786865,
      "learning_rate": 5.131432250534809e-06,
      "loss": 0.0734,
      "step": 261
    },
    {
      "epoch": 1.0396825396825398,
      "grad_norm": 0.9482263922691345,
      "learning_rate": 5.0985791558889785e-06,
      "loss": 0.1184,
      "step": 262
    },
    {
      "epoch": 1.0436507936507937,
      "grad_norm": 0.9822360277175903,
      "learning_rate": 5.065721803029146e-06,
      "loss": 0.1303,
      "step": 263
    },
    {
      "epoch": 1.0476190476190477,
      "grad_norm": 0.8670525550842285,
      "learning_rate": 5.032861611257783e-06,
      "loss": 0.0961,
      "step": 264
    },
    {
      "epoch": 1.0515873015873016,
      "grad_norm": 0.9051881432533264,
      "learning_rate": 5e-06,
      "loss": 0.1017,
      "step": 265
    },
    {
      "epoch": 1.0555555555555556,
      "grad_norm": 0.9463195204734802,
      "learning_rate": 4.967138388742218e-06,
      "loss": 0.1128,
      "step": 266
    },
    {
      "epoch": 1.0595238095238095,
      "grad_norm": 0.837642252445221,
      "learning_rate": 4.934278196970857e-06,
      "loss": 0.1014,
      "step": 267
    },
    {
      "epoch": 1.0634920634920635,
      "grad_norm": 0.9752260446548462,
      "learning_rate": 4.9014208441110215e-06,
      "loss": 0.132,
      "step": 268
    },
    {
      "epoch": 1.0674603174603174,
      "grad_norm": 0.7861972451210022,
      "learning_rate": 4.868567749465192e-06,
      "loss": 0.0865,
      "step": 269
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.7748413681983948,
      "learning_rate": 4.835720332151907e-06,
      "loss": 0.0887,
      "step": 270
    },
    {
      "epoch": 1.0753968253968254,
      "grad_norm": 0.769105076789856,
      "learning_rate": 4.802880011044467e-06,
      "loss": 0.0913,
      "step": 271
    },
    {
      "epoch": 1.0793650793650793,
      "grad_norm": 0.801043689250946,
      "learning_rate": 4.770048204709648e-06,
      "loss": 0.0862,
      "step": 272
    },
    {
      "epoch": 1.0833333333333333,
      "grad_norm": 0.8220937252044678,
      "learning_rate": 4.73722633134642e-06,
      "loss": 0.0845,
      "step": 273
    },
    {
      "epoch": 1.0873015873015872,
      "grad_norm": 0.8764519095420837,
      "learning_rate": 4.7044158087246926e-06,
      "loss": 0.1076,
      "step": 274
    },
    {
      "epoch": 1.0912698412698412,
      "grad_norm": 0.8750497102737427,
      "learning_rate": 4.67161805412407e-06,
      "loss": 0.1001,
      "step": 275
    },
    {
      "epoch": 1.0952380952380953,
      "grad_norm": 0.8208699226379395,
      "learning_rate": 4.6388344842726266e-06,
      "loss": 0.0887,
      "step": 276
    },
    {
      "epoch": 1.0992063492063493,
      "grad_norm": 0.7672670483589172,
      "learning_rate": 4.606066515285719e-06,
      "loss": 0.1031,
      "step": 277
    },
    {
      "epoch": 1.1031746031746033,
      "grad_norm": 0.7054154872894287,
      "learning_rate": 4.573315562604804e-06,
      "loss": 0.0852,
      "step": 278
    },
    {
      "epoch": 1.1071428571428572,
      "grad_norm": 0.8251184821128845,
      "learning_rate": 4.540583040936313e-06,
      "loss": 0.0944,
      "step": 279
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.7829627990722656,
      "learning_rate": 4.5078703641905275e-06,
      "loss": 0.0807,
      "step": 280
    },
    {
      "epoch": 1.1150793650793651,
      "grad_norm": 0.7606924772262573,
      "learning_rate": 4.4751789454205105e-06,
      "loss": 0.0866,
      "step": 281
    },
    {
      "epoch": 1.119047619047619,
      "grad_norm": 0.8723813891410828,
      "learning_rate": 4.442510196761068e-06,
      "loss": 0.0936,
      "step": 282
    },
    {
      "epoch": 1.123015873015873,
      "grad_norm": 0.7986534833908081,
      "learning_rate": 4.409865529367751e-06,
      "loss": 0.0856,
      "step": 283
    },
    {
      "epoch": 1.126984126984127,
      "grad_norm": 0.78840571641922,
      "learning_rate": 4.377246353355899e-06,
      "loss": 0.0824,
      "step": 284
    },
    {
      "epoch": 1.130952380952381,
      "grad_norm": 0.8160957098007202,
      "learning_rate": 4.34465407773973e-06,
      "loss": 0.0844,
      "step": 285
    },
    {
      "epoch": 1.1349206349206349,
      "grad_norm": 0.8608824014663696,
      "learning_rate": 4.312090110371473e-06,
      "loss": 0.1139,
      "step": 286
    },
    {
      "epoch": 1.1388888888888888,
      "grad_norm": 0.8378157019615173,
      "learning_rate": 4.279555857880558e-06,
      "loss": 0.0813,
      "step": 287
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.8858137130737305,
      "learning_rate": 4.247052725612853e-06,
      "loss": 0.0903,
      "step": 288
    },
    {
      "epoch": 1.1468253968253967,
      "grad_norm": 0.8838244676589966,
      "learning_rate": 4.214582117569966e-06,
      "loss": 0.1004,
      "step": 289
    },
    {
      "epoch": 1.1507936507936507,
      "grad_norm": 0.888434648513794,
      "learning_rate": 4.182145436348587e-06,
      "loss": 0.1037,
      "step": 290
    },
    {
      "epoch": 1.1547619047619047,
      "grad_norm": 0.8349224328994751,
      "learning_rate": 4.1497440830799084e-06,
      "loss": 0.0849,
      "step": 291
    },
    {
      "epoch": 1.1587301587301586,
      "grad_norm": 0.8272682428359985,
      "learning_rate": 4.1173794573691e-06,
      "loss": 0.1053,
      "step": 292
    },
    {
      "epoch": 1.1626984126984128,
      "grad_norm": 0.8138509392738342,
      "learning_rate": 4.0850529572348505e-06,
      "loss": 0.0857,
      "step": 293
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 0.8172481060028076,
      "learning_rate": 4.052765979048986e-06,
      "loss": 0.0908,
      "step": 294
    },
    {
      "epoch": 1.1706349206349207,
      "grad_norm": 0.8132976293563843,
      "learning_rate": 4.0205199174761435e-06,
      "loss": 0.0848,
      "step": 295
    },
    {
      "epoch": 1.1746031746031746,
      "grad_norm": 0.8448895215988159,
      "learning_rate": 3.988316165413528e-06,
      "loss": 0.0854,
      "step": 296
    },
    {
      "epoch": 1.1785714285714286,
      "grad_norm": 0.8900222182273865,
      "learning_rate": 3.956156113930752e-06,
      "loss": 0.0841,
      "step": 297
    },
    {
      "epoch": 1.1825396825396826,
      "grad_norm": 0.7989954352378845,
      "learning_rate": 3.924041152209739e-06,
      "loss": 0.0984,
      "step": 298
    },
    {
      "epoch": 1.1865079365079365,
      "grad_norm": 0.894929826259613,
      "learning_rate": 3.891972667484726e-06,
      "loss": 0.0983,
      "step": 299
    },
    {
      "epoch": 1.1904761904761905,
      "grad_norm": 0.8010528087615967,
      "learning_rate": 3.859952044982329e-06,
      "loss": 0.1,
      "step": 300
    },
    {
      "epoch": 1.1944444444444444,
      "grad_norm": 0.7610393762588501,
      "learning_rate": 3.827980667861716e-06,
      "loss": 0.0982,
      "step": 301
    },
    {
      "epoch": 1.1984126984126984,
      "grad_norm": 0.7270869612693787,
      "learning_rate": 3.7960599171548572e-06,
      "loss": 0.0808,
      "step": 302
    },
    {
      "epoch": 1.2023809523809523,
      "grad_norm": 0.8192585110664368,
      "learning_rate": 3.764191171706867e-06,
      "loss": 0.0991,
      "step": 303
    },
    {
      "epoch": 1.2063492063492063,
      "grad_norm": 0.7628523707389832,
      "learning_rate": 3.732375808116451e-06,
      "loss": 0.0795,
      "step": 304
    },
    {
      "epoch": 1.2103174603174602,
      "grad_norm": 0.8737412095069885,
      "learning_rate": 3.7006152006764336e-06,
      "loss": 0.1045,
      "step": 305
    },
    {
      "epoch": 1.2142857142857142,
      "grad_norm": 0.8825508952140808,
      "learning_rate": 3.6689107213144025e-06,
      "loss": 0.0845,
      "step": 306
    },
    {
      "epoch": 1.2182539682539684,
      "grad_norm": 0.8843568563461304,
      "learning_rate": 3.6372637395334416e-06,
      "loss": 0.1215,
      "step": 307
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 0.8430192470550537,
      "learning_rate": 3.6056756223529734e-06,
      "loss": 0.09,
      "step": 308
    },
    {
      "epoch": 1.2261904761904763,
      "grad_norm": 0.678902804851532,
      "learning_rate": 3.574147734249719e-06,
      "loss": 0.0749,
      "step": 309
    },
    {
      "epoch": 1.2301587301587302,
      "grad_norm": 0.8882924318313599,
      "learning_rate": 3.542681437098745e-06,
      "loss": 0.1003,
      "step": 310
    },
    {
      "epoch": 1.2341269841269842,
      "grad_norm": 0.9567738771438599,
      "learning_rate": 3.5112780901146426e-06,
      "loss": 0.1038,
      "step": 311
    },
    {
      "epoch": 1.2380952380952381,
      "grad_norm": 0.8461977243423462,
      "learning_rate": 3.479939049792817e-06,
      "loss": 0.0962,
      "step": 312
    },
    {
      "epoch": 1.242063492063492,
      "grad_norm": 0.8283154368400574,
      "learning_rate": 3.448665669850888e-06,
      "loss": 0.0889,
      "step": 313
    },
    {
      "epoch": 1.246031746031746,
      "grad_norm": 0.9821956753730774,
      "learning_rate": 3.4174593011702197e-06,
      "loss": 0.1202,
      "step": 314
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.7908430695533752,
      "learning_rate": 3.386321291737563e-06,
      "loss": 0.0789,
      "step": 315
    },
    {
      "epoch": 1.253968253968254,
      "grad_norm": 0.9723663330078125,
      "learning_rate": 3.3552529865868323e-06,
      "loss": 0.1009,
      "step": 316
    },
    {
      "epoch": 1.257936507936508,
      "grad_norm": 0.7394662499427795,
      "learning_rate": 3.3242557277410015e-06,
      "loss": 0.0743,
      "step": 317
    },
    {
      "epoch": 1.2619047619047619,
      "grad_norm": 0.8046512603759766,
      "learning_rate": 3.2933308541541365e-06,
      "loss": 0.0986,
      "step": 318
    },
    {
      "epoch": 1.2658730158730158,
      "grad_norm": 0.7483407855033875,
      "learning_rate": 3.2624797016535626e-06,
      "loss": 0.0798,
      "step": 319
    },
    {
      "epoch": 1.2698412698412698,
      "grad_norm": 1.0179373025894165,
      "learning_rate": 3.2317036028821523e-06,
      "loss": 0.1198,
      "step": 320
    },
    {
      "epoch": 1.2738095238095237,
      "grad_norm": 0.794193685054779,
      "learning_rate": 3.201003887240768e-06,
      "loss": 0.0876,
      "step": 321
    },
    {
      "epoch": 1.2777777777777777,
      "grad_norm": 0.8476602435112,
      "learning_rate": 3.1703818808308327e-06,
      "loss": 0.0746,
      "step": 322
    },
    {
      "epoch": 1.2817460317460316,
      "grad_norm": 0.9657560586929321,
      "learning_rate": 3.1398389063970512e-06,
      "loss": 0.0952,
      "step": 323
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.8030561208724976,
      "learning_rate": 3.1093762832702775e-06,
      "loss": 0.1035,
      "step": 324
    },
    {
      "epoch": 1.2896825396825398,
      "grad_norm": 0.8488226532936096,
      "learning_rate": 3.0789953273105123e-06,
      "loss": 0.0968,
      "step": 325
    },
    {
      "epoch": 1.2936507936507937,
      "grad_norm": 0.9824974536895752,
      "learning_rate": 3.048697350850073e-06,
      "loss": 0.1233,
      "step": 326
    },
    {
      "epoch": 1.2976190476190477,
      "grad_norm": 0.8766565918922424,
      "learning_rate": 3.0184836626369034e-06,
      "loss": 0.1042,
      "step": 327
    },
    {
      "epoch": 1.3015873015873016,
      "grad_norm": 0.7361365556716919,
      "learning_rate": 2.988355567778043e-06,
      "loss": 0.0844,
      "step": 328
    },
    {
      "epoch": 1.3055555555555556,
      "grad_norm": 0.7968645691871643,
      "learning_rate": 2.9583143676832526e-06,
      "loss": 0.096,
      "step": 329
    },
    {
      "epoch": 1.3095238095238095,
      "grad_norm": 0.7606014609336853,
      "learning_rate": 2.9283613600087933e-06,
      "loss": 0.0665,
      "step": 330
    },
    {
      "epoch": 1.3134920634920635,
      "grad_norm": 0.7167039513587952,
      "learning_rate": 2.8984978386013767e-06,
      "loss": 0.0875,
      "step": 331
    },
    {
      "epoch": 1.3174603174603174,
      "grad_norm": 0.7731678485870361,
      "learning_rate": 2.8687250934422774e-06,
      "loss": 0.0818,
      "step": 332
    },
    {
      "epoch": 1.3214285714285714,
      "grad_norm": 0.9659709930419922,
      "learning_rate": 2.839044410591606e-06,
      "loss": 0.1325,
      "step": 333
    },
    {
      "epoch": 1.3253968253968254,
      "grad_norm": 0.8399143815040588,
      "learning_rate": 2.809457072132766e-06,
      "loss": 0.103,
      "step": 334
    },
    {
      "epoch": 1.3293650793650793,
      "grad_norm": 0.7453393936157227,
      "learning_rate": 2.779964356117063e-06,
      "loss": 0.0803,
      "step": 335
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.8334883451461792,
      "learning_rate": 2.750567536508504e-06,
      "loss": 0.0927,
      "step": 336
    },
    {
      "epoch": 1.3373015873015874,
      "grad_norm": 0.8300994038581848,
      "learning_rate": 2.7212678831287627e-06,
      "loss": 0.0941,
      "step": 337
    },
    {
      "epoch": 1.3412698412698414,
      "grad_norm": 0.8167857527732849,
      "learning_rate": 2.692066661602333e-06,
      "loss": 0.0859,
      "step": 338
    },
    {
      "epoch": 1.3452380952380953,
      "grad_norm": 0.8473389744758606,
      "learning_rate": 2.662965133301862e-06,
      "loss": 0.0951,
      "step": 339
    },
    {
      "epoch": 1.3492063492063493,
      "grad_norm": 0.8304094076156616,
      "learning_rate": 2.633964555293654e-06,
      "loss": 0.091,
      "step": 340
    },
    {
      "epoch": 1.3531746031746033,
      "grad_norm": 0.7690606713294983,
      "learning_rate": 2.605066180283378e-06,
      "loss": 0.0758,
      "step": 341
    },
    {
      "epoch": 1.3571428571428572,
      "grad_norm": 0.8668590784072876,
      "learning_rate": 2.576271256561953e-06,
      "loss": 0.0879,
      "step": 342
    },
    {
      "epoch": 1.3611111111111112,
      "grad_norm": 0.8535332083702087,
      "learning_rate": 2.5475810279516287e-06,
      "loss": 0.0975,
      "step": 343
    },
    {
      "epoch": 1.3650793650793651,
      "grad_norm": 0.828132688999176,
      "learning_rate": 2.5189967337522574e-06,
      "loss": 0.0844,
      "step": 344
    },
    {
      "epoch": 1.369047619047619,
      "grad_norm": 0.9968429207801819,
      "learning_rate": 2.49051960868776e-06,
      "loss": 0.1099,
      "step": 345
    },
    {
      "epoch": 1.373015873015873,
      "grad_norm": 0.7960136532783508,
      "learning_rate": 2.46215088285279e-06,
      "loss": 0.0855,
      "step": 346
    },
    {
      "epoch": 1.376984126984127,
      "grad_norm": 0.7763605713844299,
      "learning_rate": 2.433891781659603e-06,
      "loss": 0.0733,
      "step": 347
    },
    {
      "epoch": 1.380952380952381,
      "grad_norm": 0.8548492789268494,
      "learning_rate": 2.4057435257851173e-06,
      "loss": 0.1079,
      "step": 348
    },
    {
      "epoch": 1.3849206349206349,
      "grad_norm": 0.7584509253501892,
      "learning_rate": 2.377707331118196e-06,
      "loss": 0.0945,
      "step": 349
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 0.6710481643676758,
      "learning_rate": 2.349784408707112e-06,
      "loss": 0.0846,
      "step": 350
    },
    {
      "epoch": 1.3928571428571428,
      "grad_norm": 0.7176137566566467,
      "learning_rate": 2.3219759647072467e-06,
      "loss": 0.076,
      "step": 351
    },
    {
      "epoch": 1.3968253968253967,
      "grad_norm": 0.8469821810722351,
      "learning_rate": 2.2942832003289823e-06,
      "loss": 0.0865,
      "step": 352
    },
    {
      "epoch": 1.4007936507936507,
      "grad_norm": 0.8207787275314331,
      "learning_rate": 2.2667073117858185e-06,
      "loss": 0.1097,
      "step": 353
    },
    {
      "epoch": 1.4047619047619047,
      "grad_norm": 0.6846061944961548,
      "learning_rate": 2.2392494902427027e-06,
      "loss": 0.0741,
      "step": 354
    },
    {
      "epoch": 1.4087301587301586,
      "grad_norm": 0.7113244533538818,
      "learning_rate": 2.2119109217645697e-06,
      "loss": 0.0873,
      "step": 355
    },
    {
      "epoch": 1.4126984126984126,
      "grad_norm": 0.7556560039520264,
      "learning_rate": 2.1846927872651135e-06,
      "loss": 0.0819,
      "step": 356
    },
    {
      "epoch": 1.4166666666666667,
      "grad_norm": 1.043052315711975,
      "learning_rate": 2.1575962624557754e-06,
      "loss": 0.0958,
      "step": 357
    },
    {
      "epoch": 1.4206349206349207,
      "grad_norm": 0.8609367609024048,
      "learning_rate": 2.1306225177949584e-06,
      "loss": 0.1056,
      "step": 358
    },
    {
      "epoch": 1.4246031746031746,
      "grad_norm": 0.7151347994804382,
      "learning_rate": 2.1037727184374705e-06,
      "loss": 0.0737,
      "step": 359
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.8181217312812805,
      "learning_rate": 2.07704802418419e-06,
      "loss": 0.0946,
      "step": 360
    },
    {
      "epoch": 1.4325396825396826,
      "grad_norm": 0.9127845764160156,
      "learning_rate": 2.050449589431969e-06,
      "loss": 0.1061,
      "step": 361
    },
    {
      "epoch": 1.4365079365079365,
      "grad_norm": 0.782051146030426,
      "learning_rate": 2.023978563123771e-06,
      "loss": 0.0819,
      "step": 362
    },
    {
      "epoch": 1.4404761904761905,
      "grad_norm": 0.8060847520828247,
      "learning_rate": 1.997636088699035e-06,
      "loss": 0.09,
      "step": 363
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.8724985122680664,
      "learning_rate": 1.9714233040442915e-06,
      "loss": 0.0938,
      "step": 364
    },
    {
      "epoch": 1.4484126984126984,
      "grad_norm": 0.7807143330574036,
      "learning_rate": 1.9453413414440043e-06,
      "loss": 0.091,
      "step": 365
    },
    {
      "epoch": 1.4523809523809523,
      "grad_norm": 0.9308791160583496,
      "learning_rate": 1.919391327531663e-06,
      "loss": 0.1177,
      "step": 366
    },
    {
      "epoch": 1.4563492063492063,
      "grad_norm": 0.7229511141777039,
      "learning_rate": 1.8935743832411163e-06,
      "loss": 0.075,
      "step": 367
    },
    {
      "epoch": 1.4603174603174602,
      "grad_norm": 0.9192443490028381,
      "learning_rate": 1.8678916237581524e-06,
      "loss": 0.0981,
      "step": 368
    },
    {
      "epoch": 1.4642857142857144,
      "grad_norm": 0.9448950886726379,
      "learning_rate": 1.8423441584723312e-06,
      "loss": 0.0965,
      "step": 369
    },
    {
      "epoch": 1.4682539682539684,
      "grad_norm": 0.9117385149002075,
      "learning_rate": 1.816933090929055e-06,
      "loss": 0.0952,
      "step": 370
    },
    {
      "epoch": 1.4722222222222223,
      "grad_norm": 0.8257307410240173,
      "learning_rate": 1.791659518781908e-06,
      "loss": 0.0794,
      "step": 371
    },
    {
| "epoch": 1.4761904761904763, |
| "grad_norm": 0.9818858504295349, |
| "learning_rate": 1.7665245337452368e-06, |
| "loss": 0.1018, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.4801587301587302, |
| "grad_norm": 0.8639586567878723, |
| "learning_rate": 1.7415292215469948e-06, |
| "loss": 0.0831, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.4841269841269842, |
| "grad_norm": 0.7928310036659241, |
| "learning_rate": 1.716674661881848e-06, |
| "loss": 0.0954, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.4880952380952381, |
| "grad_norm": 0.972807765007019, |
| "learning_rate": 1.6919619283645262e-06, |
| "loss": 0.1005, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.492063492063492, |
| "grad_norm": 0.7709330320358276, |
| "learning_rate": 1.667392088483456e-06, |
| "loss": 0.0866, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.496031746031746, |
| "grad_norm": 0.8894091248512268, |
| "learning_rate": 1.6429662035546451e-06, |
| "loss": 0.0831, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.6335495114326477, |
| "learning_rate": 1.6186853286758397e-06, |
| "loss": 0.0706, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.503968253968254, |
| "grad_norm": 0.6157211065292358, |
| "learning_rate": 1.5945505126809524e-06, |
| "loss": 0.0578, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.507936507936508, |
| "grad_norm": 0.6663343906402588, |
| "learning_rate": 1.570562798094747e-06, |
| "loss": 0.0643, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5119047619047619, |
| "grad_norm": 0.817520022392273, |
| "learning_rate": 1.5467232210878153e-06, |
| "loss": 0.1108, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.5158730158730158, |
| "grad_norm": 0.856833279132843, |
| "learning_rate": 1.5230328114318127e-06, |
| "loss": 0.0938, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5198412698412698, |
| "grad_norm": 0.6747552156448364, |
| "learning_rate": 1.4994925924549797e-06, |
| "loss": 0.0691, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5238095238095237, |
| "grad_norm": 0.7864953875541687, |
| "learning_rate": 1.4761035809979395e-06, |
| "loss": 0.0689, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.5277777777777777, |
| "grad_norm": 0.7300853729248047, |
| "learning_rate": 1.452866787369771e-06, |
| "loss": 0.0903, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5317460317460316, |
| "grad_norm": 0.7783157825469971, |
| "learning_rate": 1.4297832153043657e-06, |
| "loss": 0.0843, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5357142857142856, |
| "grad_norm": 0.7717552185058594, |
| "learning_rate": 1.4068538619170763e-06, |
| "loss": 0.0732, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.5396825396825395, |
| "grad_norm": 0.7834195494651794, |
| "learning_rate": 1.3840797176616467e-06, |
| "loss": 0.0751, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.5436507936507935, |
| "grad_norm": 0.8702894449234009, |
| "learning_rate": 1.3614617662874197e-06, |
| "loss": 0.1005, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.5476190476190477, |
| "grad_norm": 0.8433047533035278, |
| "learning_rate": 1.3390009847968505e-06, |
| "loss": 0.0861, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5515873015873016, |
| "grad_norm": 0.8168141841888428, |
| "learning_rate": 1.316698343403302e-06, |
| "loss": 0.1047, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5555555555555556, |
| "grad_norm": 0.9714818596839905, |
| "learning_rate": 1.2945548054891322e-06, |
| "loss": 0.1194, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.5595238095238095, |
| "grad_norm": 0.8196632266044617, |
| "learning_rate": 1.27257132756409e-06, |
| "loss": 0.0811, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.5634920634920635, |
| "grad_norm": 0.8637920022010803, |
| "learning_rate": 1.2507488592239848e-06, |
| "loss": 0.0912, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.5674603174603174, |
| "grad_norm": 0.9072219133377075, |
| "learning_rate": 1.2290883431096778e-06, |
| "loss": 0.0846, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.5714285714285714, |
| "grad_norm": 0.8047322630882263, |
| "learning_rate": 1.2075907148663579e-06, |
| "loss": 0.0799, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.5753968253968254, |
| "grad_norm": 0.7913994193077087, |
| "learning_rate": 1.186256903103129e-06, |
| "loss": 0.073, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.5793650793650795, |
| "grad_norm": 0.9412828087806702, |
| "learning_rate": 1.1650878293528994e-06, |
| "loss": 0.1013, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.5833333333333335, |
| "grad_norm": 0.8140155673027039, |
| "learning_rate": 1.1440844080325703e-06, |
| "loss": 0.0901, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.5873015873015874, |
| "grad_norm": 0.7938934564590454, |
| "learning_rate": 1.1232475464035386e-06, |
| "loss": 0.0883, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.5912698412698414, |
| "grad_norm": 0.6305232048034668, |
| "learning_rate": 1.10257814453251e-06, |
| "loss": 0.0605, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.5952380952380953, |
| "grad_norm": 0.8313385248184204, |
| "learning_rate": 1.0820770952526155e-06, |
| "loss": 0.0903, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.5992063492063493, |
| "grad_norm": 0.6266860365867615, |
| "learning_rate": 1.0617452841248494e-06, |
| "loss": 0.0847, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.6031746031746033, |
| "grad_norm": 0.769794225692749, |
| "learning_rate": 1.0415835893998116e-06, |
| "loss": 0.0916, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.6071428571428572, |
| "grad_norm": 0.8159760236740112, |
| "learning_rate": 1.0215928819797744e-06, |
| "loss": 0.0984, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6111111111111112, |
| "grad_norm": 0.6972020268440247, |
| "learning_rate": 1.0017740253810608e-06, |
| "loss": 0.0589, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.6150793650793651, |
| "grad_norm": 0.7206509709358215, |
| "learning_rate": 9.821278756967467e-07, |
| "loss": 0.0778, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.619047619047619, |
| "grad_norm": 0.9815142750740051, |
| "learning_rate": 9.62655281559679e-07, |
| "loss": 0.1159, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.623015873015873, |
| "grad_norm": 0.7741109728813171, |
| "learning_rate": 9.433570841058187e-07, |
| "loss": 0.0804, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.626984126984127, |
| "grad_norm": 0.8201466202735901, |
| "learning_rate": 9.242341169379077e-07, |
| "loss": 0.0907, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.630952380952381, |
| "grad_norm": 0.7901651263237, |
| "learning_rate": 9.052872060894613e-07, |
| "loss": 0.0854, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6349206349206349, |
| "grad_norm": 0.828625500202179, |
| "learning_rate": 8.865171699890835e-07, |
| "loss": 0.0786, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6388888888888888, |
| "grad_norm": 0.854513943195343, |
| "learning_rate": 8.679248194251211e-07, |
| "loss": 0.099, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.6428571428571428, |
| "grad_norm": 0.7350807189941406, |
| "learning_rate": 8.495109575106331e-07, |
| "loss": 0.0804, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.6468253968253967, |
| "grad_norm": 0.742985725402832, |
| "learning_rate": 8.312763796487038e-07, |
| "loss": 0.0731, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.6507936507936507, |
| "grad_norm": 0.7394562363624573, |
| "learning_rate": 8.132218734980852e-07, |
| "loss": 0.0811, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.6547619047619047, |
| "grad_norm": 0.8178054094314575, |
| "learning_rate": 7.953482189391687e-07, |
| "loss": 0.089, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.6587301587301586, |
| "grad_norm": 0.863472044467926, |
| "learning_rate": 7.776561880403072e-07, |
| "loss": 0.0876, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.6626984126984126, |
| "grad_norm": 1.0591928958892822, |
| "learning_rate": 7.601465450244528e-07, |
| "loss": 0.1542, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.6666666666666665, |
| "grad_norm": 0.8059977293014526, |
| "learning_rate": 7.42820046236154e-07, |
| "loss": 0.0785, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.6706349206349205, |
| "grad_norm": 0.9021475315093994, |
| "learning_rate": 7.256774401088817e-07, |
| "loss": 0.0971, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.6746031746031746, |
| "grad_norm": 0.8722649216651917, |
| "learning_rate": 7.087194671326986e-07, |
| "loss": 0.0797, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.6785714285714286, |
| "grad_norm": 0.9459144473075867, |
| "learning_rate": 6.91946859822279e-07, |
| "loss": 0.1087, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.6825396825396826, |
| "grad_norm": 0.8489049077033997, |
| "learning_rate": 6.753603426852589e-07, |
| "loss": 0.1114, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.6865079365079365, |
| "grad_norm": 0.6769583225250244, |
| "learning_rate": 6.589606321909464e-07, |
| "loss": 0.0799, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.6904761904761905, |
| "grad_norm": 0.806599497795105, |
| "learning_rate": 6.427484367393699e-07, |
| "loss": 0.1004, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.6944444444444444, |
| "grad_norm": 0.7337989807128906, |
| "learning_rate": 6.267244566306801e-07, |
| "loss": 0.0786, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.6984126984126984, |
| "grad_norm": 0.7469695806503296, |
| "learning_rate": 6.108893840348995e-07, |
| "loss": 0.1028, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7023809523809523, |
| "grad_norm": 0.7320832014083862, |
| "learning_rate": 5.952439029620222e-07, |
| "loss": 0.075, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7063492063492065, |
| "grad_norm": 0.6765589118003845, |
| "learning_rate": 5.797886892324695e-07, |
| "loss": 0.0674, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.7103174603174605, |
| "grad_norm": 0.6787246465682983, |
| "learning_rate": 5.645244104478947e-07, |
| "loss": 0.0773, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.7142857142857144, |
| "grad_norm": 0.8502627611160278, |
| "learning_rate": 5.494517259623478e-07, |
| "loss": 0.1127, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7182539682539684, |
| "grad_norm": 0.793020486831665, |
| "learning_rate": 5.34571286853795e-07, |
| "loss": 0.1059, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7222222222222223, |
| "grad_norm": 0.8450143933296204, |
| "learning_rate": 5.198837358959901e-07, |
| "loss": 0.1051, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7261904761904763, |
| "grad_norm": 0.6854594945907593, |
| "learning_rate": 5.05389707530714e-07, |
| "loss": 0.0646, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7301587301587302, |
| "grad_norm": 0.6840714812278748, |
| "learning_rate": 4.91089827840367e-07, |
| "loss": 0.0778, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.7341269841269842, |
| "grad_norm": 0.8606773018836975, |
| "learning_rate": 4.769847145209244e-07, |
| "loss": 0.1084, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7380952380952381, |
| "grad_norm": 0.7901908159255981, |
| "learning_rate": 4.6307497685525894e-07, |
| "loss": 0.0863, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.742063492063492, |
| "grad_norm": 0.7910974025726318, |
| "learning_rate": 4.4936121568681546e-07, |
| "loss": 0.0849, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.746031746031746, |
| "grad_norm": 0.7289157509803772, |
| "learning_rate": 4.3584402339366174e-07, |
| "loss": 0.0724, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.7375960350036621, |
| "learning_rate": 4.225239838628981e-07, |
| "loss": 0.0737, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.753968253968254, |
| "grad_norm": 0.8164241909980774, |
| "learning_rate": 4.0940167246543595e-07, |
| "loss": 0.086, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.757936507936508, |
| "grad_norm": 0.8139920234680176, |
| "learning_rate": 3.964776560311484e-07, |
| "loss": 0.0933, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.7619047619047619, |
| "grad_norm": 0.7812939286231995, |
| "learning_rate": 3.8375249282437743e-07, |
| "loss": 0.0882, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.7658730158730158, |
| "grad_norm": 0.8722679615020752, |
| "learning_rate": 3.71226732519826e-07, |
| "loss": 0.0974, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.7698412698412698, |
| "grad_norm": 0.8438254594802856, |
| "learning_rate": 3.589009161788104e-07, |
| "loss": 0.0828, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.7738095238095237, |
| "grad_norm": 0.7687845826148987, |
| "learning_rate": 3.4677557622589175e-07, |
| "loss": 0.0774, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.7777777777777777, |
| "grad_norm": 0.8137856125831604, |
| "learning_rate": 3.3485123642587657e-07, |
| "loss": 0.0812, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.7817460317460316, |
| "grad_norm": 0.7595266699790955, |
| "learning_rate": 3.2312841186118937e-07, |
| "loss": 0.079, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.7857142857142856, |
| "grad_norm": 0.8352352380752563, |
| "learning_rate": 3.116076089096265e-07, |
| "loss": 0.085, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.7896825396825395, |
| "grad_norm": 0.7053031325340271, |
| "learning_rate": 3.0028932522248256e-07, |
| "loss": 0.0771, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.7936507936507935, |
| "grad_norm": 0.6933891177177429, |
| "learning_rate": 2.8917404970305096e-07, |
| "loss": 0.0718, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.7976190476190477, |
| "grad_norm": 0.7048924565315247, |
| "learning_rate": 2.782622624855097e-07, |
| "loss": 0.0847, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.8015873015873016, |
| "grad_norm": 0.6969692707061768, |
| "learning_rate": 2.6755443491417786e-07, |
| "loss": 0.0814, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8055555555555556, |
| "grad_norm": 0.6592867374420166, |
| "learning_rate": 2.570510295231571e-07, |
| "loss": 0.0642, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.8095238095238095, |
| "grad_norm": 0.7861699461936951, |
| "learning_rate": 2.467525000163523e-07, |
| "loss": 0.085, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8134920634920635, |
| "grad_norm": 0.8022602200508118, |
| "learning_rate": 2.36659291247871e-07, |
| "loss": 0.0945, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.8174603174603174, |
| "grad_norm": 0.6735831499099731, |
| "learning_rate": 2.2677183920281342e-07, |
| "loss": 0.0562, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.8214285714285714, |
| "grad_norm": 0.7735519409179688, |
| "learning_rate": 2.1709057097843266e-07, |
| "loss": 0.0736, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8253968253968254, |
| "grad_norm": 0.7292094826698303, |
| "learning_rate": 2.0761590476568893e-07, |
| "loss": 0.0664, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8293650793650795, |
| "grad_norm": 0.887444794178009, |
| "learning_rate": 1.9834824983118673e-07, |
| "loss": 0.1091, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.8333333333333335, |
| "grad_norm": 0.6806834936141968, |
| "learning_rate": 1.892880064994934e-07, |
| "loss": 0.0634, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8373015873015874, |
| "grad_norm": 0.8866720199584961, |
| "learning_rate": 1.8043556613585143e-07, |
| "loss": 0.0999, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.8412698412698414, |
| "grad_norm": 0.9056561589241028, |
| "learning_rate": 1.7179131112926628e-07, |
| "loss": 0.0974, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.8452380952380953, |
| "grad_norm": 0.7735084891319275, |
| "learning_rate": 1.6335561487599406e-07, |
| "loss": 0.0776, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8492063492063493, |
| "grad_norm": 1.7332282066345215, |
| "learning_rate": 1.551288417634106e-07, |
| "loss": 0.1426, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.8531746031746033, |
| "grad_norm": 0.8308102488517761, |
| "learning_rate": 1.471113471542712e-07, |
| "loss": 0.0869, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.8571428571428572, |
| "grad_norm": 0.8413865566253662, |
| "learning_rate": 1.3930347737136195e-07, |
| "loss": 0.099, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.8611111111111112, |
| "grad_norm": 1.0041447877883911, |
| "learning_rate": 1.3170556968253756e-07, |
| "loss": 0.1191, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.8650793650793651, |
| "grad_norm": 0.8478453159332275, |
| "learning_rate": 1.2431795228615372e-07, |
| "loss": 0.0849, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.869047619047619, |
| "grad_norm": 0.8766846656799316, |
| "learning_rate": 1.1714094429689127e-07, |
| "loss": 0.0827, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.873015873015873, |
| "grad_norm": 0.8661823272705078, |
| "learning_rate": 1.1017485573197151e-07, |
| "loss": 0.093, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.876984126984127, |
| "grad_norm": 0.7078859210014343, |
| "learning_rate": 1.0341998749776316e-07, |
| "loss": 0.0646, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.880952380952381, |
| "grad_norm": 0.8136516809463501, |
| "learning_rate": 9.687663137678605e-08, |
| "loss": 0.095, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.8849206349206349, |
| "grad_norm": 0.9041445851325989, |
| "learning_rate": 9.054507001510727e-08, |
| "loss": 0.1038, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.8888888888888888, |
| "grad_norm": 0.694397509098053, |
| "learning_rate": 8.442557691013042e-08, |
| "loss": 0.0794, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.8928571428571428, |
| "grad_norm": 0.8240735530853271, |
| "learning_rate": 7.851841639878399e-08, |
| "loss": 0.0805, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.8968253968253967, |
| "grad_norm": 0.7979876399040222, |
| "learning_rate": 7.282384364610207e-08, |
| "loss": 0.1017, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.9007936507936507, |
| "grad_norm": 0.6360365152359009, |
| "learning_rate": 6.734210463420099e-08, |
| "loss": 0.0615, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.9047619047619047, |
| "grad_norm": 0.6626116633415222, |
| "learning_rate": 6.207343615165562e-08, |
| "loss": 0.0671, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9087301587301586, |
| "grad_norm": 0.7120184302330017, |
| "learning_rate": 5.701806578327029e-08, |
| "loss": 0.0673, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9126984126984126, |
| "grad_norm": 0.7189626097679138, |
| "learning_rate": 5.21762119002478e-08, |
| "loss": 0.0731, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9166666666666665, |
| "grad_norm": 0.6905253529548645, |
| "learning_rate": 4.7548083650759134e-08, |
| "loss": 0.0666, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9206349206349205, |
| "grad_norm": 0.6117310523986816, |
| "learning_rate": 4.31338809509052e-08, |
| "loss": 0.0551, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9246031746031746, |
| "grad_norm": 0.7856166362762451, |
| "learning_rate": 3.8933794476083143e-08, |
| "loss": 0.073, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9285714285714286, |
| "grad_norm": 0.7610016465187073, |
| "learning_rate": 3.494800565275125e-08, |
| "loss": 0.0715, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9325396825396826, |
| "grad_norm": 0.7038713097572327, |
| "learning_rate": 3.1176686650589147e-08, |
| "loss": 0.0746, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9365079365079365, |
| "grad_norm": 0.8484662771224976, |
| "learning_rate": 2.7620000375064848e-08, |
| "loss": 0.0991, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9404761904761905, |
| "grad_norm": 0.8162146210670471, |
| "learning_rate": 2.4278100460393138e-08, |
| "loss": 0.0709, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9444444444444444, |
| "grad_norm": 0.8962053060531616, |
| "learning_rate": 2.115113126290258e-08, |
| "loss": 0.102, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.9484126984126984, |
| "grad_norm": 0.8036097884178162, |
| "learning_rate": 1.8239227854799368e-08, |
| "loss": 0.0728, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.9523809523809523, |
| "grad_norm": 0.8631817698478699, |
| "learning_rate": 1.554251601833201e-08, |
| "loss": 0.0912, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.9563492063492065, |
| "grad_norm": 0.8002811074256897, |
| "learning_rate": 1.3061112240357887e-08, |
| "loss": 0.0706, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.9603174603174605, |
| "grad_norm": 0.7505822777748108, |
| "learning_rate": 1.0795123707312283e-08, |
| "loss": 0.0708, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.9642857142857144, |
| "grad_norm": 0.8649543523788452, |
| "learning_rate": 8.744648300578196e-09, |
| "loss": 0.0926, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.9682539682539684, |
| "grad_norm": 0.8728803992271423, |
| "learning_rate": 6.9097745922580564e-09, |
| "loss": 0.081, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.9722222222222223, |
| "grad_norm": 0.7853608727455139, |
| "learning_rate": 5.2905818413478975e-09, |
| "loss": 0.0676, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.9761904761904763, |
| "grad_norm": 0.8135411143302917, |
| "learning_rate": 3.887139990313427e-09, |
| "loss": 0.0859, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.9801587301587302, |
| "grad_norm": 0.8057032823562622, |
| "learning_rate": 2.699509662069666e-09, |
| "loss": 0.0985, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.9841269841269842, |
| "grad_norm": 0.6384691596031189, |
| "learning_rate": 1.7277421573608234e-09, |
| "loss": 0.0708, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.9880952380952381, |
| "grad_norm": 0.6963675618171692, |
| "learning_rate": 9.71879452545399e-10, |
| "loss": 0.0785, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.992063492063492, |
| "grad_norm": 0.9572624564170837, |
| "learning_rate": 4.3195419778319095e-10, |
| "loss": 0.1097, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.996031746031746, |
| "grad_norm": 0.8002074360847473, |
| "learning_rate": 1.0798971562364647e-10, |
| "loss": 0.0811, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.6123743653297424, |
| "learning_rate": 0.0, |
| "loss": 0.0529, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 504, |
| "total_flos": 54880760659968.0, |
| "train_loss": 0.23895617312796058, |
| "train_runtime": 2813.5476, |
| "train_samples_per_second": 2.86, |
| "train_steps_per_second": 0.179 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 504, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 54880760659968.0, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |