{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 89992,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.972219752866922e-05,
      "loss": 2.1683,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9444395057338435e-05,
      "loss": 1.892,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9166592586007645e-05,
      "loss": 1.8415,
      "step": 1500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.888879011467686e-05,
      "loss": 1.7915,
      "step": 2000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.861098764334608e-05,
      "loss": 1.7884,
      "step": 2500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8333185172015294e-05,
      "loss": 1.7897,
      "step": 3000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.805538270068451e-05,
      "loss": 1.7978,
      "step": 3500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.777758022935373e-05,
      "loss": 1.7701,
      "step": 4000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.7499777758022936e-05,
      "loss": 1.7776,
      "step": 4500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.722197528669215e-05,
      "loss": 1.7768,
      "step": 5000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.694417281536137e-05,
      "loss": 1.784,
      "step": 5500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.6666370344030586e-05,
      "loss": 1.7417,
      "step": 6000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.6388567872699795e-05,
      "loss": 1.7433,
      "step": 6500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.611076540136901e-05,
      "loss": 1.7252,
      "step": 7000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.583296293003823e-05,
      "loss": 1.7322,
      "step": 7500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.555516045870744e-05,
      "loss": 1.7337,
      "step": 8000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.5277357987376654e-05,
      "loss": 1.7481,
      "step": 8500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.499955551604587e-05,
      "loss": 1.7261,
      "step": 9000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.472175304471509e-05,
      "loss": 1.7654,
      "step": 9500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.44439505733843e-05,
      "loss": 1.7241,
      "step": 10000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.416614810205352e-05,
      "loss": 1.7256,
      "step": 10500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.388834563072273e-05,
      "loss": 1.7115,
      "step": 11000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.3610543159391946e-05,
      "loss": 1.7019,
      "step": 11500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.333274068806116e-05,
      "loss": 1.7066,
      "step": 12000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.305493821673038e-05,
      "loss": 1.7005,
      "step": 12500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.2777135745399595e-05,
      "loss": 1.7056,
      "step": 13000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.249933327406881e-05,
      "loss": 1.7052,
      "step": 13500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.222153080273802e-05,
      "loss": 1.7141,
      "step": 14000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.194372833140724e-05,
      "loss": 1.7128,
      "step": 14500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.1665925860076454e-05,
      "loss": 1.6943,
      "step": 15000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.138812338874567e-05,
      "loss": 1.7187,
      "step": 15500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.1110320917414887e-05,
      "loss": 1.7134,
      "step": 16000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.08325184460841e-05,
      "loss": 1.7088,
      "step": 16500
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.055471597475331e-05,
      "loss": 1.6984,
      "step": 17000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.027691350342253e-05,
      "loss": 1.7107,
      "step": 17500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.9999111032091745e-05,
      "loss": 1.7043,
      "step": 18000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.9721308560760955e-05,
      "loss": 1.7023,
      "step": 18500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.944350608943017e-05,
      "loss": 1.6946,
      "step": 19000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.916570361809939e-05,
      "loss": 1.6791,
      "step": 19500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.8887901146768604e-05,
      "loss": 1.7051,
      "step": 20000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.8610098675437814e-05,
      "loss": 1.6723,
      "step": 20500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.833229620410703e-05,
      "loss": 1.7001,
      "step": 21000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.8054493732776247e-05,
      "loss": 1.6728,
      "step": 21500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.777669126144546e-05,
      "loss": 1.6689,
      "step": 22000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.749888879011468e-05,
      "loss": 1.6921,
      "step": 22500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.7221086318783896e-05,
      "loss": 1.653,
      "step": 23000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.6943283847453105e-05,
      "loss": 1.6692,
      "step": 23500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.666548137612232e-05,
      "loss": 1.6404,
      "step": 24000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.638767890479154e-05,
      "loss": 1.6781,
      "step": 24500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.6109876433460755e-05,
      "loss": 1.6622,
      "step": 25000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.583207396212997e-05,
      "loss": 1.645,
      "step": 25500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.555427149079919e-05,
      "loss": 1.6476,
      "step": 26000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.52764690194684e-05,
      "loss": 1.6429,
      "step": 26500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.4998666548137613e-05,
      "loss": 1.6649,
      "step": 27000
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.472086407680683e-05,
      "loss": 1.6437,
      "step": 27500
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.4443061605476046e-05,
      "loss": 1.6383,
      "step": 28000
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.416525913414526e-05,
      "loss": 1.6574,
      "step": 28500
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.388745666281448e-05,
      "loss": 1.6443,
      "step": 29000
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.360965419148369e-05,
      "loss": 1.6584,
      "step": 29500
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.3331851720152905e-05,
      "loss": 1.6487,
      "step": 30000
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.3054049248822115e-05,
      "loss": 1.6691,
      "step": 30500
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.277624677749133e-05,
      "loss": 1.663,
      "step": 31000
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.249844430616055e-05,
      "loss": 1.6332,
      "step": 31500
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.2220641834829764e-05,
      "loss": 1.6474,
      "step": 32000
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.194283936349898e-05,
      "loss": 1.6257,
      "step": 32500
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.166503689216819e-05,
      "loss": 1.6491,
      "step": 33000
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.1387234420837406e-05,
      "loss": 1.6215,
      "step": 33500
    },
    {
      "epoch": 1.51,
      "learning_rate": 3.110943194950662e-05,
      "loss": 1.6691,
      "step": 34000
    },
    {
      "epoch": 1.53,
      "learning_rate": 3.083162947817584e-05,
      "loss": 1.6339,
      "step": 34500
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.0553827006845055e-05,
      "loss": 1.6701,
      "step": 35000
    },
    {
      "epoch": 1.58,
      "learning_rate": 3.027602453551427e-05,
      "loss": 1.6105,
      "step": 35500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.9998222064183485e-05,
      "loss": 1.6545,
      "step": 36000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.97204195928527e-05,
      "loss": 1.6452,
      "step": 36500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.9442617121521914e-05,
      "loss": 1.6699,
      "step": 37000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.916481465019113e-05,
      "loss": 1.6366,
      "step": 37500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.8887012178860344e-05,
      "loss": 1.624,
      "step": 38000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.860920970752956e-05,
      "loss": 1.6441,
      "step": 38500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.8331407236198777e-05,
      "loss": 1.6528,
      "step": 39000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.805360476486799e-05,
      "loss": 1.6217,
      "step": 39500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.7775802293537206e-05,
      "loss": 1.6429,
      "step": 40000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.7497999822206422e-05,
      "loss": 1.6334,
      "step": 40500
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.7220197350875635e-05,
      "loss": 1.6595,
      "step": 41000
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.6942394879544852e-05,
      "loss": 1.636,
      "step": 41500
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.6664592408214068e-05,
      "loss": 1.6176,
      "step": 42000
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.6386789936883278e-05,
      "loss": 1.6374,
      "step": 42500
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.610898746555249e-05,
      "loss": 1.6541,
      "step": 43000
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.5831184994221707e-05,
      "loss": 1.6518,
      "step": 43500
    },
    {
      "epoch": 1.96,
      "learning_rate": 2.5553382522890924e-05,
      "loss": 1.645,
      "step": 44000
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.5275580051560137e-05,
      "loss": 1.637,
      "step": 44500
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.4997777580229356e-05,
      "loss": 1.6229,
      "step": 45000
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.471997510889857e-05,
      "loss": 1.601,
      "step": 45500
    },
    {
      "epoch": 2.04,
      "learning_rate": 2.4442172637567782e-05,
      "loss": 1.6071,
      "step": 46000
    },
    {
      "epoch": 2.07,
      "learning_rate": 2.4164370166237e-05,
      "loss": 1.6028,
      "step": 46500
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.3886567694906215e-05,
      "loss": 1.6091,
      "step": 47000
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.3608765223575428e-05,
      "loss": 1.5878,
      "step": 47500
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.3330962752244645e-05,
      "loss": 1.6169,
      "step": 48000
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.305316028091386e-05,
      "loss": 1.6102,
      "step": 48500
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.2775357809583074e-05,
      "loss": 1.6253,
      "step": 49000
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.249755533825229e-05,
      "loss": 1.636,
      "step": 49500
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.2219752866921507e-05,
      "loss": 1.6303,
      "step": 50000
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.194195039559072e-05,
      "loss": 1.6342,
      "step": 50500
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.1664147924259936e-05,
      "loss": 1.6082,
      "step": 51000
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.138634545292915e-05,
      "loss": 1.6241,
      "step": 51500
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.1108542981598366e-05,
      "loss": 1.6364,
      "step": 52000
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.083074051026758e-05,
      "loss": 1.5943,
      "step": 52500
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.0552938038936795e-05,
      "loss": 1.5904,
      "step": 53000
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.027513556760601e-05,
      "loss": 1.6107,
      "step": 53500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.9997333096275224e-05,
      "loss": 1.623,
      "step": 54000
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.971953062494444e-05,
      "loss": 1.5865,
      "step": 54500
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.9441728153613657e-05,
      "loss": 1.6293,
      "step": 55000
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.916392568228287e-05,
      "loss": 1.6075,
      "step": 55500
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.8886123210952087e-05,
      "loss": 1.6136,
      "step": 56000
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.8608320739621303e-05,
      "loss": 1.6275,
      "step": 56500
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.8330518268290516e-05,
      "loss": 1.604,
      "step": 57000
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.805271579695973e-05,
      "loss": 1.6248,
      "step": 57500
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.7774913325628945e-05,
      "loss": 1.6077,
      "step": 58000
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.749711085429816e-05,
      "loss": 1.6032,
      "step": 58500
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.7219308382967375e-05,
      "loss": 1.6181,
      "step": 59000
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.694150591163659e-05,
      "loss": 1.6061,
      "step": 59500
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.6663703440305804e-05,
      "loss": 1.5769,
      "step": 60000
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.638590096897502e-05,
      "loss": 1.6039,
      "step": 60500
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.6108098497644237e-05,
      "loss": 1.6071,
      "step": 61000
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.583029602631345e-05,
      "loss": 1.5944,
      "step": 61500
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.5552493554982667e-05,
      "loss": 1.5803,
      "step": 62000
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.5274691083651883e-05,
      "loss": 1.6037,
      "step": 62500
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.4996888612321098e-05,
      "loss": 1.5921,
      "step": 63000
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.4719086140990309e-05,
      "loss": 1.6005,
      "step": 63500
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.4441283669659525e-05,
      "loss": 1.5907,
      "step": 64000
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.416348119832874e-05,
      "loss": 1.6009,
      "step": 64500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.3885678726997955e-05,
      "loss": 1.5981,
      "step": 65000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.3607876255667171e-05,
      "loss": 1.6182,
      "step": 65500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.3330073784336386e-05,
      "loss": 1.6073,
      "step": 66000
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.30522713130056e-05,
      "loss": 1.6208,
      "step": 66500
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.2774468841674817e-05,
      "loss": 1.5967,
      "step": 67000
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.2496666370344032e-05,
      "loss": 1.6261,
      "step": 67500
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.2218863899013246e-05,
      "loss": 1.5709,
      "step": 68000
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.1941061427682461e-05,
      "loss": 1.604,
      "step": 68500
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.1663258956351676e-05,
      "loss": 1.5782,
      "step": 69000
    },
    {
      "epoch": 3.09,
      "learning_rate": 1.1385456485020892e-05,
      "loss": 1.5971,
      "step": 69500
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.1107654013690107e-05,
      "loss": 1.6089,
      "step": 70000
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.0829851542359322e-05,
      "loss": 1.5871,
      "step": 70500
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.0552049071028536e-05,
      "loss": 1.5854,
      "step": 71000
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.0274246599697751e-05,
      "loss": 1.6105,
      "step": 71500
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.996444128366966e-06,
      "loss": 1.5779,
      "step": 72000
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.718641657036182e-06,
      "loss": 1.5993,
      "step": 72500
    },
    {
      "epoch": 3.24,
      "learning_rate": 9.440839185705397e-06,
      "loss": 1.5811,
      "step": 73000
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.163036714374612e-06,
      "loss": 1.5707,
      "step": 73500
    },
    {
      "epoch": 3.29,
      "learning_rate": 8.885234243043826e-06,
      "loss": 1.5867,
      "step": 74000
    },
    {
      "epoch": 3.31,
      "learning_rate": 8.607431771713041e-06,
      "loss": 1.6162,
      "step": 74500
    },
    {
      "epoch": 3.33,
      "learning_rate": 8.329629300382257e-06,
      "loss": 1.6259,
      "step": 75000
    },
    {
      "epoch": 3.36,
      "learning_rate": 8.051826829051472e-06,
      "loss": 1.5916,
      "step": 75500
    },
    {
      "epoch": 3.38,
      "learning_rate": 7.774024357720687e-06,
      "loss": 1.5834,
      "step": 76000
    },
    {
      "epoch": 3.4,
      "learning_rate": 7.496221886389902e-06,
      "loss": 1.593,
      "step": 76500
    },
    {
      "epoch": 3.42,
      "learning_rate": 7.218419415059116e-06,
      "loss": 1.5937,
      "step": 77000
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.940616943728332e-06,
      "loss": 1.5642,
      "step": 77500
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.662814472397546e-06,
      "loss": 1.575,
      "step": 78000
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.385012001066762e-06,
      "loss": 1.5726,
      "step": 78500
    },
    {
      "epoch": 3.51,
      "learning_rate": 6.1072095297359775e-06,
      "loss": 1.5616,
      "step": 79000
    },
    {
      "epoch": 3.53,
      "learning_rate": 5.829407058405191e-06,
      "loss": 1.5951,
      "step": 79500
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.551604587074407e-06,
      "loss": 1.6033,
      "step": 80000
    },
    {
      "epoch": 3.58,
      "learning_rate": 5.2738021157436225e-06,
      "loss": 1.593,
      "step": 80500
    },
    {
      "epoch": 3.6,
      "learning_rate": 4.995999644412837e-06,
      "loss": 1.5798,
      "step": 81000
    },
    {
      "epoch": 3.62,
      "learning_rate": 4.718197173082052e-06,
      "loss": 1.5774,
      "step": 81500
    },
    {
      "epoch": 3.64,
      "learning_rate": 4.4403947017512674e-06,
      "loss": 1.5894,
      "step": 82000
    },
    {
      "epoch": 3.67,
      "learning_rate": 4.162592230420482e-06,
      "loss": 1.5609,
      "step": 82500
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.884789759089697e-06,
      "loss": 1.5854,
      "step": 83000
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.6069872877589124e-06,
      "loss": 1.5871,
      "step": 83500
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.329184816428127e-06,
      "loss": 1.5745,
      "step": 84000
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.0513823450973422e-06,
      "loss": 1.5986,
      "step": 84500
    },
    {
      "epoch": 3.78,
      "learning_rate": 2.773579873766557e-06,
      "loss": 1.5986,
      "step": 85000
    },
    {
      "epoch": 3.8,
      "learning_rate": 2.495777402435772e-06,
      "loss": 1.5965,
      "step": 85500
    },
    {
      "epoch": 3.82,
      "learning_rate": 2.217974931104987e-06,
      "loss": 1.583,
      "step": 86000
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.9401724597742023e-06,
      "loss": 1.5983,
      "step": 86500
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.662369988443417e-06,
      "loss": 1.5991,
      "step": 87000
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.3845675171126324e-06,
      "loss": 1.5761,
      "step": 87500
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.1067650457818473e-06,
      "loss": 1.583,
      "step": 88000
    },
    {
      "epoch": 3.93,
      "learning_rate": 8.289625744510624e-07,
      "loss": 1.6017,
      "step": 88500
    },
    {
      "epoch": 3.96,
      "learning_rate": 5.511601031202774e-07,
      "loss": 1.574,
      "step": 89000
    },
    {
      "epoch": 3.98,
      "learning_rate": 2.733576317894924e-07,
      "loss": 1.5734,
      "step": 89500
    },
    {
      "epoch": 4.0,
      "step": 89992,
      "total_flos": 2.1371909200869427e+17,
      "train_loss": 1.6458884161285257,
      "train_runtime": 76560.3935,
      "train_samples_per_second": 3.526,
      "train_steps_per_second": 1.175
    }
  ],
  "max_steps": 89992,
  "num_train_epochs": 4,
  "total_flos": 2.1371909200869427e+17,
  "trial_name": null,
  "trial_params": null
}