{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9433962264150944,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.5589,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.595,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 3e-06,
      "loss": 0.5789,
      "step": 3
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.4756,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 5e-06,
      "loss": 0.459,
      "step": 5
    },
    {
      "epoch": 0.03,
      "learning_rate": 6e-06,
      "loss": 0.4651,
      "step": 6
    },
    {
      "epoch": 0.03,
      "learning_rate": 7e-06,
      "loss": 0.4627,
      "step": 7
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.4229,
      "step": 8
    },
    {
      "epoch": 0.04,
      "learning_rate": 9e-06,
      "loss": 0.4236,
      "step": 9
    },
    {
      "epoch": 0.05,
      "learning_rate": 1e-05,
      "loss": 0.4091,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.4054,
      "step": 11
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2e-05,
      "loss": 0.4086,
      "step": 12
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.4248,
      "step": 13
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4e-05,
      "loss": 0.4115,
      "step": 14
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3929,
      "step": 15
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.3826,
      "step": 16
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7e-05,
      "loss": 0.3515,
      "step": 17
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8e-05,
      "loss": 0.3855,
      "step": 18
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9e-05,
      "loss": 0.3749,
      "step": 19
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 0.3924,
      "step": 20
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999869950890106e-05,
      "loss": 0.3435,
      "step": 21
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999479806942976e-05,
      "loss": 0.3812,
      "step": 22
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998829578306187e-05,
      "loss": 0.3787,
      "step": 23
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997919281892066e-05,
      "loss": 0.3587,
      "step": 24
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9996748941377265e-05,
      "loss": 0.4033,
      "step": 25
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995318587202132e-05,
      "loss": 0.352,
      "step": 26
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999362825656992e-05,
      "loss": 0.349,
      "step": 27
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9991677993445832e-05,
      "loss": 0.3826,
      "step": 28
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998946784855586e-05,
      "loss": 0.3636,
      "step": 29
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9986997879385488e-05,
      "loss": 0.3419,
      "step": 30
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998426815017817e-05,
      "loss": 0.4001,
      "step": 31
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998127873193367e-05,
      "loss": 0.3487,
      "step": 32
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9978029702406236e-05,
      "loss": 0.3757,
      "step": 33
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9974521146102535e-05,
      "loss": 0.3584,
      "step": 34
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9970753154279506e-05,
      "loss": 0.3504,
      "step": 35
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9966725824941933e-05,
      "loss": 0.3571,
      "step": 36
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996243926283994e-05,
      "loss": 0.359,
      "step": 37
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9957893579466252e-05,
      "loss": 0.3369,
      "step": 38
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9953088893053268e-05,
      "loss": 0.3519,
      "step": 39
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.994802532857004e-05,
      "loss": 0.3577,
      "step": 40
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9942703017718977e-05,
      "loss": 0.3461,
      "step": 41
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9937122098932428e-05,
      "loss": 0.3426,
      "step": 42
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9931282717369104e-05,
      "loss": 0.374,
      "step": 43
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.992518502491028e-05,
      "loss": 0.3566,
      "step": 44
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.991882918015585e-05,
      "loss": 0.3701,
      "step": 45
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.99122153484202e-05,
      "loss": 0.342,
      "step": 46
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9905343701727926e-05,
      "loss": 0.3396,
      "step": 47
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.3572,
      "step": 48
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.989082768509579e-05,
      "loss": 0.3535,
      "step": 49
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9883183692714935e-05,
      "loss": 0.3543,
      "step": 50
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9875282640485647e-05,
      "loss": 0.3513,
      "step": 51
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.986712473391289e-05,
      "loss": 0.3565,
      "step": 52
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985871018518236e-05,
      "loss": 0.335,
      "step": 53
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9850039213154972e-05,
      "loss": 0.328,
      "step": 54
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.984111204336116e-05,
      "loss": 0.3126,
      "step": 55
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9831928907995032e-05,
      "loss": 0.3719,
      "step": 56
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9822490045908293e-05,
      "loss": 0.3852,
      "step": 57
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9812795702604073e-05,
      "loss": 0.351,
      "step": 58
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9802846130230508e-05,
      "loss": 0.3666,
      "step": 59
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9792641587574212e-05,
      "loss": 0.3609,
      "step": 60
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.978218234005352e-05,
      "loss": 0.3126,
      "step": 61
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9771468659711595e-05,
      "loss": 0.3305,
      "step": 62
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9760500825209362e-05,
      "loss": 0.3426,
      "step": 63
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 0.3118,
      "step": 64
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9737803841412732e-05,
      "loss": 0.3213,
      "step": 65
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9726075282462847e-05,
      "loss": 0.3293,
      "step": 66
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9714093750026308e-05,
      "loss": 0.3571,
      "step": 67
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9701859555740647e-05,
      "loss": 0.3239,
      "step": 68
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9689373017815076e-05,
      "loss": 0.3312,
      "step": 69
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9676634461022222e-05,
      "loss": 0.3802,
      "step": 70
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9663644216689683e-05,
      "loss": 0.327,
      "step": 71
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.96504026226914e-05,
      "loss": 0.3728,
      "step": 72
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9636910023438878e-05,
      "loss": 0.3283,
      "step": 73
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9623166769872216e-05,
      "loss": 0.3309,
      "step": 74
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9609173219450998e-05,
      "loss": 0.3307,
      "step": 75
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.3376,
      "step": 76
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9580436690424617e-05,
      "loss": 0.3178,
      "step": 77
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9565694459251457e-05,
      "loss": 0.3214,
      "step": 78
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.955070342606831e-05,
      "loss": 0.3754,
      "step": 79
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9535463980789277e-05,
      "loss": 0.3474,
      "step": 80
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9519976519789615e-05,
      "loss": 0.3201,
      "step": 81
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9504241445895434e-05,
      "loss": 0.3326,
      "step": 82
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9488259168373198e-05,
      "loss": 0.3196,
      "step": 83
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9472030102919102e-05,
      "loss": 0.3076,
      "step": 84
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9455554671648248e-05,
      "loss": 0.3871,
      "step": 85
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9438833303083677e-05,
      "loss": 0.3449,
      "step": 86
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9421866432145203e-05,
      "loss": 0.3474,
      "step": 87
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9404654500138117e-05,
      "loss": 0.3478,
      "step": 88
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.938719795474171e-05,
      "loss": 0.3307,
      "step": 89
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.936949724999762e-05,
      "loss": 0.3277,
      "step": 90
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9351552846298026e-05,
      "loss": 0.3278,
      "step": 91
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9333365210373668e-05,
      "loss": 0.3204,
      "step": 92
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9314934815281728e-05,
      "loss": 0.3328,
      "step": 93
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9296262140393498e-05,
      "loss": 0.3183,
      "step": 94
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9277347671381924e-05,
      "loss": 0.3662,
      "step": 95
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.925819190020898e-05,
      "loss": 0.3034,
      "step": 96
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 0.304,
      "step": 97
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.921915845059505e-05,
      "loss": 0.2996,
      "step": 98
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9199281787407136e-05,
      "loss": 0.3254,
      "step": 99
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9179165852537596e-05,
      "loss": 0.3244,
      "step": 100
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9158811169198315e-05,
      "loss": 0.3305,
      "step": 101
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.913821826681099e-05,
      "loss": 0.3426,
      "step": 102
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9117387680993333e-05,
      "loss": 0.3289,
      "step": 103
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.3276,
      "step": 104
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9075015632434382e-05,
      "loss": 0.3383,
      "step": 105
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9053475271782523e-05,
      "loss": 0.3166,
      "step": 106
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9031699431850553e-05,
      "loss": 0.3272,
      "step": 107
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.3438,
      "step": 108
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.898744358579921e-05,
      "loss": 0.329,
      "step": 109
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.896496473076651e-05,
      "loss": 0.3292,
      "step": 110
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8942252698597113e-05,
      "loss": 0.3305,
      "step": 111
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.891930808002694e-05,
      "loss": 0.3257,
      "step": 112
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.889613147184143e-05,
      "loss": 0.3225,
      "step": 113
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8872723476860033e-05,
      "loss": 0.3302,
      "step": 114
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8849084703920536e-05,
      "loss": 0.3468,
      "step": 115
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8825215767863215e-05,
      "loss": 0.3448,
      "step": 116
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8801117289514847e-05,
      "loss": 0.3381,
      "step": 117
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8776789895672557e-05,
      "loss": 0.3191,
      "step": 118
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8752234219087538e-05,
      "loss": 0.2992,
      "step": 119
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8727450898448562e-05,
      "loss": 0.3463,
      "step": 120
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8702440578365387e-05,
      "loss": 0.3306,
      "step": 121
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.867720390935199e-05,
      "loss": 0.3113,
      "step": 122
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8651741547809633e-05,
      "loss": 0.3349,
      "step": 123
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8626054156009807e-05,
      "loss": 0.3386,
      "step": 124
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8600142402077006e-05,
      "loss": 0.3454,
      "step": 125
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8574006959971335e-05,
      "loss": 0.2986,
      "step": 126
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8547648509470985e-05,
      "loss": 0.2873,
      "step": 127
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8521067736154567e-05,
      "loss": 0.3618,
      "step": 128
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.849426533138326e-05,
      "loss": 0.3221,
      "step": 129
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8467241992282842e-05,
      "loss": 0.3083,
      "step": 130
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8439998421725555e-05,
      "loss": 0.3481,
      "step": 131
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.3263,
      "step": 132
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8384853426351793e-05,
      "loss": 0.3102,
      "step": 133
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.835695343584683e-05,
      "loss": 0.3209,
      "step": 134
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8328836082470717e-05,
      "loss": 0.3233,
      "step": 135
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8300502097550807e-05,
      "loss": 0.3186,
      "step": 136
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8271952218049004e-05,
      "loss": 0.353,
      "step": 137
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8243187186542594e-05,
      "loss": 0.3058,
      "step": 138
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8214207751204917e-05,
      "loss": 0.3083,
      "step": 139
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8185014665785936e-05,
      "loss": 0.2891,
      "step": 140
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8155608689592604e-05,
      "loss": 0.3288,
      "step": 141
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8125990587469125e-05,
      "loss": 0.2958,
      "step": 142
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.809616112977706e-05,
      "loss": 0.3177,
      "step": 143
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8066121092375303e-05,
      "loss": 0.3451,
      "step": 144
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8035871256599868e-05,
      "loss": 0.2998,
      "step": 145
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.8005412409243604e-05,
      "loss": 0.3137,
      "step": 146
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.797474534253571e-05,
      "loss": 0.2978,
      "step": 147
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7943870854121126e-05,
      "loss": 0.3337,
      "step": 148
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7912789747039805e-05,
      "loss": 0.3047,
      "step": 149
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7881502829705808e-05,
      "loss": 0.3132,
      "step": 150
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.785001091588628e-05,
      "loss": 0.3065,
      "step": 151
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.3199,
      "step": 152
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.778641538049755e-05,
      "loss": 0.2953,
      "step": 153
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7754313413036907e-05,
      "loss": 0.3494,
      "step": 154
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7722009757264818e-05,
      "loss": 0.3144,
      "step": 155
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.768950525339362e-05,
      "loss": 0.3111,
      "step": 156
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.765680074685968e-05,
      "loss": 0.3105,
      "step": 157
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7623897088301387e-05,
      "loss": 0.2959,
      "step": 158
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7590795133537035e-05,
      "loss": 0.3398,
      "step": 159
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.3265,
      "step": 160
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7523999784429238e-05,
      "loss": 0.303,
      "step": 161
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7490308127420928e-05,
      "loss": 0.2883,
      "step": 162
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7456421648831658e-05,
      "loss": 0.3115,
      "step": 163
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.74223412300427e-05,
      "loss": 0.2881,
      "step": 164
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7388067757479684e-05,
      "loss": 0.2838,
      "step": 165
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7353602122589528e-05,
      "loss": 0.3163,
      "step": 166
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7318945221817255e-05,
      "loss": 0.3044,
      "step": 167
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7284097956582694e-05,
      "loss": 0.3064,
      "step": 168
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7249061233257004e-05,
      "loss": 0.2758,
      "step": 169
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.721383596313912e-05,
      "loss": 0.3367,
      "step": 170
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7178423062432052e-05,
      "loss": 0.3172,
      "step": 171
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7142823452219036e-05,
      "loss": 0.3042,
      "step": 172
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7107038058439606e-05,
      "loss": 0.3262,
      "step": 173
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.3223,
      "step": 174
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.7034913648076357e-05,
      "loss": 0.314,
      "step": 175
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.699857650743562e-05,
      "loss": 0.3072,
      "step": 176
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6962057335065814e-05,
      "loss": 0.29,
      "step": 177
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.692535708082412e-05,
      "loss": 0.286,
      "step": 178
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6888476699277608e-05,
      "loss": 0.3031,
      "step": 179
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6851417149678442e-05,
      "loss": 0.3431,
      "step": 180
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6814179395938915e-05,
      "loss": 0.2891,
      "step": 181
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6776764406606362e-05,
      "loss": 0.2751,
      "step": 182
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6739173154838e-05,
      "loss": 0.3016,
      "step": 183
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.67014066183756e-05,
      "loss": 0.2722,
      "step": 184
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6663465779520042e-05,
      "loss": 0.3048,
      "step": 185
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6625351625105796e-05,
      "loss": 0.2792,
      "step": 186
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6587065146475232e-05,
      "loss": 0.3221,
      "step": 187
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.3173,
      "step": 188
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.650997920431936e-05,
      "loss": 0.337,
      "step": 189
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6471181745785673e-05,
      "loss": 0.3171,
      "step": 190
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.643221597296679e-05,
      "loss": 0.291,
      "step": 191
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6393082899355516e-05,
      "loss": 0.3013,
      "step": 192
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6353783542796137e-05,
      "loss": 0.276,
      "step": 193
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.631431892545791e-05,
      "loss": 0.3074,
      "step": 194
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.627469007380852e-05,
      "loss": 0.3104,
      "step": 195
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.3518,
      "step": 196
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.619494379477863e-05,
      "loss": 0.3075,
      "step": 197
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6154828441584655e-05,
      "loss": 0.2868,
      "step": 198
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6114553002398602e-05,
      "loss": 0.3187,
      "step": 199
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.607411852477748e-05,
      "loss": 0.2594,
      "step": 200
    }
  ],
  "max_steps": 636,
  "num_train_epochs": 3,
  "total_flos": 5.211759089759027e+16,
  "trial_name": null,
  "trial_params": null
}