| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9963382737576287, |
| "global_step": 2148, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.0769230769230774e-07, |
| "loss": 1.4717, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 6.153846153846155e-07, |
| "loss": 1.4394, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 9.230769230769232e-07, |
| "loss": 1.6015, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.230769230769231e-06, |
| "loss": 1.5068, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.5384615384615387e-06, |
| "loss": 1.6053, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8461538461538465e-06, |
| "loss": 1.5636, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.153846153846154e-06, |
| "loss": 1.4664, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.461538461538462e-06, |
| "loss": 1.2902, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7692307692307697e-06, |
| "loss": 1.3183, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0769230769230774e-06, |
| "loss": 1.4049, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.384615384615385e-06, |
| "loss": 1.4321, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.692307692307693e-06, |
| "loss": 1.3553, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 1.2785, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.307692307692308e-06, |
| "loss": 1.2924, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.615384615384616e-06, |
| "loss": 1.3955, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923076923076924e-06, |
| "loss": 1.3009, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 5.230769230769232e-06, |
| "loss": 1.22, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 5.538461538461539e-06, |
| "loss": 1.257, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 5.846153846153847e-06, |
| "loss": 1.3151, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 6.153846153846155e-06, |
| "loss": 1.2092, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 6.461538461538463e-06, |
| "loss": 1.2724, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 6.76923076923077e-06, |
| "loss": 1.2892, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 7.076923076923078e-06, |
| "loss": 1.2776, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 7.384615384615386e-06, |
| "loss": 1.3943, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 7.692307692307694e-06, |
| "loss": 1.3105, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.1544, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 8.307692307692309e-06, |
| "loss": 1.1968, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 8.615384615384617e-06, |
| "loss": 1.2995, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 8.923076923076925e-06, |
| "loss": 1.2815, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 9.230769230769232e-06, |
| "loss": 1.2486, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 9.53846153846154e-06, |
| "loss": 1.2095, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 9.846153846153848e-06, |
| "loss": 1.2916, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.0153846153846154e-05, |
| "loss": 1.2441, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.0461538461538463e-05, |
| "loss": 1.1681, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.076923076923077e-05, |
| "loss": 1.3134, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.1076923076923079e-05, |
| "loss": 1.2234, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.1384615384615385e-05, |
| "loss": 1.193, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.1692307692307694e-05, |
| "loss": 1.1637, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.2e-05, |
| "loss": 1.1679, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.230769230769231e-05, |
| "loss": 1.1705, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.2615384615384616e-05, |
| "loss": 1.2658, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.2923076923076925e-05, |
| "loss": 1.2689, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.3230769230769231e-05, |
| "loss": 1.1513, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.353846153846154e-05, |
| "loss": 1.2267, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.3846153846153847e-05, |
| "loss": 1.2034, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.4153846153846156e-05, |
| "loss": 1.2554, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.4461538461538462e-05, |
| "loss": 1.3188, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.4769230769230772e-05, |
| "loss": 1.2369, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.5076923076923078e-05, |
| "loss": 1.2056, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.5384615384615387e-05, |
| "loss": 1.1879, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.5692307692307693e-05, |
| "loss": 1.1966, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.2614, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.630769230769231e-05, |
| "loss": 1.2508, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.6615384615384618e-05, |
| "loss": 1.2303, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.6923076923076924e-05, |
| "loss": 1.183, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7230769230769234e-05, |
| "loss": 1.215, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.753846153846154e-05, |
| "loss": 1.1435, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.784615384615385e-05, |
| "loss": 1.2562, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.8153846153846155e-05, |
| "loss": 1.1887, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.8461538461538465e-05, |
| "loss": 1.2054, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.876923076923077e-05, |
| "loss": 1.2011, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.907692307692308e-05, |
| "loss": 1.1796, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9384615384615386e-05, |
| "loss": 1.267, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9692307692307696e-05, |
| "loss": 1.1658, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 2e-05, |
| "loss": 1.2247, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9999988626578683e-05, |
| "loss": 1.2006, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9999954506340598e-05, |
| "loss": 1.219, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.999989763936336e-05, |
| "loss": 1.1697, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9999818025776324e-05, |
| "loss": 1.2861, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9999715665760584e-05, |
| "loss": 1.2101, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.999959055954898e-05, |
| "loss": 1.1502, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9999442707426083e-05, |
| "loss": 1.236, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9999272109728216e-05, |
| "loss": 1.1992, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.999907876684343e-05, |
| "loss": 1.1795, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9998862679211524e-05, |
| "loss": 1.2148, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9998623847324027e-05, |
| "loss": 1.2245, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9998362271724203e-05, |
| "loss": 1.264, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.999807795300706e-05, |
| "loss": 1.2176, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.999777089181933e-05, |
| "loss": 1.2125, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9997441088859477e-05, |
| "loss": 1.1754, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9997088544877707e-05, |
| "loss": 1.234, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9996713260675938e-05, |
| "loss": 1.1579, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9996315237107823e-05, |
| "loss": 1.1571, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9995894475078747e-05, |
| "loss": 1.2166, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9995450975545808e-05, |
| "loss": 1.2772, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9994984739517826e-05, |
| "loss": 1.1261, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.999449576805534e-05, |
| "loss": 1.2533, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9993984062270604e-05, |
| "loss": 1.276, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9993449623327593e-05, |
| "loss": 1.268, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9992892452441986e-05, |
| "loss": 1.1903, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9992312550881167e-05, |
| "loss": 1.2145, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9991709919964227e-05, |
| "loss": 1.157, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9991084561061965e-05, |
| "loss": 1.2749, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.999043647559688e-05, |
| "loss": 1.2186, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9989765665043152e-05, |
| "loss": 1.2154, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.998907213092667e-05, |
| "loss": 1.1492, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9988355874825003e-05, |
| "loss": 1.1904, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9987616898367405e-05, |
| "loss": 1.188, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.998685520323482e-05, |
| "loss": 1.2361, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9986070791159855e-05, |
| "loss": 1.2308, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9985263663926808e-05, |
| "loss": 1.2864, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9984433823371636e-05, |
| "loss": 1.2753, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9983581271381964e-05, |
| "loss": 1.1349, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9982706009897076e-05, |
| "loss": 1.2002, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9981808040907925e-05, |
| "loss": 1.2492, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9980887366457093e-05, |
| "loss": 1.1327, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9979943988638834e-05, |
| "loss": 1.2301, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9978977909599033e-05, |
| "loss": 1.1957, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.997798913153521e-05, |
| "loss": 1.2296, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.997697765669653e-05, |
| "loss": 1.1441, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9975943487383768e-05, |
| "loss": 1.2527, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9974886625949344e-05, |
| "loss": 1.2105, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.997380707479728e-05, |
| "loss": 1.1275, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9972704836383216e-05, |
| "loss": 1.2446, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.997157991321439e-05, |
| "loss": 1.1617, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9970432307849654e-05, |
| "loss": 1.2486, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9969262022899442e-05, |
| "loss": 1.1145, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.996806906102579e-05, |
| "loss": 1.1679, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9966853424942303e-05, |
| "loss": 1.2007, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.996561511741417e-05, |
| "loss": 1.1321, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9964354141258156e-05, |
| "loss": 1.2503, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9963070499342575e-05, |
| "loss": 1.1545, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.996176419458731e-05, |
| "loss": 1.181, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.99604352299638e-05, |
| "loss": 1.2791, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.995908360849501e-05, |
| "loss": 1.2134, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.995770933325545e-05, |
| "loss": 1.1846, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.995631240737117e-05, |
| "loss": 1.1478, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9954892834019732e-05, |
| "loss": 1.2466, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9953450616430217e-05, |
| "loss": 1.2402, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9951985757883218e-05, |
| "loss": 1.2601, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9950498261710822e-05, |
| "loss": 1.2187, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9948988131296614e-05, |
| "loss": 1.1763, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9947455370075663e-05, |
| "loss": 1.2212, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.994589998153452e-05, |
| "loss": 1.1876, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9944321969211198e-05, |
| "loss": 1.1339, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.994272133669518e-05, |
| "loss": 1.172, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.99410980876274e-05, |
| "loss": 1.1351, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9939452225700236e-05, |
| "loss": 1.1818, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9937783754657503e-05, |
| "loss": 1.1651, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.993609267829445e-05, |
| "loss": 1.1706, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9934379000457736e-05, |
| "loss": 1.2073, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9932642725045442e-05, |
| "loss": 1.2186, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9930883856007047e-05, |
| "loss": 1.2186, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9929102397343417e-05, |
| "loss": 1.2422, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.992729835310681e-05, |
| "loss": 1.2145, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.992547172740086e-05, |
| "loss": 1.1659, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9923622524380558e-05, |
| "loss": 1.2141, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9921750748252266e-05, |
| "loss": 1.2255, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.991985640327368e-05, |
| "loss": 1.1423, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.991793949375383e-05, |
| "loss": 1.2387, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9916000024053087e-05, |
| "loss": 1.1863, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.991403799858313e-05, |
| "loss": 1.1563, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9912053421806948e-05, |
| "loss": 1.2779, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9910046298238827e-05, |
| "loss": 1.1193, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.990801663244434e-05, |
| "loss": 1.2894, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9905964429040335e-05, |
| "loss": 1.1416, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9903889692694926e-05, |
| "loss": 1.1508, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.990179242812748e-05, |
| "loss": 1.2213, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9899672640108618e-05, |
| "loss": 1.2102, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9897530333460187e-05, |
| "loss": 1.1932, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9895365513055256e-05, |
| "loss": 1.2408, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.989317818381811e-05, |
| "loss": 1.1277, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.989096835072423e-05, |
| "loss": 1.1267, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.988873601880029e-05, |
| "loss": 1.1168, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.988648119312414e-05, |
| "loss": 1.2347, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9884203878824798e-05, |
| "loss": 1.1614, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9881904081082433e-05, |
| "loss": 1.1882, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9879581805128357e-05, |
| "loss": 1.212, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.987723705624502e-05, |
| "loss": 1.1359, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.987486983976598e-05, |
| "loss": 1.1284, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.987248016107591e-05, |
| "loss": 1.1582, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9870068025610574e-05, |
| "loss": 1.1563, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9867633438856823e-05, |
| "loss": 1.228, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9865176406352562e-05, |
| "loss": 1.2773, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9862696933686774e-05, |
| "loss": 1.1441, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9860195026499473e-05, |
| "loss": 1.229, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9857670690481708e-05, |
| "loss": 1.0738, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9855123931375547e-05, |
| "loss": 1.2238, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.985255475497406e-05, |
| "loss": 1.078, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.984996316712132e-05, |
| "loss": 1.1612, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.984734917371236e-05, |
| "loss": 1.216, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9844712780693197e-05, |
| "loss": 1.2418, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9842053994060793e-05, |
| "loss": 1.177, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9839372819863046e-05, |
| "loss": 1.2282, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.983666926419878e-05, |
| "loss": 1.2166, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.983394333321773e-05, |
| "loss": 1.1608, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9831195033120534e-05, |
| "loss": 1.2168, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.98284243701587e-05, |
| "loss": 1.1598, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9825631350634615e-05, |
| "loss": 1.2375, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9822815980901518e-05, |
| "loss": 1.2102, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9819978267363476e-05, |
| "loss": 1.1879, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9817118216475407e-05, |
| "loss": 1.2183, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9814235834743014e-05, |
| "loss": 1.2001, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9811331128722807e-05, |
| "loss": 1.1948, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9808404105022076e-05, |
| "loss": 1.2549, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9805454770298877e-05, |
| "loss": 1.1271, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.980248313126201e-05, |
| "loss": 1.1222, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9799489194671023e-05, |
| "loss": 1.1922, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9796472967336172e-05, |
| "loss": 1.202, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.979343445611842e-05, |
| "loss": 1.1822, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9790373667929422e-05, |
| "loss": 1.2069, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9787290609731512e-05, |
| "loss": 1.1859, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9784185288537662e-05, |
| "loss": 1.2879, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9781057711411505e-05, |
| "loss": 1.0384, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9777907885467293e-05, |
| "loss": 1.2297, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9774735817869875e-05, |
| "loss": 1.2124, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9771541515834714e-05, |
| "loss": 1.2308, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9768324986627837e-05, |
| "loss": 1.1843, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9765086237565826e-05, |
| "loss": 1.1966, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.976182527601582e-05, |
| "loss": 1.2213, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.975854210939547e-05, |
| "loss": 1.2288, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.975523674517295e-05, |
| "loss": 1.2117, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.9751909190866916e-05, |
| "loss": 1.2378, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.9748559454046506e-05, |
| "loss": 1.2743, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.974518754233131e-05, |
| "loss": 1.1426, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.9741793463391365e-05, |
| "loss": 1.2211, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.973837722494713e-05, |
| "loss": 1.2085, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.973493883476947e-05, |
| "loss": 1.1199, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.973147830067963e-05, |
| "loss": 1.1606, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.9727995630549237e-05, |
| "loss": 1.1984, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.9724490832300266e-05, |
| "loss": 1.2662, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.9720963913905028e-05, |
| "loss": 1.2539, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.9717414883386148e-05, |
| "loss": 1.1718, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.9713843748816545e-05, |
| "loss": 1.189, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.971025051831943e-05, |
| "loss": 1.1482, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.970663520006826e-05, |
| "loss": 1.2271, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.9702997802286754e-05, |
| "loss": 1.159, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.9699338333248833e-05, |
| "loss": 1.2196, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.9695656801278635e-05, |
| "loss": 1.1728, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.9691953214750488e-05, |
| "loss": 1.2025, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.9688227582088878e-05, |
| "loss": 1.2473, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.968447991176844e-05, |
| "loss": 1.1849, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.968071021231395e-05, |
| "loss": 1.1599, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.9676918492300276e-05, |
| "loss": 1.2472, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.9673104760352387e-05, |
| "loss": 1.2208, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.966926902514532e-05, |
| "loss": 1.2025, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.9665411295404156e-05, |
| "loss": 1.2443, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.966153157990402e-05, |
| "loss": 1.2123, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.9657629887470038e-05, |
| "loss": 1.226, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.9653706226977323e-05, |
| "loss": 1.1827, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.9649760607350965e-05, |
| "loss": 1.1731, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.9645793037566008e-05, |
| "loss": 1.2196, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.9641803526647414e-05, |
| "loss": 1.2292, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.9637792083670072e-05, |
| "loss": 1.2433, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.963375871775873e-05, |
| "loss": 1.1826, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.962970343808804e-05, |
| "loss": 1.1897, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.962562625388247e-05, |
| "loss": 1.1795, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.962152717441634e-05, |
| "loss": 1.0895, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.9617406209013748e-05, |
| "loss": 1.1578, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.9613263367048595e-05, |
| "loss": 1.0988, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.960909865794454e-05, |
| "loss": 1.1786, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.9604912091174978e-05, |
| "loss": 1.1984, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.960070367626303e-05, |
| "loss": 1.2843, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.9596473422781507e-05, |
| "loss": 1.1384, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.95922213403529e-05, |
| "loss": 1.1976, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.9587947438649362e-05, |
| "loss": 1.297, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.958365172739266e-05, |
| "loss": 1.1631, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.9579334216354184e-05, |
| "loss": 1.0906, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.957499491535491e-05, |
| "loss": 1.1082, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.957063383426538e-05, |
| "loss": 1.1622, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.956625098300567e-05, |
| "loss": 1.2316, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.956184637154539e-05, |
| "loss": 1.2528, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9557420009903636e-05, |
| "loss": 1.2346, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9552971908148985e-05, |
| "loss": 1.1708, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9548502076399458e-05, |
| "loss": 1.1591, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9544010524822517e-05, |
| "loss": 1.2303, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9539497263635027e-05, |
| "loss": 1.1443, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9534962303103223e-05, |
| "loss": 1.19, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.9530405653542716e-05, |
| "loss": 1.1791, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.9525827325318442e-05, |
| "loss": 1.1365, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.9521227328844654e-05, |
| "loss": 1.1455, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.951660567458489e-05, |
| "loss": 1.1289, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.951196237305195e-05, |
| "loss": 1.1259, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.9507297434807888e-05, |
| "loss": 1.201, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.9502610870463955e-05, |
| "loss": 1.1428, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9497902690680613e-05, |
| "loss": 1.1922, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9493172906167484e-05, |
| "loss": 1.296, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.948842152768333e-05, |
| "loss": 1.2057, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9483648566036044e-05, |
| "loss": 1.1589, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9478854032082598e-05, |
| "loss": 1.1562, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9474037936729052e-05, |
| "loss": 1.2475, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9469200290930494e-05, |
| "loss": 1.2004, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9464341105691045e-05, |
| "loss": 1.134, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.9459460392063816e-05, |
| "loss": 1.1794, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.9454558161150895e-05, |
| "loss": 1.2167, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.94496344241033e-05, |
| "loss": 1.1299, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.944468919212098e-05, |
| "loss": 1.1845, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.9439722476452783e-05, |
| "loss": 1.1807, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.9434734288396418e-05, |
| "loss": 1.1416, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.942972463929843e-05, |
| "loss": 1.1854, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.9424693540554195e-05, |
| "loss": 1.1516, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.9419641003607874e-05, |
| "loss": 1.09, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.941456703995239e-05, |
| "loss": 1.2451, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.9409471661129414e-05, |
| "loss": 1.1746, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.940435487872932e-05, |
| "loss": 1.1115, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.9399216704391172e-05, |
| "loss": 1.0982, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.93940571498027e-05, |
| "loss": 1.2504, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.938887622670025e-05, |
| "loss": 1.1533, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.93836739468688e-05, |
| "loss": 1.1865, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.937845032214188e-05, |
| "loss": 1.1739, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.93732053644016e-05, |
| "loss": 1.1649, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.9367939085578576e-05, |
| "loss": 1.0735, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.936265149765193e-05, |
| "loss": 1.2163, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.9357342612649254e-05, |
| "loss": 1.232, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.935201244264659e-05, |
| "loss": 1.1653, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.934666099976839e-05, |
| "loss": 1.2064, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.9341288296187494e-05, |
| "loss": 1.1383, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.9335894344125105e-05, |
| "loss": 1.0994, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.933047915585077e-05, |
| "loss": 1.1814, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.932504274368232e-05, |
| "loss": 1.1784, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.9319585119985886e-05, |
| "loss": 1.1661, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.9314106297175833e-05, |
| "loss": 1.2625, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.930860628771476e-05, |
| "loss": 1.2726, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.930308510411344e-05, |
| "loss": 1.1634, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.9297542758930836e-05, |
| "loss": 1.1552, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.929197926477403e-05, |
| "loss": 1.2052, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.9286394634298206e-05, |
| "loss": 1.1897, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.928078888020664e-05, |
| "loss": 1.2657, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.9275162015250656e-05, |
| "loss": 1.1693, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.926951405222959e-05, |
| "loss": 1.1583, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.9263845003990782e-05, |
| "loss": 1.1679, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.925815488342952e-05, |
| "loss": 1.2153, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.925244370348903e-05, |
| "loss": 1.171, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.9246711477160453e-05, |
| "loss": 1.1764, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.9240958217482783e-05, |
| "loss": 1.1394, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.923518393754288e-05, |
| "loss": 1.1531, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9229388650475397e-05, |
| "loss": 1.1347, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.922357236946279e-05, |
| "loss": 1.1427, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9217735107735255e-05, |
| "loss": 1.268, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9211876878570727e-05, |
| "loss": 1.1485, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9205997695294822e-05, |
| "loss": 1.1996, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.920009757128083e-05, |
| "loss": 1.1876, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9194176519949668e-05, |
| "loss": 1.2266, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9188234554769857e-05, |
| "loss": 1.2082, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9182271689257494e-05, |
| "loss": 1.2875, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9176287936976214e-05, |
| "loss": 1.1865, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9170283311537165e-05, |
| "loss": 1.1937, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9164257826598973e-05, |
| "loss": 1.1441, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9158211495867716e-05, |
| "loss": 1.1047, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.9152144333096885e-05, |
| "loss": 1.1542, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.914605635208736e-05, |
| "loss": 1.181, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.9139947566687374e-05, |
| "loss": 1.2321, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.913381799079249e-05, |
| "loss": 1.2395, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.9127667638345553e-05, |
| "loss": 1.1813, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.9121496523336676e-05, |
| "loss": 1.1771, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.9115304659803196e-05, |
| "loss": 1.1702, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.910909206182965e-05, |
| "loss": 1.1314, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.910285874354773e-05, |
| "loss": 1.219, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.909660471913627e-05, |
| "loss": 1.2025, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.9090330002821207e-05, |
| "loss": 1.1078, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.908403460887553e-05, |
| "loss": 1.2186, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.907771855161928e-05, |
| "loss": 1.1887, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.907138184541949e-05, |
| "loss": 1.1649, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.9065024504690172e-05, |
| "loss": 1.1401, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9058646543892253e-05, |
| "loss": 1.1406, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9052247977533598e-05, |
| "loss": 1.2044, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9045828820168914e-05, |
| "loss": 1.1851, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.903938908639976e-05, |
| "loss": 1.1922, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9032928790874502e-05, |
| "loss": 1.1851, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9026447948288265e-05, |
| "loss": 1.0981, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.901994657338292e-05, |
| "loss": 1.1349, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.901342468094705e-05, |
| "loss": 1.1336, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.9006882285815893e-05, |
| "loss": 1.2705, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.900031940287134e-05, |
| "loss": 1.1983, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.8993736047041868e-05, |
| "loss": 1.1481, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.8987132233302538e-05, |
| "loss": 1.2455, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.8980507976674943e-05, |
| "loss": 1.3446, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.8973863292227173e-05, |
| "loss": 1.2166, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.896719819507379e-05, |
| "loss": 1.1852, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.896051270037578e-05, |
| "loss": 1.2208, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8953806823340536e-05, |
| "loss": 1.1541, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8947080579221813e-05, |
| "loss": 1.1566, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8940333983319687e-05, |
| "loss": 1.235, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8933567050980542e-05, |
| "loss": 1.2012, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8926779797597003e-05, |
| "loss": 1.2131, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.8919972238607936e-05, |
| "loss": 1.1663, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8913144389498384e-05, |
| "loss": 1.1273, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8906296265799547e-05, |
| "loss": 1.1685, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.889942788308875e-05, |
| "loss": 1.1065, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8892539256989385e-05, |
| "loss": 1.1729, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8885630403170915e-05, |
| "loss": 1.1507, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.887870133734879e-05, |
| "loss": 1.2105, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8871752075284454e-05, |
| "loss": 1.1375, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8864782632785277e-05, |
| "loss": 1.2125, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8857793025704545e-05, |
| "loss": 1.146, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8850783269941407e-05, |
| "loss": 1.19, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8843753381440846e-05, |
| "loss": 1.2291, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.883670337619363e-05, |
| "loss": 1.1684, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8829633270236308e-05, |
| "loss": 1.2472, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.882254307965113e-05, |
| "loss": 1.1395, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.881543282056604e-05, |
| "loss": 1.2327, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.8808302509154638e-05, |
| "loss": 1.2428, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.880115216163613e-05, |
| "loss": 1.168, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.8793981794275295e-05, |
| "loss": 1.2491, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.878679142338246e-05, |
| "loss": 1.2567, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.877958106531344e-05, |
| "loss": 1.2501, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.8772350736469533e-05, |
| "loss": 1.1345, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.876510045329745e-05, |
| "loss": 1.2642, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8757830232289293e-05, |
| "loss": 1.2016, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8750540089982528e-05, |
| "loss": 1.2061, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8743230042959918e-05, |
| "loss": 1.1883, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8735900107849515e-05, |
| "loss": 1.1378, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.872855030132461e-05, |
| "loss": 1.1498, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8721180640103688e-05, |
| "loss": 1.0477, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.8713791140950405e-05, |
| "loss": 1.1917, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.870638182067353e-05, |
| "loss": 1.2271, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.869895269612694e-05, |
| "loss": 1.2007, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.869150378420954e-05, |
| "loss": 1.2042, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.8684035101865255e-05, |
| "loss": 1.1675, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.8676546666082975e-05, |
| "loss": 1.2355, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.8669038493896534e-05, |
| "loss": 1.1827, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.866151060238465e-05, |
| "loss": 1.1474, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.8653963008670897e-05, |
| "loss": 1.2114, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.864639572992367e-05, |
| "loss": 1.2332, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.8638808783356138e-05, |
| "loss": 1.2373, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.863120218622621e-05, |
| "loss": 1.1972, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.862357595583649e-05, |
| "loss": 1.2143, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.861593010953425e-05, |
| "loss": 1.1628, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.860826466471137e-05, |
| "loss": 1.208, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.860057963880432e-05, |
| "loss": 1.2277, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8592875049294107e-05, |
| "loss": 1.2415, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.858515091370624e-05, |
| "loss": 1.19, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8577407249610688e-05, |
| "loss": 1.1189, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.856964407462184e-05, |
| "loss": 1.1291, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8561861406398472e-05, |
| "loss": 1.272, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8554059262643694e-05, |
| "loss": 1.1438, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8546237661104923e-05, |
| "loss": 1.1797, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8538396619573827e-05, |
| "loss": 1.2159, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8530536155886304e-05, |
| "loss": 1.2523, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.852265628792243e-05, |
| "loss": 1.1934, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.851475703360641e-05, |
| "loss": 1.2205, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8506838410906556e-05, |
| "loss": 1.2039, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8498900437835237e-05, |
| "loss": 1.2439, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.849094313244883e-05, |
| "loss": 1.113, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8482966512847698e-05, |
| "loss": 1.0863, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.847497059717613e-05, |
| "loss": 1.2663, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8466955403622307e-05, |
| "loss": 1.1951, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.845892095041827e-05, |
| "loss": 1.1718, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8450867255839854e-05, |
| "loss": 1.2259, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.844279433820668e-05, |
| "loss": 1.1898, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8434702215882085e-05, |
| "loss": 1.1148, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8426590907273087e-05, |
| "loss": 1.1661, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8418460430830353e-05, |
| "loss": 1.2616, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.841031080504816e-05, |
| "loss": 1.1619, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.840214204846432e-05, |
| "loss": 1.1855, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8393954179660182e-05, |
| "loss": 1.2489, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8385747217260564e-05, |
| "loss": 1.176, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.8377521179933707e-05, |
| "loss": 1.1447, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.8369276086391257e-05, |
| "loss": 1.2419, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.8361011955388193e-05, |
| "loss": 1.1833, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.835272880572281e-05, |
| "loss": 1.2339, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.8344426656236646e-05, |
| "loss": 1.2059, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.833610552581448e-05, |
| "loss": 1.19, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.832776543338426e-05, |
| "loss": 1.1525, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.831940639791705e-05, |
| "loss": 1.1986, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8311028438427026e-05, |
| "loss": 1.2028, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8302631573971397e-05, |
| "loss": 1.1734, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8294215823650382e-05, |
| "loss": 1.1524, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8285781206607147e-05, |
| "loss": 1.2589, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8277327742027797e-05, |
| "loss": 1.1144, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.826885544914128e-05, |
| "loss": 1.2293, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.82603643472194e-05, |
| "loss": 1.1522, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.825185445557672e-05, |
| "loss": 1.1918, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.824332579357057e-05, |
| "loss": 1.17, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.8234778380600953e-05, |
| "loss": 1.117, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.8226212236110537e-05, |
| "loss": 1.1722, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.8217627379584602e-05, |
| "loss": 1.1993, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.8209023830550978e-05, |
| "loss": 1.1941, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.820040160858003e-05, |
| "loss": 1.168, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.8191760733284586e-05, |
| "loss": 1.2401, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.8183101224319906e-05, |
| "loss": 1.2243, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.8174423101383646e-05, |
| "loss": 1.2346, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.8165726384215797e-05, |
| "loss": 1.1089, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.8157011092598634e-05, |
| "loss": 1.1745, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.81482772463567e-05, |
| "loss": 1.1749, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.813952486535674e-05, |
| "loss": 1.2021, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.8130753969507654e-05, |
| "loss": 1.2345, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.812196457876046e-05, |
| "loss": 1.2219, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.8113156713108247e-05, |
| "loss": 1.17, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.8104330392586135e-05, |
| "loss": 1.1589, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.809548563727121e-05, |
| "loss": 1.2126, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.8086622467282497e-05, |
| "loss": 1.2074, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.8077740902780913e-05, |
| "loss": 1.0972, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8068840963969208e-05, |
| "loss": 1.2737, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.805992267109194e-05, |
| "loss": 1.1111, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8050986044435405e-05, |
| "loss": 1.1575, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.80420311043276e-05, |
| "loss": 1.1572, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.80330578711382e-05, |
| "loss": 1.2203, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8024066365278475e-05, |
| "loss": 1.1889, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8015056607201253e-05, |
| "loss": 1.0945, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8006028617400896e-05, |
| "loss": 1.0848, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7996982416413223e-05, |
| "loss": 1.1379, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7987918024815497e-05, |
| "loss": 1.1773, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7978835463226338e-05, |
| "loss": 1.2081, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7969734752305707e-05, |
| "loss": 1.1862, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.796061591275485e-05, |
| "loss": 1.1603, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7951478965316245e-05, |
| "loss": 1.1829, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.7942323930773568e-05, |
| "loss": 1.274, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.7933150829951626e-05, |
| "loss": 1.2594, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.792395968371633e-05, |
| "loss": 1.1782, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.7914750512974635e-05, |
| "loss": 1.1877, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.79055233386745e-05, |
| "loss": 1.1477, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.7896278181804824e-05, |
| "loss": 1.073, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.788701506339543e-05, |
| "loss": 1.1325, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.7877734004516986e-05, |
| "loss": 1.1092, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.7868435026280965e-05, |
| "loss": 1.1498, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.785911814983961e-05, |
| "loss": 1.1628, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.7849783396385873e-05, |
| "loss": 1.0997, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.784043078715337e-05, |
| "loss": 1.2472, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.783106034341634e-05, |
| "loss": 1.1879, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.782167208648957e-05, |
| "loss": 1.241, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.7812266037728395e-05, |
| "loss": 1.1788, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.78028422185286e-05, |
| "loss": 1.1961, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.77934006503264e-05, |
| "loss": 1.138, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.7783941354598376e-05, |
| "loss": 1.2101, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.7774464352861446e-05, |
| "loss": 1.1435, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.776496966667279e-05, |
| "loss": 1.1957, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.775545731762983e-05, |
| "loss": 1.1467, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.774592732737015e-05, |
| "loss": 1.2863, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.7736379717571467e-05, |
| "loss": 1.1845, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.7726814509951588e-05, |
| "loss": 1.1712, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.771723172626833e-05, |
| "loss": 1.1627, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.770763138831951e-05, |
| "loss": 1.1607, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.7698013517942857e-05, |
| "loss": 1.1639, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.7688378137015993e-05, |
| "loss": 1.1406, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.7678725267456368e-05, |
| "loss": 1.1745, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7669054931221213e-05, |
| "loss": 1.1929, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7659367150307486e-05, |
| "loss": 1.2069, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7649661946751834e-05, |
| "loss": 1.1996, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.763993934263053e-05, |
| "loss": 1.1616, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7630199360059425e-05, |
| "loss": 1.1172, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.762044202119391e-05, |
| "loss": 1.2319, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7610667348228845e-05, |
| "loss": 1.1339, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7600875363398528e-05, |
| "loss": 1.2145, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7591066088976632e-05, |
| "loss": 1.1435, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7581239547276156e-05, |
| "loss": 1.1795, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7571395760649382e-05, |
| "loss": 1.1812, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.756153475148782e-05, |
| "loss": 1.1643, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7551656542222147e-05, |
| "loss": 1.1671, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7541761155322175e-05, |
| "loss": 1.129, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7531848613296782e-05, |
| "loss": 1.255, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.752191893869387e-05, |
| "loss": 1.1061, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.7511972154100316e-05, |
| "loss": 1.1279, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.7502008282141916e-05, |
| "loss": 1.2521, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.7492027345483328e-05, |
| "loss": 1.2057, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.7482029366828034e-05, |
| "loss": 1.1596, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.747201436891828e-05, |
| "loss": 1.15, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.7461982374535022e-05, |
| "loss": 1.1808, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.745193340649788e-05, |
| "loss": 1.1758, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.7441867487665084e-05, |
| "loss": 1.324, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.7431784640933423e-05, |
| "loss": 1.2155, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.742168488923819e-05, |
| "loss": 1.1455, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.7411568255553124e-05, |
| "loss": 1.149, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.740143476289038e-05, |
| "loss": 1.1563, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.7391284434300454e-05, |
| "loss": 1.1617, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.7381117292872133e-05, |
| "loss": 1.2069, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.737093336173246e-05, |
| "loss": 1.1508, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.7360732664046663e-05, |
| "loss": 1.2176, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.73505152230181e-05, |
| "loss": 1.1715, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.734028106188823e-05, |
| "loss": 1.2187, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.7330030203936542e-05, |
| "loss": 1.1669, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.7319762672480496e-05, |
| "loss": 1.1905, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.730947849087548e-05, |
| "loss": 1.1859, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.7299177682514767e-05, |
| "loss": 1.2081, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.728886027082944e-05, |
| "loss": 1.121, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.727852627928836e-05, |
| "loss": 1.1692, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.7268175731398086e-05, |
| "loss": 1.0347, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.725780865070285e-05, |
| "loss": 1.1838, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.724742506078449e-05, |
| "loss": 1.1592, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.723702498526239e-05, |
| "loss": 1.2058, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.7226608447793438e-05, |
| "loss": 1.1428, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.721617547207197e-05, |
| "loss": 1.1725, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.7205726081829713e-05, |
| "loss": 1.235, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.7195260300835733e-05, |
| "loss": 1.0361, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.7184778152896367e-05, |
| "loss": 1.152, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.7174279661855206e-05, |
| "loss": 1.1899, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.716376485159299e-05, |
| "loss": 1.1411, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7153233746027596e-05, |
| "loss": 1.1026, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.714268636911397e-05, |
| "loss": 1.1394, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.713212274484405e-05, |
| "loss": 1.2067, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7121542897246765e-05, |
| "loss": 1.172, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7110946850387914e-05, |
| "loss": 1.2459, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7100334628370165e-05, |
| "loss": 1.1609, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7089706255332966e-05, |
| "loss": 1.1553, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7079061755452518e-05, |
| "loss": 1.2186, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7068401152941692e-05, |
| "loss": 1.1506, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.705772447204999e-05, |
| "loss": 1.1598, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.70470317370635e-05, |
| "loss": 1.2171, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7036322972304812e-05, |
| "loss": 1.183, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7025598202132978e-05, |
| "loss": 1.1113, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7014857450943476e-05, |
| "loss": 1.2317, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.7004100743168116e-05, |
| "loss": 1.1582, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.6993328103275016e-05, |
| "loss": 1.2284, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.6982539555768527e-05, |
| "loss": 1.2533, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.6971735125189187e-05, |
| "loss": 1.107, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.696091483611367e-05, |
| "loss": 1.0996, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.6950078713154716e-05, |
| "loss": 1.1809, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.693922678096108e-05, |
| "loss": 1.1708, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.6928359064217484e-05, |
| "loss": 1.1636, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.691747558764455e-05, |
| "loss": 1.1048, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.6906576375998755e-05, |
| "loss": 1.1933, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.689566145407236e-05, |
| "loss": 1.2491, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.6884730846693368e-05, |
| "loss": 1.0875, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.6873784578725456e-05, |
| "loss": 1.153, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.6862822675067933e-05, |
| "loss": 1.1399, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6851845160655665e-05, |
| "loss": 1.2338, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6840852060459035e-05, |
| "loss": 1.1503, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6829843399483872e-05, |
| "loss": 1.173, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6818819202771405e-05, |
| "loss": 1.1897, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.68077794953982e-05, |
| "loss": 1.1458, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.6796724302476106e-05, |
| "loss": 1.2346, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.67856536491522e-05, |
| "loss": 1.1245, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.6774567560608712e-05, |
| "loss": 1.164, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.6763466062063005e-05, |
| "loss": 1.1393, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.675234917876748e-05, |
| "loss": 1.1972, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.6741216936009532e-05, |
| "loss": 1.2736, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.67300693591115e-05, |
| "loss": 1.177, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.671890647343061e-05, |
| "loss": 1.1944, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.670772830435889e-05, |
| "loss": 1.1668, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.6696534877323155e-05, |
| "loss": 1.2268, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.668532621778491e-05, |
| "loss": 1.1614, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6674102351240317e-05, |
| "loss": 1.2074, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6662863303220134e-05, |
| "loss": 1.2491, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6651609099289645e-05, |
| "loss": 1.1345, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6640339765048612e-05, |
| "loss": 1.28, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6629055326131205e-05, |
| "loss": 1.1877, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6617755808205966e-05, |
| "loss": 1.2251, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6606441236975732e-05, |
| "loss": 1.0982, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6595111638177573e-05, |
| "loss": 1.0871, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6583767037582755e-05, |
| "loss": 1.1261, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.657240746099666e-05, |
| "loss": 1.1676, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6561032934258744e-05, |
| "loss": 1.1646, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6549643483242453e-05, |
| "loss": 1.1672, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6538239133855197e-05, |
| "loss": 1.2043, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6526819912038275e-05, |
| "loss": 1.1851, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6515385843766805e-05, |
| "loss": 1.1836, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6503936955049684e-05, |
| "loss": 1.1813, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.649247327192952e-05, |
| "loss": 1.2147, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.648099482048257e-05, |
| "loss": 1.1937, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6469501626818694e-05, |
| "loss": 1.1912, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.645799371708127e-05, |
| "loss": 1.1884, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6446471117447162e-05, |
| "loss": 1.2361, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6434933854126646e-05, |
| "loss": 1.1961, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6423381953363355e-05, |
| "loss": 1.1824, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6411815441434214e-05, |
| "loss": 1.2111, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.640023434464939e-05, |
| "loss": 1.1995, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6388638689352216e-05, |
| "loss": 1.2135, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.637702850191915e-05, |
| "loss": 1.2705, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.63654038087597e-05, |
| "loss": 1.1969, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6353764636316375e-05, |
| "loss": 1.2371, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6342111011064616e-05, |
| "loss": 1.2054, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6330442959512743e-05, |
| "loss": 1.1821, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6318760508201887e-05, |
| "loss": 1.1781, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6307063683705937e-05, |
| "loss": 1.1514, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6295352512631474e-05, |
| "loss": 1.1559, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6283627021617716e-05, |
| "loss": 1.2323, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6271887237336454e-05, |
| "loss": 1.1605, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.626013318649199e-05, |
| "loss": 1.1884, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.624836489582108e-05, |
| "loss": 1.1566, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6236582392092863e-05, |
| "loss": 1.0873, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6224785702108822e-05, |
| "loss": 1.157, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6212974852702702e-05, |
| "loss": 1.1681, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6201149870740448e-05, |
| "loss": 1.1802, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6189310783120173e-05, |
| "loss": 1.2086, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.617745761677205e-05, |
| "loss": 1.2703, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.61655903986583e-05, |
| "loss": 1.171, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.615370915577309e-05, |
| "loss": 1.1751, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6141813915142505e-05, |
| "loss": 1.2857, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6129904703824454e-05, |
| "loss": 1.1493, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6117981548908638e-05, |
| "loss": 1.103, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6106044477516466e-05, |
| "loss": 1.1476, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6094093516801005e-05, |
| "loss": 1.2008, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.608212869394692e-05, |
| "loss": 1.1321, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6070150036170408e-05, |
| "loss": 1.1623, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6058157570719125e-05, |
| "loss": 1.1381, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6046151324872153e-05, |
| "loss": 1.1885, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.603413132593991e-05, |
| "loss": 1.1757, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.6022097601264087e-05, |
| "loss": 1.1659, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.6010050178217624e-05, |
| "loss": 1.1627, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.599798908420459e-05, |
| "loss": 1.2379, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.5985914346660177e-05, |
| "loss": 1.1305, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.597382599305059e-05, |
| "loss": 1.1694, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.5961724050873025e-05, |
| "loss": 1.1812, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.5949608547655582e-05, |
| "loss": 1.2042, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5937479510957196e-05, |
| "loss": 1.2699, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5925336968367598e-05, |
| "loss": 1.0963, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5913180947507244e-05, |
| "loss": 1.1765, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.590101147602724e-05, |
| "loss": 1.2056, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5888828581609294e-05, |
| "loss": 1.1901, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5876632291965637e-05, |
| "loss": 1.2822, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5864422634838987e-05, |
| "loss": 1.127, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.585219963800245e-05, |
| "loss": 1.2087, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.583996332925949e-05, |
| "loss": 1.1694, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.5827713736443844e-05, |
| "loss": 1.1093, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.581545088741947e-05, |
| "loss": 1.2218, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.580317481008047e-05, |
| "loss": 1.1349, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.579088553235106e-05, |
| "loss": 1.1091, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.577858308218545e-05, |
| "loss": 1.2632, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5766267487567836e-05, |
| "loss": 1.1202, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.575393877651231e-05, |
| "loss": 1.1587, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5741596977062795e-05, |
| "loss": 1.15, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5729242117292986e-05, |
| "loss": 1.1889, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5716874225306292e-05, |
| "loss": 1.0798, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5704493329235756e-05, |
| "loss": 1.2704, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5692099457244017e-05, |
| "loss": 1.1857, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5679692637523213e-05, |
| "loss": 1.1825, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.5667272898294942e-05, |
| "loss": 1.1883, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.5654840267810196e-05, |
| "loss": 1.1555, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.5642394774349275e-05, |
| "loss": 1.1231, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.562993644622175e-05, |
| "loss": 1.2109, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.561746531176639e-05, |
| "loss": 1.1532, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.560498139935108e-05, |
| "loss": 1.2574, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.5592484737372782e-05, |
| "loss": 1.1988, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.557997535425746e-05, |
| "loss": 1.1886, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5567453278460006e-05, |
| "loss": 1.1012, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5554918538464186e-05, |
| "loss": 1.1243, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5542371162782588e-05, |
| "loss": 1.0718, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.552981117995652e-05, |
| "loss": 0.9951, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5517238618555976e-05, |
| "loss": 0.926, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.550465350717957e-05, |
| "loss": 0.8722, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5492055874454454e-05, |
| "loss": 0.8137, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.547944574903627e-05, |
| "loss": 0.885, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.546682315960906e-05, |
| "loss": 0.8681, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.545418813488524e-05, |
| "loss": 0.78, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5441540703605496e-05, |
| "loss": 0.877, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5428880894538745e-05, |
| "loss": 0.8386, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5416208736482054e-05, |
| "loss": 0.9246, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.540352425826058e-05, |
| "loss": 0.9193, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5390827488727506e-05, |
| "loss": 0.8523, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5378118456763977e-05, |
| "loss": 0.903, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5365397191279025e-05, |
| "loss": 0.8741, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5352663721209515e-05, |
| "loss": 0.8557, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.533991807552007e-05, |
| "loss": 0.9143, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5327160283203008e-05, |
| "loss": 0.9255, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5314390373278278e-05, |
| "loss": 0.9299, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5301608374793398e-05, |
| "loss": 0.8338, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5288814316823374e-05, |
| "loss": 0.8034, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.527600822847065e-05, |
| "loss": 0.8677, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.526319013886503e-05, |
| "loss": 0.8384, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5250360077163629e-05, |
| "loss": 0.8242, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5237518072550781e-05, |
| "loss": 0.8574, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5224664154237991e-05, |
| "loss": 0.8175, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5211798351463867e-05, |
| "loss": 0.9271, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5198920693494049e-05, |
| "loss": 0.9223, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5186031209621139e-05, |
| "loss": 0.8342, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5173129929164645e-05, |
| "loss": 0.8286, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5160216881470908e-05, |
| "loss": 0.8439, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5147292095913033e-05, |
| "loss": 0.8282, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5134355601890827e-05, |
| "loss": 0.9179, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.512140742883073e-05, |
| "loss": 0.8347, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5108447606185745e-05, |
| "loss": 0.9676, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.509547616343538e-05, |
| "loss": 0.8379, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5082493130085571e-05, |
| "loss": 0.8115, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5069498535668618e-05, |
| "loss": 0.8005, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5056492409743117e-05, |
| "loss": 0.7604, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5043474781893906e-05, |
| "loss": 0.8077, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5030445681731975e-05, |
| "loss": 0.8192, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5017405138894413e-05, |
| "loss": 0.8141, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5004353183044331e-05, |
| "loss": 0.877, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.4991289843870817e-05, |
| "loss": 0.9167, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.4978215151088842e-05, |
| "loss": 0.9478, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.4965129134439199e-05, |
| "loss": 0.9403, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.4952031823688446e-05, |
| "loss": 0.9017, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4938923248628832e-05, |
| "loss": 0.8269, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4925803439078224e-05, |
| "loss": 0.866, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4912672424880046e-05, |
| "loss": 0.9083, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4899530235903213e-05, |
| "loss": 0.8554, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.488637690204205e-05, |
| "loss": 0.9207, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4873212453216244e-05, |
| "loss": 0.8745, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.4860036919370755e-05, |
| "loss": 0.8551, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4846850330475766e-05, |
| "loss": 0.8603, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.48336527165266e-05, |
| "loss": 0.8333, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4820444107543665e-05, |
| "loss": 0.878, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4807224533572376e-05, |
| "loss": 0.8964, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4793994024683087e-05, |
| "loss": 0.9128, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4780752610971027e-05, |
| "loss": 0.9211, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.4767500322556237e-05, |
| "loss": 0.7794, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4754237189583485e-05, |
| "loss": 0.895, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4740963242222213e-05, |
| "loss": 0.923, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4727678510666458e-05, |
| "loss": 0.9196, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4714383025134792e-05, |
| "loss": 0.8562, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4701076815870246e-05, |
| "loss": 0.9224, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4687759913140245e-05, |
| "loss": 0.8321, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.4674432347236538e-05, |
| "loss": 0.8289, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4661094148475126e-05, |
| "loss": 0.8618, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4647745347196205e-05, |
| "loss": 0.8229, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4634385973764081e-05, |
| "loss": 0.7998, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.462101605856711e-05, |
| "loss": 0.8487, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4607635632017628e-05, |
| "loss": 0.9288, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.459424472455188e-05, |
| "loss": 0.8488, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4580843366629953e-05, |
| "loss": 0.8621, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.4567431588735707e-05, |
| "loss": 0.883, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.45540094213767e-05, |
| "loss": 0.9022, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.4540576895084129e-05, |
| "loss": 0.8648, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.4527134040412743e-05, |
| "loss": 0.8755, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.4513680887940798e-05, |
| "loss": 0.8138, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.4500217468269963e-05, |
| "loss": 0.9024, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.448674381202527e-05, |
| "loss": 0.8732, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.4473259949855034e-05, |
| "loss": 0.8694, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.4459765912430783e-05, |
| "loss": 0.857, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.4446261730447191e-05, |
| "loss": 0.883, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.4432747434622007e-05, |
| "loss": 0.8459, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.441922305569599e-05, |
| "loss": 0.9156, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.440568862443283e-05, |
| "loss": 0.8347, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.4392144171619085e-05, |
| "loss": 0.9418, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.437858972806411e-05, |
| "loss": 0.854, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.436502532459998e-05, |
| "loss": 0.8598, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4351450992081435e-05, |
| "loss": 0.9002, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4337866761385798e-05, |
| "loss": 0.8616, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4324272663412897e-05, |
| "loss": 0.849, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4310668729085016e-05, |
| "loss": 0.805, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4297054989346812e-05, |
| "loss": 0.8386, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.4283431475165245e-05, |
| "loss": 0.9254, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.4269798217529507e-05, |
| "loss": 0.7647, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.4256155247450951e-05, |
| "loss": 0.8474, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.4242502595963032e-05, |
| "loss": 0.8706, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.422884029412122e-05, |
| "loss": 0.8341, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.4215168373002937e-05, |
| "loss": 0.8343, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.4201486863707485e-05, |
| "loss": 0.8659, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.418779579735598e-05, |
| "loss": 0.9398, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4174095205091276e-05, |
| "loss": 0.8822, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4160385118077895e-05, |
| "loss": 0.8781, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4146665567501957e-05, |
| "loss": 0.8397, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.41329365845711e-05, |
| "loss": 0.8572, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4119198200514435e-05, |
| "loss": 0.8202, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4105450446582445e-05, |
| "loss": 0.817, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.4091693354046928e-05, |
| "loss": 0.8989, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.4077926954200925e-05, |
| "loss": 0.9239, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.4064151278358651e-05, |
| "loss": 0.8314, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.405036635785542e-05, |
| "loss": 0.8113, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.4036572224047574e-05, |
| "loss": 0.8797, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.4022768908312408e-05, |
| "loss": 0.8048, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.400895644204811e-05, |
| "loss": 0.8954, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.3995134856673677e-05, |
| "loss": 0.8406, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.398130418362886e-05, |
| "loss": 0.9708, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3967464454374063e-05, |
| "loss": 0.7799, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3953615700390306e-05, |
| "loss": 0.8598, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3939757953179132e-05, |
| "loss": 0.7912, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3925891244262539e-05, |
| "loss": 0.8484, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.391201560518291e-05, |
| "loss": 0.8313, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3898131067502947e-05, |
| "loss": 0.8837, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.3884237662805585e-05, |
| "loss": 0.8208, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3870335422693938e-05, |
| "loss": 0.9286, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3856424378791205e-05, |
| "loss": 0.89, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3842504562740624e-05, |
| "loss": 0.8884, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3828576006205383e-05, |
| "loss": 0.8353, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3814638740868549e-05, |
| "loss": 0.8545, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3800692798432997e-05, |
| "loss": 0.8845, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.3786738210621346e-05, |
| "loss": 0.7829, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.3772775009175875e-05, |
| "loss": 0.8549, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.3758803225858463e-05, |
| "loss": 0.8659, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.3744822892450504e-05, |
| "loss": 0.7929, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.373083404075284e-05, |
| "loss": 0.929, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.371683670258569e-05, |
| "loss": 0.8784, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.3702830909788584e-05, |
| "loss": 0.832, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.3688816694220278e-05, |
| "loss": 0.8677, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.3674794087758685e-05, |
| "loss": 0.7896, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.3660763122300806e-05, |
| "loss": 0.9225, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.3646723829762663e-05, |
| "loss": 0.8672, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.3632676242079208e-05, |
| "loss": 0.8587, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.361862039120427e-05, |
| "loss": 0.8558, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.3604556309110473e-05, |
| "loss": 0.9334, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.359048402778916e-05, |
| "loss": 0.8402, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3576403579250333e-05, |
| "loss": 0.9429, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3562314995522563e-05, |
| "loss": 0.8501, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3548218308652927e-05, |
| "loss": 0.9584, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3534113550706941e-05, |
| "loss": 0.8615, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3520000753768478e-05, |
| "loss": 0.809, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3505879949939693e-05, |
| "loss": 0.8131, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.3491751171340955e-05, |
| "loss": 0.8055, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.347761445011077e-05, |
| "loss": 0.9068, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3463469818405725e-05, |
| "loss": 0.8774, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3449317308400385e-05, |
| "loss": 0.8686, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3435156952287243e-05, |
| "loss": 0.8621, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3420988782276636e-05, |
| "loss": 0.902, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3406812830596682e-05, |
| "loss": 0.872, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3392629129493191e-05, |
| "loss": 0.8972, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.3378437711229608e-05, |
| "loss": 0.9022, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3364238608086929e-05, |
| "loss": 0.8502, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3350031852363624e-05, |
| "loss": 0.8394, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3335817476375587e-05, |
| "loss": 0.8753, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3321595512456028e-05, |
| "loss": 0.9206, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3307365992955428e-05, |
| "loss": 0.7924, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3293128950241445e-05, |
| "loss": 0.913, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.3278884416698863e-05, |
| "loss": 0.9098, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3264632424729499e-05, |
| "loss": 0.8652, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3250373006752129e-05, |
| "loss": 0.905, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3236106195202427e-05, |
| "loss": 0.8808, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.322183202253289e-05, |
| "loss": 0.8609, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3207550521212751e-05, |
| "loss": 0.8274, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3193261723727914e-05, |
| "loss": 0.8378, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.3178965662580887e-05, |
| "loss": 0.8155, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.316466237029069e-05, |
| "loss": 0.8696, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3150351879392802e-05, |
| "loss": 0.8457, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3136034222439067e-05, |
| "loss": 0.9545, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3121709431997638e-05, |
| "loss": 0.8966, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3107377540652883e-05, |
| "loss": 0.9064, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3093038581005342e-05, |
| "loss": 0.9158, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.3078692585671608e-05, |
| "loss": 0.7427, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.30643395872843e-05, |
| "loss": 0.842, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.3049979618491953e-05, |
| "loss": 0.777, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.3035612711958962e-05, |
| "loss": 0.8333, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.3021238900365507e-05, |
| "loss": 0.7957, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.3006858216407468e-05, |
| "loss": 0.8057, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.299247069279636e-05, |
| "loss": 0.8589, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.2978076362259256e-05, |
| "loss": 0.7948, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2963675257538717e-05, |
| "loss": 0.7813, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2949267411392708e-05, |
| "loss": 0.8126, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2934852856594524e-05, |
| "loss": 0.9242, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2920431625932734e-05, |
| "loss": 0.8683, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2906003752211083e-05, |
| "loss": 0.807, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2891569268248425e-05, |
| "loss": 0.8524, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2877128206878655e-05, |
| "loss": 0.8659, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2862680600950628e-05, |
| "loss": 0.8665, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2848226483328085e-05, |
| "loss": 0.9283, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2833765886889584e-05, |
| "loss": 0.8938, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2819298844528412e-05, |
| "loss": 0.9273, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.280482538915252e-05, |
| "loss": 0.8961, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2790345553684458e-05, |
| "loss": 0.8329, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.277585937106127e-05, |
| "loss": 0.862, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2761366874234455e-05, |
| "loss": 0.8787, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2746868096169865e-05, |
| "loss": 0.8863, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2732363069847642e-05, |
| "loss": 0.8825, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2717851828262141e-05, |
| "loss": 0.7991, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2703334404421856e-05, |
| "loss": 0.9907, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2688810831349341e-05, |
| "loss": 0.8619, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.267428114208114e-05, |
| "loss": 0.8511, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2659745369667709e-05, |
| "loss": 0.8463, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.2645203547173338e-05, |
| "loss": 0.8928, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.263065570767609e-05, |
| "loss": 0.8789, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.2616101884267693e-05, |
| "loss": 0.8942, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.2601542110053511e-05, |
| "loss": 0.829, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.258697641815243e-05, |
| "loss": 0.8311, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.25724048416968e-05, |
| "loss": 0.9047, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.2557827413832355e-05, |
| "loss": 0.8134, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2543244167718142e-05, |
| "loss": 0.883, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2528655136526447e-05, |
| "loss": 0.9176, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2514060353442702e-05, |
| "loss": 0.8332, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2499459851665432e-05, |
| "loss": 0.8568, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2484853664406169e-05, |
| "loss": 0.8691, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.247024182488938e-05, |
| "loss": 0.8411, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.2455624366352384e-05, |
| "loss": 0.8416, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2441001322045285e-05, |
| "loss": 0.8551, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2426372725230889e-05, |
| "loss": 0.8329, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2411738609184638e-05, |
| "loss": 0.9153, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2397099007194524e-05, |
| "loss": 0.8764, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2382453952561022e-05, |
| "loss": 0.813, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2367803478597002e-05, |
| "loss": 0.8734, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.2353147618627673e-05, |
| "loss": 0.8858, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2338486405990482e-05, |
| "loss": 0.8797, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2323819874035064e-05, |
| "loss": 0.8251, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2309148056123146e-05, |
| "loss": 0.8663, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2294470985628483e-05, |
| "loss": 0.864, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2279788695936779e-05, |
| "loss": 0.8838, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2265101220445598e-05, |
| "loss": 0.8661, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.225040859256432e-05, |
| "loss": 0.956, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.2235710845714027e-05, |
| "loss": 0.8908, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2221008013327456e-05, |
| "loss": 0.8516, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2206300128848907e-05, |
| "loss": 0.8885, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2191587225734173e-05, |
| "loss": 0.9068, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2176869337450466e-05, |
| "loss": 0.8755, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2162146497476332e-05, |
| "loss": 0.892, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2147418739301582e-05, |
| "loss": 0.7795, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.2132686096427222e-05, |
| "loss": 0.823, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2117948602365357e-05, |
| "loss": 0.8897, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2103206290639137e-05, |
| "loss": 0.9289, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2088459194782662e-05, |
| "loss": 0.8261, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2073707348340922e-05, |
| "loss": 0.8258, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.205895078486971e-05, |
| "loss": 0.8952, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2044189537935546e-05, |
| "loss": 0.8629, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.2029423641115609e-05, |
| "loss": 0.8786, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.2014653127997653e-05, |
| "loss": 0.8536, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1999878032179928e-05, |
| "loss": 0.8006, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1985098387271115e-05, |
| "loss": 0.8544, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1970314226890238e-05, |
| "loss": 0.9043, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1955525584666594e-05, |
| "loss": 0.8769, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1940732494239676e-05, |
| "loss": 0.8914, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1925934989259092e-05, |
| "loss": 0.8359, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1911133103384493e-05, |
| "loss": 0.8901, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1896326870285504e-05, |
| "loss": 0.8344, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1881516323641621e-05, |
| "loss": 0.8418, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1866701497142164e-05, |
| "loss": 0.8585, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1851882424486187e-05, |
| "loss": 0.9042, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1837059139382404e-05, |
| "loss": 0.8722, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1822231675549101e-05, |
| "loss": 0.8987, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1807400066714083e-05, |
| "loss": 0.8548, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1792564346614572e-05, |
| "loss": 0.9085, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1777724548997154e-05, |
| "loss": 0.8791, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.176288070761768e-05, |
| "loss": 0.7954, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1748032856241197e-05, |
| "loss": 0.8582, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1733181028641885e-05, |
| "loss": 0.8487, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1718325258602963e-05, |
| "loss": 0.8326, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1703465579916611e-05, |
| "loss": 0.933, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1688602026383915e-05, |
| "loss": 0.8065, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.167373463181476e-05, |
| "loss": 0.8861, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1658863430027775e-05, |
| "loss": 0.9331, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1643988454850255e-05, |
| "loss": 0.8435, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1629109740118063e-05, |
| "loss": 0.8088, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1614227319675584e-05, |
| "loss": 0.9031, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.159934122737562e-05, |
| "loss": 0.8964, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1584451497079333e-05, |
| "loss": 0.8656, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1569558162656164e-05, |
| "loss": 0.8668, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.155466125798374e-05, |
| "loss": 0.8452, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1539760816947814e-05, |
| "loss": 0.8585, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.152485687344219e-05, |
| "loss": 0.9309, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1509949461368628e-05, |
| "loss": 0.8722, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1495038614636791e-05, |
| "loss": 0.8686, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1480124367164144e-05, |
| "loss": 0.9718, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1465206752875889e-05, |
| "loss": 0.9608, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.145028580570489e-05, |
| "loss": 0.853, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1435361559591594e-05, |
| "loss": 0.8634, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1420434048483942e-05, |
| "loss": 0.9267, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1405503306337313e-05, |
| "loss": 0.8512, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1390569367114433e-05, |
| "loss": 0.9281, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1375632264785295e-05, |
| "loss": 0.9114, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.136069203332709e-05, |
| "loss": 0.7929, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1345748706724128e-05, |
| "loss": 0.885, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1330802318967762e-05, |
| "loss": 0.8147, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1315852904056303e-05, |
| "loss": 0.8718, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1300900495994949e-05, |
| "loss": 0.8223, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.128594512879571e-05, |
| "loss": 0.8715, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.127098683647732e-05, |
| "loss": 0.9205, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1256025653065174e-05, |
| "loss": 0.8265, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1241061612591242e-05, |
| "loss": 0.7664, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1226094749093988e-05, |
| "loss": 0.9004, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1211125096618302e-05, |
| "loss": 0.8588, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.119615268921542e-05, |
| "loss": 0.873, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1181177560942837e-05, |
| "loss": 0.8329, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.1166199745864246e-05, |
| "loss": 0.9059, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.115121927804944e-05, |
| "loss": 0.8643, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.1136236191574262e-05, |
| "loss": 0.8863, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.1121250520520499e-05, |
| "loss": 0.8305, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.110626229897582e-05, |
| "loss": 0.8117, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.1091271561033696e-05, |
| "loss": 0.8782, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.107627834079333e-05, |
| "loss": 0.856, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.1061282672359552e-05, |
| "loss": 0.9187, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.1046284589842785e-05, |
| "loss": 0.9023, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.1031284127358924e-05, |
| "loss": 0.877, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.1016281319029287e-05, |
| "loss": 0.796, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.1001276198980528e-05, |
| "loss": 0.914, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0986268801344555e-05, |
| "loss": 0.8536, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.097125916025846e-05, |
| "loss": 0.8817, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0956247309864434e-05, |
| "loss": 0.8602, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0941233284309705e-05, |
| "loss": 0.8931, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0926217117746433e-05, |
| "loss": 0.986, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0911198844331659e-05, |
| "loss": 0.8645, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.089617849822721e-05, |
| "loss": 0.883, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0881156113599639e-05, |
| "loss": 0.8275, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0866131724620117e-05, |
| "loss": 0.819, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0851105365464393e-05, |
| "loss": 0.9415, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0836077070312688e-05, |
| "loss": 0.8883, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0821046873349628e-05, |
| "loss": 0.9572, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0806014808764164e-05, |
| "loss": 0.8338, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.07909809107495e-05, |
| "loss": 0.882, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0775945213503004e-05, |
| "loss": 0.9092, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0760907751226141e-05, |
| "loss": 0.804, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0745868558124392e-05, |
| "loss": 0.8374, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0730827668407171e-05, |
| "loss": 0.9351, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0715785116287753e-05, |
| "loss": 0.8707, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0700740935983194e-05, |
| "loss": 0.8461, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0685695161714256e-05, |
| "loss": 0.8286, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0670647827705323e-05, |
| "loss": 0.8877, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0655598968184329e-05, |
| "loss": 0.8728, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.064054861738268e-05, |
| "loss": 0.87, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0625496809535169e-05, |
| "loss": 0.9321, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0610443578879913e-05, |
| "loss": 0.9134, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.059538895965825e-05, |
| "loss": 0.824, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.058033298611469e-05, |
| "loss": 0.842, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.056527569249682e-05, |
| "loss": 0.9118, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0550217113055228e-05, |
| "loss": 0.842, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0535157282043426e-05, |
| "loss": 0.8774, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0520096233717778e-05, |
| "loss": 0.9194, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.050503400233741e-05, |
| "loss": 0.8288, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0489970622164146e-05, |
| "loss": 0.8307, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0474906127462416e-05, |
| "loss": 0.8888, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0459840552499192e-05, |
| "loss": 0.8824, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0444773931543898e-05, |
| "loss": 0.8971, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0429706298868343e-05, |
| "loss": 0.8819, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.041463768874663e-05, |
| "loss": 0.8525, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0399568135455092e-05, |
| "loss": 0.8804, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0384497673272202e-05, |
| "loss": 0.8205, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0369426336478508e-05, |
| "loss": 0.8546, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0354354159356541e-05, |
| "loss": 0.8789, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0339281176190743e-05, |
| "loss": 0.7981, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0324207421267392e-05, |
| "loss": 0.814, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.030913292887452e-05, |
| "loss": 0.9397, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0294057733301841e-05, |
| "loss": 0.8631, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0278981868840663e-05, |
| "loss": 0.9086, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.026390536978382e-05, |
| "loss": 0.8188, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0248828270425581e-05, |
| "loss": 0.8997, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0233750605061594e-05, |
| "loss": 0.8182, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0218672407988784e-05, |
| "loss": 0.8617, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0203593713505287e-05, |
| "loss": 0.8381, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0188514555910374e-05, |
| "loss": 0.8852, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0173434969504368e-05, |
| "loss": 0.9444, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0158354988588568e-05, |
| "loss": 0.8405, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0143274647465165e-05, |
| "loss": 0.8646, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0128193980437175e-05, |
| "loss": 0.8423, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0113113021808356e-05, |
| "loss": 0.8758, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0098031805883128e-05, |
| "loss": 0.9255, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0082950366966494e-05, |
| "loss": 0.8501, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0067868739363963e-05, |
| "loss": 0.8826, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0052786957381479e-05, |
| "loss": 0.8242, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0037705055325335e-05, |
| "loss": 0.7993, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0022623067502096e-05, |
| "loss": 0.8892, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0007541028218519e-05, |
| "loss": 0.9225, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 9.992458971781484e-06, |
| "loss": 0.9377, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.977376932497906e-06, |
| "loss": 0.8422, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.962294944674667e-06, |
| "loss": 0.9221, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.947213042618523e-06, |
| "loss": 0.9194, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.932131260636038e-06, |
| "loss": 0.8007, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.917049633033511e-06, |
| "loss": 0.8146, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.901968194116875e-06, |
| "loss": 0.8568, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.886886978191644e-06, |
| "loss": 0.8056, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.871806019562828e-06, |
| "loss": 0.8578, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.856725352534838e-06, |
| "loss": 0.8546, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.841645011411438e-06, |
| "loss": 0.8534, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.826565030495635e-06, |
| "loss": 0.8349, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.811485444089626e-06, |
| "loss": 0.816, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.796406286494715e-06, |
| "loss": 0.8548, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.78132759201122e-06, |
| "loss": 0.8757, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.76624939493841e-06, |
| "loss": 0.8128, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.75117172957442e-06, |
| "loss": 0.806, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.736094630216182e-06, |
| "loss": 0.892, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.721018131159342e-06, |
| "loss": 0.9101, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.705942266698162e-06, |
| "loss": 0.9487, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.690867071125482e-06, |
| "loss": 0.903, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.675792578732613e-06, |
| "loss": 0.8535, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 9.66071882380926e-06, |
| "loss": 0.8494, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.645645840643465e-06, |
| "loss": 0.8087, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.630573663521494e-06, |
| "loss": 0.8598, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.615502326727798e-06, |
| "loss": 0.8856, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.600431864544913e-06, |
| "loss": 0.8823, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.585362311253374e-06, |
| "loss": 0.8487, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.57029370113166e-06, |
| "loss": 0.8557, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.555226068456106e-06, |
| "loss": 0.9581, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.540159447500811e-06, |
| "loss": 0.8392, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.52509387253759e-06, |
| "loss": 0.8386, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.51002937783586e-06, |
| "loss": 0.9124, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.494965997662592e-06, |
| "loss": 0.9015, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.479903766282227e-06, |
| "loss": 0.8449, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.464842717956576e-06, |
| "loss": 0.875, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.449782886944777e-06, |
| "loss": 0.8442, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.434724307503182e-06, |
| "loss": 0.9113, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.419667013885312e-06, |
| "loss": 0.8238, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.404611040341755e-06, |
| "loss": 0.8376, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.389556421120092e-06, |
| "loss": 0.8134, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.374503190464831e-06, |
| "loss": 0.8397, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.359451382617323e-06, |
| "loss": 0.9778, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.344401031815675e-06, |
| "loss": 0.7893, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.329352172294682e-06, |
| "loss": 0.8972, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.314304838285747e-06, |
| "loss": 0.8443, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.299259064016807e-06, |
| "loss": 0.8974, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.28421488371225e-06, |
| "loss": 0.9403, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.26917233159283e-06, |
| "loss": 0.8913, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.254131441875606e-06, |
| "loss": 0.8716, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.23909224877386e-06, |
| "loss": 0.8959, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.224054786496996e-06, |
| "loss": 0.849, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.209019089250504e-06, |
| "loss": 0.8191, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.193985191235837e-06, |
| "loss": 0.846, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.178953126650373e-06, |
| "loss": 0.8792, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.163922929687317e-06, |
| "loss": 0.9379, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.148894634535608e-06, |
| "loss": 0.8598, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.133868275379886e-06, |
| "loss": 0.9306, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.118843886400365e-06, |
| "loss": 0.8881, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.103821501772789e-06, |
| "loss": 0.9151, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.088801155668345e-06, |
| "loss": 0.9051, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.07378288225357e-06, |
| "loss": 0.8664, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.058766715690296e-06, |
| "loss": 0.802, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.043752690135567e-06, |
| "loss": 0.8957, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.028740839741544e-06, |
| "loss": 0.9545, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.01373119865545e-06, |
| "loss": 0.8372, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.998723801019475e-06, |
| "loss": 0.9176, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.983718680970713e-06, |
| "loss": 0.8157, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.96871587264108e-06, |
| "loss": 0.8179, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.953715410157217e-06, |
| "loss": 0.864, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.93871732764045e-06, |
| "loss": 0.851, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.923721659206674e-06, |
| "loss": 0.8463, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.908728438966305e-06, |
| "loss": 0.8808, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.893737701024185e-06, |
| "loss": 0.8413, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.878749479479504e-06, |
| "loss": 0.9245, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.863763808425738e-06, |
| "loss": 0.8232, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.848780721950563e-06, |
| "loss": 0.8894, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.833800254135758e-06, |
| "loss": 0.9169, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.818822439057166e-06, |
| "loss": 0.8293, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.803847310784583e-06, |
| "loss": 0.848, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.788874903381698e-06, |
| "loss": 0.9037, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.773905250906015e-06, |
| "loss": 0.8824, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.758938387408761e-06, |
| "loss": 0.9233, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.743974346934827e-06, |
| "loss": 0.9699, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.729013163522683e-06, |
| "loss": 0.8693, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.714054871204294e-06, |
| "loss": 0.8735, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 8.699099504005054e-06, |
| "loss": 0.8191, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.684147095943699e-06, |
| "loss": 0.8708, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.66919768103224e-06, |
| "loss": 0.8231, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.654251293275875e-06, |
| "loss": 0.8964, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.639307966672913e-06, |
| "loss": 0.7958, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.624367735214711e-06, |
| "loss": 0.8975, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.60943063288557e-06, |
| "loss": 0.8934, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.594496693662688e-06, |
| "loss": 0.8553, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.579565951516063e-06, |
| "loss": 0.9041, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.564638440408411e-06, |
| "loss": 0.8352, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.549714194295115e-06, |
| "loss": 0.8514, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.534793247124115e-06, |
| "loss": 0.853, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.519875632835858e-06, |
| "loss": 0.8732, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.504961385363212e-06, |
| "loss": 0.8377, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.490050538631375e-06, |
| "loss": 0.8896, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.475143126557814e-06, |
| "loss": 0.9242, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.46023918305219e-06, |
| "loss": 0.8478, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.445338742016263e-06, |
| "loss": 0.8388, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.430441837343841e-06, |
| "loss": 0.8257, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.415548502920668e-06, |
| "loss": 0.8666, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.400658772624382e-06, |
| "loss": 0.8762, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.38577268032442e-06, |
| "loss": 0.9543, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.370890259881938e-06, |
| "loss": 0.8631, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.356011545149752e-06, |
| "loss": 0.801, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.341136569972227e-06, |
| "loss": 0.8904, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.326265368185242e-06, |
| "loss": 0.8823, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.31139797361609e-06, |
| "loss": 0.9037, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.29653442008339e-06, |
| "loss": 0.8477, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.28167474139704e-06, |
| "loss": 0.8924, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.266818971358118e-06, |
| "loss": 0.8828, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.251967143758806e-06, |
| "loss": 0.9561, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.237119292382327e-06, |
| "loss": 0.8416, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.22227545100285e-06, |
| "loss": 0.8815, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.207435653385428e-06, |
| "loss": 0.7973, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.192599933285922e-06, |
| "loss": 0.916, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.1777683244509e-06, |
| "loss": 0.8082, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 8.162940860617598e-06, |
| "loss": 0.8195, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.148117575513816e-06, |
| "loss": 0.8559, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.133298502857836e-06, |
| "loss": 0.8052, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.118483676358384e-06, |
| "loss": 0.8569, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.1036731297145e-06, |
| "loss": 0.8101, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.088866896615507e-06, |
| "loss": 0.8717, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.07406501074091e-06, |
| "loss": 0.8799, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.05926750576033e-06, |
| "loss": 0.8907, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.044474415333412e-06, |
| "loss": 0.915, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.029685773109766e-06, |
| "loss": 0.8735, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.014901612728887e-06, |
| "loss": 0.8799, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.000121967820077e-06, |
| "loss": 0.8005, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 7.985346872002349e-06, |
| "loss": 0.8986, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 7.970576358884391e-06, |
| "loss": 0.8006, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 7.955810462064457e-06, |
| "loss": 0.8613, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.941049215130292e-06, |
| "loss": 0.8695, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.926292651659083e-06, |
| "loss": 0.7912, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.911540805217341e-06, |
| "loss": 0.8759, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.896793709360865e-06, |
| "loss": 0.9341, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.882051397634646e-06, |
| "loss": 0.8267, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.86731390357278e-06, |
| "loss": 0.9242, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.85258126069842e-06, |
| "loss": 0.9165, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.837853502523671e-06, |
| "loss": 0.8172, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.823130662549537e-06, |
| "loss": 0.7805, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.80841277426583e-06, |
| "loss": 0.8644, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.793699871151096e-06, |
| "loss": 0.7943, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.778991986672546e-06, |
| "loss": 0.8663, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.764289154285977e-06, |
| "loss": 0.9321, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.749591407435683e-06, |
| "loss": 0.8254, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.734898779554405e-06, |
| "loss": 0.8495, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.720211304063225e-06, |
| "loss": 0.7735, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.705529014371517e-06, |
| "loss": 0.8281, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.690851943876857e-06, |
| "loss": 0.8624, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.676180125964937e-06, |
| "loss": 0.8576, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.661513594009522e-06, |
| "loss": 0.8365, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.646852381372332e-06, |
| "loss": 0.8533, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.632196521403001e-06, |
| "loss": 0.888, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.617546047438983e-06, |
| "loss": 0.8601, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.602900992805477e-06, |
| "loss": 0.8137, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.588261390815364e-06, |
| "loss": 0.752, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.5736272747691155e-06, |
| "loss": 0.8993, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.558998677954719e-06, |
| "loss": 0.907, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.54437563364762e-06, |
| "loss": 0.8303, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.529758175110623e-06, |
| "loss": 0.9041, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.515146335593831e-06, |
| "loss": 0.852, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.500540148334573e-06, |
| "loss": 0.8235, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.485939646557302e-06, |
| "loss": 0.9119, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.471344863473557e-06, |
| "loss": 0.8792, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.456755832281859e-06, |
| "loss": 0.7845, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.442172586167646e-06, |
| "loss": 0.8212, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.427595158303205e-06, |
| "loss": 0.89, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.4130235818475716e-06, |
| "loss": 0.9237, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.398457889946491e-06, |
| "loss": 0.7663, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.3838981157323106e-06, |
| "loss": 0.8394, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.369344292323915e-06, |
| "loss": 0.9196, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.354796452826665e-06, |
| "loss": 0.8249, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.340254630332294e-06, |
| "loss": 0.8618, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.325718857918861e-06, |
| "loss": 0.8295, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.311189168650663e-06, |
| "loss": 0.8183, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.296665595578147e-06, |
| "loss": 0.7794, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.282148171737864e-06, |
| "loss": 0.8062, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.267636930152362e-06, |
| "loss": 0.924, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.2531319038301375e-06, |
| "loss": 0.834, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.238633125765547e-06, |
| "loss": 0.8682, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.224140628938733e-06, |
| "loss": 0.8873, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.209654446315546e-06, |
| "loss": 0.84, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.195174610847482e-06, |
| "loss": 0.9044, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.180701155471592e-06, |
| "loss": 0.8749, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.166234113110422e-06, |
| "loss": 0.8453, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.151773516671917e-06, |
| "loss": 0.9285, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.1373193990493736e-06, |
| "loss": 0.8518, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.12287179312135e-06, |
| "loss": 0.8322, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.1084307317515785e-06, |
| "loss": 0.7956, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.093996247788919e-06, |
| "loss": 0.9011, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.079568374067269e-06, |
| "loss": 0.8109, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.065147143405477e-06, |
| "loss": 0.873, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.050732588607298e-06, |
| "loss": 0.8081, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.0363247424612866e-06, |
| "loss": 0.8634, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.021923637740745e-06, |
| "loss": 0.8678, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.0075293072036444e-06, |
| "loss": 0.8572, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.993141783592534e-06, |
| "loss": 0.9368, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.978761099634497e-06, |
| "loss": 0.8942, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.964387288041039e-06, |
| "loss": 0.865, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.950020381508048e-06, |
| "loss": 0.8522, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.935660412715703e-06, |
| "loss": 0.9353, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.921307414328393e-06, |
| "loss": 0.8812, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.9069614189946595e-06, |
| "loss": 0.9038, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.892622459347118e-06, |
| "loss": 0.8982, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.878290568002366e-06, |
| "loss": 0.8896, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.863965777560937e-06, |
| "loss": 0.8789, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.849648120607202e-06, |
| "loss": 0.8992, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.835337629709311e-06, |
| "loss": 0.8166, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.821034337419117e-06, |
| "loss": 0.8793, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.806738276272089e-06, |
| "loss": 0.887, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.792449478787254e-06, |
| "loss": 0.8764, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.778167977467112e-06, |
| "loss": 0.9078, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.763893804797574e-06, |
| "loss": 0.8724, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.749626993247876e-06, |
| "loss": 0.8296, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.735367575270505e-06, |
| "loss": 0.8192, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.7211155833011365e-06, |
| "loss": 0.8894, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.706871049758557e-06, |
| "loss": 0.8137, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.692634007044576e-06, |
| "loss": 0.8575, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.678404487543977e-06, |
| "loss": 0.8756, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.6641825236244165e-06, |
| "loss": 0.8874, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.6499681476363755e-06, |
| "loss": 0.7312, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.6357613919130756e-06, |
| "loss": 0.8994, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.621562288770393e-06, |
| "loss": 0.8871, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.607370870506812e-06, |
| "loss": 0.9007, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.593187169403321e-06, |
| "loss": 0.8452, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.5790112177233665e-06, |
| "loss": 0.7653, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.564843047712762e-06, |
| "loss": 0.8619, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.5506826915996195e-06, |
| "loss": 0.8752, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.536530181594276e-06, |
| "loss": 0.8493, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.522385549889232e-06, |
| "loss": 0.8408, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.5082488286590495e-06, |
| "loss": 0.8361, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.494120050060312e-06, |
| "loss": 0.8988, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.479999246231523e-06, |
| "loss": 0.8483, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.465886449293057e-06, |
| "loss": 0.812, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.451781691347076e-06, |
| "loss": 0.8965, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.437685004477441e-06, |
| "loss": 0.8683, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.4235964207496715e-06, |
| "loss": 0.8534, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.409515972210843e-06, |
| "loss": 0.8882, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.39544369088953e-06, |
| "loss": 0.8821, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.3813796087957325e-06, |
| "loss": 0.8779, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.367323757920794e-06, |
| "loss": 0.8037, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.353276170237339e-06, |
| "loss": 0.8293, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.339236877699196e-06, |
| "loss": 0.8385, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.3252059122413174e-06, |
| "loss": 0.8543, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.3111833057797266e-06, |
| "loss": 0.8408, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.297169090211418e-06, |
| "loss": 0.9593, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.28316329741431e-06, |
| "loss": 0.8642, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.269165959247166e-06, |
| "loss": 0.8369, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.255177107549499e-06, |
| "loss": 0.8829, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.24119677414154e-06, |
| "loss": 0.8821, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.227224990824126e-06, |
| "loss": 0.8428, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.213261789378656e-06, |
| "loss": 0.8961, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.199307201567007e-06, |
| "loss": 0.9534, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.185361259131454e-06, |
| "loss": 0.7416, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.1714239937946186e-06, |
| "loss": 0.8136, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.157495437259378e-06, |
| "loss": 0.892, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.143575621208797e-06, |
| "loss": 0.8934, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.129664577306069e-06, |
| "loss": 0.9142, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.115762337194418e-06, |
| "loss": 0.9032, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.1018689324970545e-06, |
| "loss": 0.8014, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.087984394817093e-06, |
| "loss": 0.7952, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.074108755737463e-06, |
| "loss": 0.8896, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.0602420468208675e-06, |
| "loss": 0.886, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.046384299609695e-06, |
| "loss": 0.8737, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.032535545625939e-06, |
| "loss": 0.8013, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.0186958163711425e-06, |
| "loss": 0.8038, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.004865143326324e-06, |
| "loss": 0.7449, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.9910435579518925e-06, |
| "loss": 0.8309, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.977231091687596e-06, |
| "loss": 0.7667, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.963427775952429e-06, |
| "loss": 0.8381, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.949633642144585e-06, |
| "loss": 0.8217, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.9358487216413505e-06, |
| "loss": 0.8281, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.922073045799077e-06, |
| "loss": 0.8996, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.9083066459530765e-06, |
| "loss": 0.8479, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.894549553417559e-06, |
| "loss": 0.8805, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.880801799485565e-06, |
| "loss": 0.8423, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.867063415428902e-06, |
| "loss": 0.845, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.853334432498047e-06, |
| "loss": 0.8581, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.839614881922109e-06, |
| "loss": 0.9121, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.825904794908723e-06, |
| "loss": 0.8969, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.812204202644021e-06, |
| "loss": 0.8614, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.7985131362925184e-06, |
| "loss": 0.8348, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.784831626997067e-06, |
| "loss": 0.8754, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.7711597058787846e-06, |
| "loss": 0.8247, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.757497404036973e-06, |
| "loss": 0.8671, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.743844752549051e-06, |
| "loss": 0.8705, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.7302017824705e-06, |
| "loss": 0.8406, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.716568524834758e-06, |
| "loss": 0.7667, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.702945010653189e-06, |
| "loss": 0.789, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.689331270914986e-06, |
| "loss": 0.9366, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.675727336587104e-06, |
| "loss": 0.8864, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.662133238614208e-06, |
| "loss": 0.8793, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.648549007918565e-06, |
| "loss": 0.8797, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.6349746754000215e-06, |
| "loss": 0.8187, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.621410271935894e-06, |
| "loss": 0.8347, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.60785582838092e-06, |
| "loss": 0.8705, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.594311375567174e-06, |
| "loss": 0.9126, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.580776944304014e-06, |
| "loss": 0.827, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.567252565377994e-06, |
| "loss": 0.8589, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.553738269552815e-06, |
| "loss": 0.8509, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.5402340875692184e-06, |
| "loss": 0.8497, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.526740050144967e-06, |
| "loss": 0.8232, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.5132561879747314e-06, |
| "loss": 0.8533, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.4997825317300404e-06, |
| "loss": 0.8497, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.486319112059208e-06, |
| "loss": 0.876, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.472865959587258e-06, |
| "loss": 0.8147, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.459423104915873e-06, |
| "loss": 0.8146, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.445990578623301e-06, |
| "loss": 0.8156, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.432568411264294e-06, |
| "loss": 0.8208, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.419156633370049e-06, |
| "loss": 0.8915, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.405755275448125e-06, |
| "loss": 0.7963, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.392364367982375e-06, |
| "loss": 0.9692, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.378983941432897e-06, |
| "loss": 0.8889, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.365614026235924e-06, |
| "loss": 0.832, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.3522546528037985e-06, |
| "loss": 0.8367, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.338905851524878e-06, |
| "loss": 0.8331, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.325567652763468e-06, |
| "loss": 0.8031, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.31224008685976e-06, |
| "loss": 0.8256, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.298923184129754e-06, |
| "loss": 0.8388, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.285616974865208e-06, |
| "loss": 0.8369, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.272321489333543e-06, |
| "loss": 0.8734, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.259036757777789e-06, |
| "loss": 0.7801, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.245762810416517e-06, |
| "loss": 0.6722, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.232499677443767e-06, |
| "loss": 0.5928, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.219247389028973e-06, |
| "loss": 0.6237, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.20600597531692e-06, |
| "loss": 0.6309, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.1927754664276265e-06, |
| "loss": 0.6686, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.179555892456337e-06, |
| "loss": 0.6097, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.166347283473403e-06, |
| "loss": 0.5675, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.1531496695242375e-06, |
| "loss": 0.6428, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.139963080629249e-06, |
| "loss": 0.5883, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.126787546783758e-06, |
| "loss": 0.6445, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.113623097957952e-06, |
| "loss": 0.6252, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.100469764096789e-06, |
| "loss": 0.6432, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.087327575119956e-06, |
| "loss": 0.6956, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.074196560921776e-06, |
| "loss": 0.5548, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.061076751371172e-06, |
| "loss": 0.6143, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.047968176311555e-06, |
| "loss": 0.6108, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.034870865560807e-06, |
| "loss": 0.5721, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 5.021784848911161e-06, |
| "loss": 0.6736, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 5.008710156129184e-06, |
| "loss": 0.5748, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.995646816955671e-06, |
| "loss": 0.5915, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.982594861105594e-06, |
| "loss": 0.6241, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.9695543182680295e-06, |
| "loss": 0.6154, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.956525218106097e-06, |
| "loss": 0.5598, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.943507590256883e-06, |
| "loss": 0.6015, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.930501464331385e-06, |
| "loss": 0.5638, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.917506869914432e-06, |
| "loss": 0.5978, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.904523836564622e-06, |
| "loss": 0.5699, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.891552393814257e-06, |
| "loss": 0.5989, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.87859257116927e-06, |
| "loss": 0.5648, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.865644398109177e-06, |
| "loss": 0.6132, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.8527079040869674e-06, |
| "loss": 0.5491, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.839783118529095e-06, |
| "loss": 0.5401, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.826870070835358e-06, |
| "loss": 0.6046, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.813968790378866e-06, |
| "loss": 0.5798, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.801079306505957e-06, |
| "loss": 0.6508, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.788201648536137e-06, |
| "loss": 0.5828, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.7753358457620105e-06, |
| "loss": 0.5699, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.762481927449222e-06, |
| "loss": 0.6081, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.749639922836373e-06, |
| "loss": 0.6003, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.7368098611349715e-06, |
| "loss": 0.5853, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.723991771529355e-06, |
| "loss": 0.5572, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.711185683176628e-06, |
| "loss": 0.6312, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.698391625206608e-06, |
| "loss": 0.5657, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.685609626721723e-06, |
| "loss": 0.623, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.672839716796995e-06, |
| "loss": 0.5699, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.660081924479934e-06, |
| "loss": 0.6357, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.647336278790488e-06, |
| "loss": 0.5201, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.634602808720977e-06, |
| "loss": 0.6179, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.621881543236027e-06, |
| "loss": 0.6215, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.609172511272494e-06, |
| "loss": 0.5728, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.596475741739426e-06, |
| "loss": 0.6094, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.583791263517949e-06, |
| "loss": 0.5571, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.571119105461258e-06, |
| "loss": 0.6018, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.5584592963945075e-06, |
| "loss": 0.5924, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.545811865114761e-06, |
| "loss": 0.6335, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.533176840390944e-06, |
| "loss": 0.5183, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.520554250963733e-06, |
| "loss": 0.6234, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.5079441255455455e-06, |
| "loss": 0.6575, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.495346492820431e-06, |
| "loss": 0.5458, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.482761381444025e-06, |
| "loss": 0.6248, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.4701888200434855e-06, |
| "loss": 0.5845, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.457628837217417e-06, |
| "loss": 0.5405, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.445081461535813e-06, |
| "loss": 0.6076, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.432546721540002e-06, |
| "loss": 0.6205, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.420024645742544e-06, |
| "loss": 0.7318, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.407515262627221e-06, |
| "loss": 0.5836, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.395018600648925e-06, |
| "loss": 0.5871, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.382534688233616e-06, |
| "loss": 0.5683, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.370063553778254e-06, |
| "loss": 0.5851, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.357605225650727e-06, |
| "loss": 0.5175, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.345159732189809e-06, |
| "loss": 0.5629, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.33272710170506e-06, |
| "loss": 0.535, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.320307362476791e-06, |
| "loss": 0.6414, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.307900542755987e-06, |
| "loss": 0.552, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.295506670764245e-06, |
| "loss": 0.6079, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.283125774693709e-06, |
| "loss": 0.5609, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.2707578827070186e-06, |
| "loss": 0.6685, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.258403022937207e-06, |
| "loss": 0.5232, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.246061223487692e-06, |
| "loss": 0.6708, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.233732512432166e-06, |
| "loss": 0.5504, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.2214169178145545e-06, |
| "loss": 0.5605, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.209114467648946e-06, |
| "loss": 0.5665, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.196825189919528e-06, |
| "loss": 0.6009, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.1845491125805324e-06, |
| "loss": 0.552, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.172286263556158e-06, |
| "loss": 0.5794, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.160036670740513e-06, |
| "loss": 0.5853, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.1478003619975484e-06, |
| "loss": 0.6261, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.135577365161017e-06, |
| "loss": 0.6306, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.123367708034361e-06, |
| "loss": 0.5868, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.111171418390712e-06, |
| "loss": 0.6353, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.09898852397276e-06, |
| "loss": 0.6693, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.086819052492757e-06, |
| "loss": 0.5605, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.074663031632403e-06, |
| "loss": 0.6312, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.062520489042808e-06, |
| "loss": 0.6125, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.050391452344423e-06, |
| "loss": 0.5426, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.038275949126973e-06, |
| "loss": 0.6939, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.026174006949409e-06, |
| "loss": 0.6369, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.014085653339827e-06, |
| "loss": 0.6463, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.002010915795412e-06, |
| "loss": 0.6024, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.989949821782377e-06, |
| "loss": 0.6676, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.977902398735914e-06, |
| "loss": 0.6321, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.965868674060092e-06, |
| "loss": 0.6447, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.953848675127849e-06, |
| "loss": 0.6315, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.941842429280874e-06, |
| "loss": 0.5678, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.929849963829595e-06, |
| "loss": 0.5989, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.917871306053083e-06, |
| "loss": 0.6058, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.905906483198999e-06, |
| "loss": 0.666, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.89395552248354e-06, |
| "loss": 0.6044, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.882018451091367e-06, |
| "loss": 0.6196, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.870095296175547e-06, |
| "loss": 0.6439, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.858186084857497e-06, |
| "loss": 0.6195, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.84629084422691e-06, |
| "loss": 0.6068, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.834409601341703e-06, |
| "loss": 0.6093, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.822542383227953e-06, |
| "loss": 0.5937, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.8106892168798305e-06, |
| "loss": 0.6464, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.7988501292595546e-06, |
| "loss": 0.5303, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.787025147297302e-06, |
| "loss": 0.566, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.7752142978911786e-06, |
| "loss": 0.6363, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7634176079071383e-06, |
| "loss": 0.5824, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7516351041789244e-06, |
| "loss": 0.572, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.739866813508013e-06, |
| "loss": 0.6178, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.728112762663548e-06, |
| "loss": 0.5124, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7163729783822835e-06, |
| "loss": 0.5652, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.704647487368528e-06, |
| "loss": 0.5709, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.6929363162940655e-06, |
| "loss": 0.6014, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.6812394917981166e-06, |
| "loss": 0.6475, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.6695570404872604e-06, |
| "loss": 0.5683, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.657888988935383e-06, |
| "loss": 0.5726, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.646235363683629e-06, |
| "loss": 0.5754, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.6345961912403004e-06, |
| "loss": 0.5957, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.622971498080852e-06, |
| "loss": 0.6541, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.6113613106477853e-06, |
| "loss": 0.6189, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.599765655350612e-06, |
| "loss": 0.6087, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.588184558565787e-06, |
| "loss": 0.5759, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.5766180466366476e-06, |
| "loss": 0.5849, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.565066145873355e-06, |
| "loss": 0.5829, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.5535288825528435e-06, |
| "loss": 0.6441, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.542006282918733e-06, |
| "loss": 0.6552, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.53049837318131e-06, |
| "loss": 0.6291, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.5190051795174308e-06, |
| "loss": 0.6487, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.5075267280704793e-06, |
| "loss": 0.5332, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.4960630449503198e-06, |
| "loss": 0.634, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.484614156233196e-06, |
| "loss": 0.6352, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.473180087961727e-06, |
| "loss": 0.6311, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.4617608661448043e-06, |
| "loss": 0.6034, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.4503565167575505e-06, |
| "loss": 0.5944, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.438967065741262e-06, |
| "loss": 0.5969, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.4275925390033426e-06, |
| "loss": 0.5894, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.4162329624172454e-06, |
| "loss": 0.65, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.4048883618224315e-06, |
| "loss": 0.5009, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.3935587630242705e-06, |
| "loss": 0.5516, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.3822441917940353e-06, |
| "loss": 0.5567, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.3709446738687966e-06, |
| "loss": 0.6938, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.359660234951393e-06, |
| "loss": 0.5418, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.348390900710358e-06, |
| "loss": 0.6426, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.3371366967798657e-06, |
| "loss": 0.648, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.325897648759684e-06, |
| "loss": 0.6037, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.3146737822150945e-06, |
| "loss": 0.6202, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.303465122676851e-06, |
| "loss": 0.6421, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.2922716956411105e-06, |
| "loss": 0.6042, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.2810935265693957e-06, |
| "loss": 0.6287, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.269930640888499e-06, |
| "loss": 0.6013, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2587830639904726e-06, |
| "loss": 0.546, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.247650821232523e-06, |
| "loss": 0.5635, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2365339379369963e-06, |
| "loss": 0.6191, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2254324393912895e-06, |
| "loss": 0.5882, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.214346350847807e-06, |
| "loss": 0.5522, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2032756975238976e-06, |
| "loss": 0.5134, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1922205046018008e-06, |
| "loss": 0.5926, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1811807972285967e-06, |
| "loss": 0.6242, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.17015660051613e-06, |
| "loss": 0.6733, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1591479395409672e-06, |
| "loss": 0.5285, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.148154839344334e-06, |
| "loss": 0.5384, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1371773249320714e-06, |
| "loss": 0.6569, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1262154212745456e-06, |
| "loss": 0.6274, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.1152691533066393e-06, |
| "loss": 0.6248, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.104338545927643e-06, |
| "loss": 0.6544, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.093423624001248e-06, |
| "loss": 0.6987, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.082524412355452e-06, |
| "loss": 0.6701, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.0716409357825194e-06, |
| "loss": 0.575, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.0607732190389228e-06, |
| "loss": 0.6877, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.0499212868452878e-06, |
| "loss": 0.6576, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.0390851638863293e-06, |
| "loss": 0.5698, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.0282648748108135e-06, |
| "loss": 0.6381, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.017460444231477e-06, |
| "loss": 0.6558, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.0066718967249853e-06, |
| "loss": 0.581, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 2.995899256831888e-06, |
| "loss": 0.637, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 2.9851425490565256e-06, |
| "loss": 0.6435, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 2.974401797867025e-06, |
| "loss": 0.6494, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.963677027695192e-06, |
| "loss": 0.6199, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.952968262936502e-06, |
| "loss": 0.6891, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.9422755279500093e-06, |
| "loss": 0.6168, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.931598847058311e-06, |
| "loss": 0.6136, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.9209382445474853e-06, |
| "loss": 0.6383, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.9102937446670364e-06, |
| "loss": 0.6282, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.8996653716298374e-06, |
| "loss": 0.548, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.889053149612088e-06, |
| "loss": 0.6394, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.878457102753238e-06, |
| "loss": 0.5881, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.867877255155951e-06, |
| "loss": 0.5505, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.8573136308860374e-06, |
| "loss": 0.5873, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.846766253972405e-06, |
| "loss": 0.6015, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.836235148407016e-06, |
| "loss": 0.5862, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.825720338144797e-06, |
| "loss": 0.5789, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.815221847103633e-06, |
| "loss": 0.6082, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.8047396991642695e-06, |
| "loss": 0.6286, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.794273918170287e-06, |
| "loss": 0.6544, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7838245279280305e-06, |
| "loss": 0.6143, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7733915522065645e-06, |
| "loss": 0.624, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7629750147376122e-06, |
| "loss": 0.6589, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7525749392155122e-06, |
| "loss": 0.6181, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7421913492971506e-06, |
| "loss": 0.5982, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.7318242686019158e-06, |
| "loss": 0.637, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.7214737207116424e-06, |
| "loss": 0.538, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.7111397291705566e-06, |
| "loss": 0.5164, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.7008223174852355e-06, |
| "loss": 0.5496, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.6905215091245205e-06, |
| "loss": 0.6428, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.6802373275195084e-06, |
| "loss": 0.6172, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.6699697960634605e-06, |
| "loss": 0.5915, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6597189381117707e-06, |
| "loss": 0.571, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6494847769819022e-06, |
| "loss": 0.6209, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.639267335953343e-06, |
| "loss": 0.5402, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.62906663826754e-06, |
| "loss": 0.5828, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.61888270712787e-06, |
| "loss": 0.6265, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.608715565699549e-06, |
| "loss": 0.5801, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.5985652371096213e-06, |
| "loss": 0.5401, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5884317444468785e-06, |
| "loss": 0.5794, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.578315110761812e-06, |
| "loss": 0.5339, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5682153590665803e-06, |
| "loss": 0.5677, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5581325123349155e-06, |
| "loss": 0.5647, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5480665935021208e-06, |
| "loss": 0.5936, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5380176254649804e-06, |
| "loss": 0.6572, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.527985631081723e-06, |
| "loss": 0.6287, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.5179706331719654e-06, |
| "loss": 0.5791, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.507972654516676e-06, |
| "loss": 0.6003, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.497991717858086e-06, |
| "loss": 0.6339, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.4880278458996877e-06, |
| "loss": 0.6081, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.478081061306131e-06, |
| "loss": 0.5919, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.4681513867032203e-06, |
| "loss": 0.5359, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.458238844677827e-06, |
| "loss": 0.6063, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.4483434577778544e-06, |
| "loss": 0.6447, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.438465248512183e-06, |
| "loss": 0.6405, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.428604239350617e-06, |
| "loss": 0.6283, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.418760452723846e-06, |
| "loss": 0.6268, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.408933911023371e-06, |
| "loss": 0.611, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.3991246366014743e-06, |
| "loss": 0.6231, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.3893326517711546e-06, |
| "loss": 0.6179, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.3795579788060928e-06, |
| "loss": 0.5193, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.369800639940574e-06, |
| "loss": 0.5737, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.360060657369474e-06, |
| "loss": 0.7327, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.3503380532481667e-06, |
| "loss": 0.6448, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.340632849692515e-06, |
| "loss": 0.5915, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.330945068778789e-06, |
| "loss": 0.6301, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.3212747325436346e-06, |
| "loss": 0.616, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.3116218629840104e-06, |
| "loss": 0.5693, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.3019864820571447e-06, |
| "loss": 0.6084, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2923686116804923e-06, |
| "loss": 0.659, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2827682737316704e-06, |
| "loss": 0.6292, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2731854900484153e-06, |
| "loss": 0.5643, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2636202824285312e-06, |
| "loss": 0.5578, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2540726726298547e-06, |
| "loss": 0.6557, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.2445426823701723e-06, |
| "loss": 0.5901, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.2350303333272127e-06, |
| "loss": 0.5043, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.225535647138557e-06, |
| "loss": 0.5864, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.2160586454016262e-06, |
| "loss": 0.6668, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.2065993496736036e-06, |
| "loss": 0.6162, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.1971577814714017e-06, |
| "loss": 0.6151, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.1877339622716066e-06, |
| "loss": 0.6437, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.178327913510431e-06, |
| "loss": 0.6227, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.168939656583664e-06, |
| "loss": 0.6604, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1595692128466308e-06, |
| "loss": 0.5771, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1502166036141293e-06, |
| "loss": 0.5902, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.14088185016039e-06, |
| "loss": 0.5705, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.131564973719039e-06, |
| "loss": 0.64, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.122265995483016e-06, |
| "loss": 0.5684, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.112984936604573e-06, |
| "loss": 0.5863, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.1037218181951767e-06, |
| "loss": 0.6644, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.094476661325504e-06, |
| "loss": 0.6989, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.085249487025367e-06, |
| "loss": 0.5805, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.076040316283673e-06, |
| "loss": 0.5917, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.0668491700483784e-06, |
| "loss": 0.672, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.057676069226436e-06, |
| "loss": 0.5865, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.0485210346837557e-06, |
| "loss": 0.7023, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0393840872451532e-06, |
| "loss": 0.5885, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0302652476942965e-06, |
| "loss": 0.5966, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0211645367736667e-06, |
| "loss": 0.6051, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0120819751845068e-06, |
| "loss": 0.557, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.003017583586776e-06, |
| "loss": 0.658, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.9939713825991103e-06, |
| "loss": 0.6416, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.9849433927987492e-06, |
| "loss": 0.6232, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.9759336347215287e-06, |
| "loss": 0.599, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9669421288617994e-06, |
| "loss": 0.5516, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9579688956724e-06, |
| "loss": 0.6748, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9490139555646015e-06, |
| "loss": 0.6145, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.9400773289080644e-06, |
| "loss": 0.5702, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.931159036030793e-06, |
| "loss": 0.5839, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.92225909721909e-06, |
| "loss": 0.6089, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.913377532717505e-06, |
| "loss": 0.583, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.9045143627287932e-06, |
| "loss": 0.5174, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.895669607413867e-06, |
| "loss": 0.6501, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8868432868917497e-06, |
| "loss": 0.5511, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8780354212395423e-06, |
| "loss": 0.631, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8692460304923476e-06, |
| "loss": 0.6182, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8604751346432625e-06, |
| "loss": 0.6095, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8517227536433012e-06, |
| "loss": 0.6416, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8429889074013696e-06, |
| "loss": 0.5958, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8342736157842056e-06, |
| "loss": 0.5648, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8255768986163558e-06, |
| "loss": 0.6107, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8168987756800938e-06, |
| "loss": 0.6224, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.808239266715419e-06, |
| "loss": 0.6145, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.7995983914199722e-06, |
| "loss": 0.6027, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.7909761694490235e-06, |
| "loss": 0.554, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7823726204154013e-06, |
| "loss": 0.5545, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.773787763889463e-06, |
| "loss": 0.5029, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7652216193990523e-06, |
| "loss": 0.6296, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.756674206429434e-06, |
| "loss": 0.6511, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.748145544423282e-06, |
| "loss": 0.615, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7396356527806059e-06, |
| "loss": 0.5439, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.731144550858722e-06, |
| "loss": 0.5256, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.7226722579722065e-06, |
| "loss": 0.6231, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.7142187933928544e-06, |
| "loss": 0.5822, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.705784176349622e-06, |
| "loss": 0.625, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.6973684260286071e-06, |
| "loss": 0.5854, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.6889715615729762e-06, |
| "loss": 0.5954, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.6805936020829528e-06, |
| "loss": 0.5608, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.6722345666157448e-06, |
| "loss": 0.6612, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.66389447418552e-06, |
| "loss": 0.6153, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6555733437633559e-06, |
| "loss": 0.5863, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6472711942771934e-06, |
| "loss": 0.6367, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6389880446118078e-06, |
| "loss": 0.6685, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.630723913608745e-06, |
| "loss": 0.6, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6224788200662934e-06, |
| "loss": 0.5336, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6142527827394373e-06, |
| "loss": 0.589, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.60604582033982e-06, |
| "loss": 0.6192, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5978579515356819e-06, |
| "loss": 0.5383, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5896891949518456e-06, |
| "loss": 0.6125, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5815395691696466e-06, |
| "loss": 0.5441, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5734090927269163e-06, |
| "loss": 0.637, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5652977841179196e-06, |
| "loss": 0.6245, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5572056617933228e-06, |
| "loss": 0.567, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5491327441601478e-06, |
| "loss": 0.6124, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.541079049581733e-06, |
| "loss": 0.582, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.533044596377693e-06, |
| "loss": 0.5511, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.525029402823872e-06, |
| "loss": 0.5777, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.517033487152304e-06, |
| "loss": 0.61, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.5090568675511696e-06, |
| "loss": 0.5619, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.5010995621647672e-06, |
| "loss": 0.5499, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.4931615890934447e-06, |
| "loss": 0.6256, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.485242966393593e-06, |
| "loss": 0.5546, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4773437120775713e-06, |
| "loss": 0.5737, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4694638441136955e-06, |
| "loss": 0.5849, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.461603380426173e-06, |
| "loss": 0.5586, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4537623388950795e-06, |
| "loss": 0.5464, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4459407373563072e-06, |
| "loss": 0.6119, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4381385936015302e-06, |
| "loss": 0.5728, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4303559253781595e-06, |
| "loss": 0.5854, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4225927503893145e-06, |
| "loss": 0.61, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4148490862937624e-06, |
| "loss": 0.5226, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4071249507058926e-06, |
| "loss": 0.6071, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.3994203611956826e-06, |
| "loss": 0.5532, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.391735335288632e-06, |
| "loss": 0.5519, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.3840698904657547e-06, |
| "loss": 0.542, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3764240441635113e-06, |
| "loss": 0.6386, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3687978137737933e-06, |
| "loss": 0.5352, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3611912166438656e-06, |
| "loss": 0.5938, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.353604270076334e-06, |
| "loss": 0.604, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3460369913291072e-06, |
| "loss": 0.5674, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3384893976153546e-06, |
| "loss": 0.5874, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3309615061034676e-06, |
| "loss": 0.5692, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3234533339170253e-06, |
| "loss": 0.594, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3159648981347485e-06, |
| "loss": 0.5667, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3084962157904623e-06, |
| "loss": 0.5883, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3010473038730632e-06, |
| "loss": 0.6043, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.2936181793264691e-06, |
| "loss": 0.5586, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.2862088590496013e-06, |
| "loss": 0.5683, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.278819359896315e-06, |
| "loss": 0.594, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.2714496986753932e-06, |
| "loss": 0.6421, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2640998921504865e-06, |
| "loss": 0.5996, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.256769957040085e-06, |
| "loss": 0.5551, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2494599100174753e-06, |
| "loss": 0.6013, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2421697677107081e-06, |
| "loss": 0.568, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2348995467025527e-06, |
| "loss": 0.5827, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2276492635304694e-06, |
| "loss": 0.6073, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2204189346865614e-06, |
| "loss": 0.5888, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.213208576617545e-06, |
| "loss": 0.5942, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.2060182057247083e-06, |
| "loss": 0.6544, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1988478383638724e-06, |
| "loss": 0.6649, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1916974908453649e-06, |
| "loss": 0.6244, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1845671794339597e-06, |
| "loss": 0.6132, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.177456920348873e-06, |
| "loss": 0.6241, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1703667297636933e-06, |
| "loss": 0.5846, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1632966238063693e-06, |
| "loss": 0.5908, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1562466185591558e-06, |
| "loss": 0.6116, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1492167300585944e-06, |
| "loss": 0.6511, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1422069742954546e-06, |
| "loss": 0.5717, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.135217367214726e-06, |
| "loss": 0.5655, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1282479247155498e-06, |
| "loss": 0.6726, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1212986626512112e-06, |
| "loss": 0.571, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.114369596829088e-06, |
| "loss": 0.6497, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.1074607430106154e-06, |
| "loss": 0.5856, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.100572116911255e-06, |
| "loss": 0.5736, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0937037342004542e-06, |
| "loss": 0.6158, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0868556105016193e-06, |
| "loss": 0.5219, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0800277613920672e-06, |
| "loss": 0.6061, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0732202024029992e-06, |
| "loss": 0.5343, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0664329490194602e-06, |
| "loss": 0.5639, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0596660166803142e-06, |
| "loss": 0.5989, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0529194207781879e-06, |
| "loss": 0.6231, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0461931766594669e-06, |
| "loss": 0.5066, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.039487299624221e-06, |
| "loss": 0.619, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0328018049262134e-06, |
| "loss": 0.6217, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0261367077728279e-06, |
| "loss": 0.5551, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.0194920233250594e-06, |
| "loss": 0.5696, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.012867766697464e-06, |
| "loss": 0.6388, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.0062639529581341e-06, |
| "loss": 0.6662, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.996805971286638e-07, |
| "loss": 0.6136, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.931177141841087e-07, |
| "loss": 0.6511, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.865753190529537e-07, |
| "loss": 0.608, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.800534266170792e-07, |
| "loss": 0.6362, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.735520517117403e-07, |
| "loss": 0.6109, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.67071209125502e-07, |
| "loss": 0.5991, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.606109136002428e-07, |
| "loss": 0.5994, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.541711798310883e-07, |
| "loss": 0.5873, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.47752022466405e-07, |
| "loss": 0.645, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.413534561077464e-07, |
| "loss": 0.6277, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.349754953098333e-07, |
| "loss": 0.5621, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.286181545805096e-07, |
| "loss": 0.6431, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.222814483807185e-07, |
| "loss": 0.5608, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.159653911244693e-07, |
| "loss": 0.5447, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.096699971787959e-07, |
| "loss": 0.6147, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.033952808637314e-07, |
| "loss": 0.6246, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.971412564522719e-07, |
| "loss": 0.5912, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.909079381703556e-07, |
| "loss": 0.5556, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.84695340196805e-07, |
| "loss": 0.6744, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.785034766633271e-07, |
| "loss": 0.5847, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.723323616544477e-07, |
| "loss": 0.6199, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.661820092075124e-07, |
| "loss": 0.5709, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.60052433312627e-07, |
| "loss": 0.5375, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.539436479126429e-07, |
| "loss": 0.5739, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.478556669031179e-07, |
| "loss": 0.6239, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.417885041322871e-07, |
| "loss": 0.6661, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.357421734010274e-07, |
| "loss": 0.5943, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.29716688462836e-07, |
| "loss": 0.5838, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.237120630237871e-07, |
| "loss": 0.613, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.17728310742506e-07, |
| "loss": 0.62, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.117654452301449e-07, |
| "loss": 0.5753, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.058234800503328e-07, |
| "loss": 0.5743, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 7.999024287191726e-07, |
| "loss": 0.5618, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.940023047051782e-07, |
| "loss": 0.6197, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.881231214292751e-07, |
| "loss": 0.6447, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.822648922647458e-07, |
| "loss": 0.6421, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.764276305372143e-07, |
| "loss": 0.5481, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.706113495246059e-07, |
| "loss": 0.6203, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.648160624571244e-07, |
| "loss": 0.5512, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.59041782517217e-07, |
| "loss": 0.5436, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.532885228395492e-07, |
| "loss": 0.6077, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.475562965109695e-07, |
| "loss": 0.5781, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.418451165704832e-07, |
| "loss": 0.6087, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.361549960092206e-07, |
| "loss": 0.6487, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.304859477704096e-07, |
| "loss": 0.5781, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.248379847493469e-07, |
| "loss": 0.5511, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.192111197933616e-07, |
| "loss": 0.6226, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.136053657017983e-07, |
| "loss": 0.6152, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 7.08020735225976e-07, |
| "loss": 0.6032, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 7.024572410691655e-07, |
| "loss": 0.6595, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.969148958865579e-07, |
| "loss": 0.6251, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.913937122852443e-07, |
| "loss": 0.5305, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.858937028241675e-07, |
| "loss": 0.622, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.804148800141164e-07, |
| "loss": 0.6042, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.749572563176821e-07, |
| "loss": 0.6033, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.695208441492352e-07, |
| "loss": 0.5716, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.64105655874897e-07, |
| "loss": 0.5973, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.587117038125091e-07, |
| "loss": 0.5147, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.533390002316142e-07, |
| "loss": 0.5567, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.4798755735341e-07, |
| "loss": 0.5631, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.426573873507458e-07, |
| "loss": 0.604, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.373485023480719e-07, |
| "loss": 0.6302, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.320609144214263e-07, |
| "loss": 0.6032, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.267946355984001e-07, |
| "loss": 0.6038, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.215496778581209e-07, |
| "loss": 0.6092, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.163260531312032e-07, |
| "loss": 0.6793, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.11123773299751e-07, |
| "loss": 0.5852, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.05942850197303e-07, |
| "loss": 0.6085, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.007832956088278e-07, |
| "loss": 0.6221, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.956451212706804e-07, |
| "loss": 0.5289, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.905283388705863e-07, |
| "loss": 0.5876, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.854329600476105e-07, |
| "loss": 0.5728, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.803589963921274e-07, |
| "loss": 0.6524, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.753064594458058e-07, |
| "loss": 0.6747, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.70275360701572e-07, |
| "loss": 0.5611, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.652657116035865e-07, |
| "loss": 0.5761, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.602775235472169e-07, |
| "loss": 0.6158, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.553108078790215e-07, |
| "loss": 0.5551, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.503655758967031e-07, |
| "loss": 0.6172, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.454418388491112e-07, |
| "loss": 0.5876, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.405396079361847e-07, |
| "loss": 0.6036, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.356588943089569e-07, |
| "loss": 0.6317, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.307997090695083e-07, |
| "loss": 0.6098, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.259620632709517e-07, |
| "loss": 0.5634, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.211459679174036e-07, |
| "loss": 0.5962, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.163514339639586e-07, |
| "loss": 0.6149, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.115784723166695e-07, |
| "loss": 0.5573, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.068270938325182e-07, |
| "loss": 0.57, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.020973093193882e-07, |
| "loss": 0.5516, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 4.973891295360467e-07, |
| "loss": 0.5932, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 4.92702565192118e-07, |
| "loss": 0.6374, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.880376269480525e-07, |
| "loss": 0.5794, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.833943254151152e-07, |
| "loss": 0.5405, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.787726711553486e-07, |
| "loss": 0.5969, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.741726746815589e-07, |
| "loss": 0.585, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.69594346457285e-07, |
| "loss": 0.653, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.6503769689677733e-07, |
| "loss": 0.453, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.60502736364975e-07, |
| "loss": 0.4873, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.5598947517748183e-07, |
| "loss": 0.5712, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.5149792360054236e-07, |
| "loss": 0.5348, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.4702809185101835e-07, |
| "loss": 0.6187, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.4257999009636567e-07, |
| "loss": 0.6246, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.3815362845460954e-07, |
| "loss": 0.5524, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.3374901699432923e-07, |
| "loss": 0.5961, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.293661657346204e-07, |
| "loss": 0.6077, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.2500508464509037e-07, |
| "loss": 0.6162, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.206657836458161e-07, |
| "loss": 0.6546, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.16348272607342e-07, |
| "loss": 0.5925, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.12052561350641e-07, |
| "loss": 0.6192, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.0777865964710005e-07, |
| "loss": 0.5992, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.03526577218496e-07, |
| "loss": 0.5559, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 3.99296323736974e-07, |
| "loss": 0.54, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.950879088250226e-07, |
| "loss": 0.626, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.9090134205546195e-07, |
| "loss": 0.5884, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.867366329514055e-07, |
| "loss": 0.7014, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.8259379098625205e-07, |
| "loss": 0.6048, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.784728255836623e-07, |
| "loss": 0.6457, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.7437374611752674e-07, |
| "loss": 0.5924, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.702965619119614e-07, |
| "loss": 0.6375, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.662412822412687e-07, |
| "loss": 0.6007, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.622079163299319e-07, |
| "loss": 0.5684, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.5819647335258555e-07, |
| "loss": 0.5981, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.54206962433995e-07, |
| "loss": 0.5097, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.502393926490355e-07, |
| "loss": 0.5963, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.462937730226812e-07, |
| "loss": 0.5903, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.42370112529965e-07, |
| "loss": 0.6562, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.3846842009598093e-07, |
| "loss": 0.6475, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.3458870459584294e-07, |
| "loss": 0.6146, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.3073097485468275e-07, |
| "loss": 0.6771, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.268952396476144e-07, |
| "loss": 0.5776, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.2308150769972404e-07, |
| "loss": 0.5338, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.1928978768605234e-07, |
| "loss": 0.5561, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.15520088231559e-07, |
| "loss": 0.65, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.117724179111248e-07, |
| "loss": 0.5808, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.0804678524951504e-07, |
| "loss": 0.6247, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.043431987213663e-07, |
| "loss": 0.5457, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.006616667511697e-07, |
| "loss": 0.589, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.970021977132487e-07, |
| "loss": 0.6079, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.9336479993173795e-07, |
| "loss": 0.6447, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.8974948168057237e-07, |
| "loss": 0.5963, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.861562511834559e-07, |
| "loss": 0.6023, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.825851166138549e-07, |
| "loss": 0.5722, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.7903608609497365e-07, |
| "loss": 0.5964, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.7550916769973456e-07, |
| "loss": 0.6337, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.720043694507657e-07, |
| "loss": 0.5648, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.685216993203732e-07, |
| "loss": 0.6046, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.650611652305346e-07, |
| "loss": 0.683, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.6162277505287216e-07, |
| "loss": 0.6424, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.582065366086362e-07, |
| "loss": 0.5782, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.5481245766869165e-07, |
| "loss": 0.6178, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.514405459534974e-07, |
| "loss": 0.6102, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.4809080913308583e-07, |
| "loss": 0.6055, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.447632548270529e-07, |
| "loss": 0.5884, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.414578906045317e-07, |
| "loss": 0.5609, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.3817472398418362e-07, |
| "loss": 0.5514, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.3491376243417685e-07, |
| "loss": 0.632, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.3167501337216792e-07, |
| "loss": 0.5757, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.2845848416528816e-07, |
| "loss": 0.6259, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.2526418213012602e-07, |
| "loss": 0.6463, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.220921145327115e-07, |
| "loss": 0.6112, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.189422885884962e-07, |
| "loss": 0.6257, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.158147114623399e-07, |
| "loss": 0.5695, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.127093902684907e-07, |
| "loss": 0.5345, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0962633207057827e-07, |
| "loss": 0.5905, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0656554388158168e-07, |
| "loss": 0.5831, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0352703266383166e-07, |
| "loss": 0.6712, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0051080532897838e-07, |
| "loss": 0.6089, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.9751686873798913e-07, |
| "loss": 0.6321, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.945452297011252e-07, |
| "loss": 0.6186, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.9159589497792397e-07, |
| "loss": 0.5667, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.8866887127719335e-07, |
| "loss": 0.6278, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.8576416525698637e-07, |
| "loss": 0.6603, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.828817835245933e-07, |
| "loss": 0.6563, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.800217326365228e-07, |
| "loss": 0.5662, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.7718401909848637e-07, |
| "loss": 0.5749, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.7436864936538623e-07, |
| "loss": 0.6096, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.7157562984130182e-07, |
| "loss": 0.5871, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.688049668794678e-07, |
| "loss": 0.542, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.660566667822705e-07, |
| "loss": 0.5384, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.633307358012215e-07, |
| "loss": 0.6558, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.606271801369552e-07, |
| "loss": 0.5642, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.5794600593920794e-07, |
| "loss": 0.5989, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.5528721930680336e-07, |
| "loss": 0.5474, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.526508262876414e-07, |
| "loss": 0.614, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.5003683287868276e-07, |
| "loss": 0.6025, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.4744524502594004e-07, |
| "loss": 0.6591, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.4487606862445547e-07, |
| "loss": 0.6509, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.4232930951829426e-07, |
| "loss": 0.6323, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.3980497350052914e-07, |
| "loss": 0.5563, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.37303066313228e-07, |
| "loss": 0.576, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.3482359364743958e-07, |
| "loss": 0.6184, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.3236656114318235e-07, |
| "loss": 0.6451, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.2993197438942672e-07, |
| "loss": 0.597, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.275198389240906e-07, |
| "loss": 0.5851, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.2513016023402158e-07, |
| "loss": 0.5888, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.2276294375498265e-07, |
| "loss": 0.5672, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.204181948716443e-07, |
| "loss": 0.596, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.1809591891757121e-07, |
| "loss": 0.5916, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.1579612117520456e-07, |
| "loss": 0.5788, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.1351880687586192e-07, |
| "loss": 0.5814, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.1126398119971182e-07, |
| "loss": 0.5942, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0903164927577147e-07, |
| "loss": 0.6292, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0682181618189235e-07, |
| "loss": 0.5432, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0463448694474465e-07, |
| "loss": 0.5823, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0246966653981505e-07, |
| "loss": 0.6369, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 1.0032735989138231e-07, |
| "loss": 0.5598, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.820757187252061e-08, |
| "loss": 0.5863, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.61103073050773e-08, |
| "loss": 0.7235, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.403557095966742e-08, |
| "loss": 0.6829, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.19833675556614e-08, |
| "loss": 0.6167, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.995370176117402e-08, |
| "loss": 0.6263, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.79465781930522e-08, |
| "loss": 0.5296, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.596200141687161e-08, |
| "loss": 0.5081, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.399997594691456e-08, |
| "loss": 0.6296, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.206050624617212e-08, |
| "loss": 0.569, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.014359672632421e-08, |
| "loss": 0.5367, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.824925174773513e-08, |
| "loss": 0.6022, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.637747561944131e-08, |
| "loss": 0.5909, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.452827259914252e-08, |
| "loss": 0.5282, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.270164689319181e-08, |
| "loss": 0.4961, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.089760265658663e-08, |
| "loss": 0.5329, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.911614399295663e-08, |
| "loss": 0.5838, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.735727495455813e-08, |
| "loss": 0.6174, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.562099954226409e-08, |
| "loss": 0.598, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.390732170555191e-08, |
| "loss": 0.5295, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.221624534249681e-08, |
| "loss": 0.5787, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.054777429976621e-08, |
| "loss": 0.5858, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.890191237260201e-08, |
| "loss": 0.4701, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.727866330482168e-08, |
| "loss": 0.5512, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.567803078880385e-08, |
| "loss": 0.6166, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.410001846548385e-08, |
| "loss": 0.5578, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.254462992433817e-08, |
| "loss": 0.5572, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.101186870338781e-08, |
| "loss": 0.5613, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 4.950173828918048e-08, |
| "loss": 0.605, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.801424211678396e-08, |
| "loss": 0.6469, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.654938356978278e-08, |
| "loss": 0.65, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.5107165980269315e-08, |
| "loss": 0.5996, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.36875926288316e-08, |
| "loss": 0.578, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.2290666744552175e-08, |
| "loss": 0.575, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.0916391504994826e-08, |
| "loss": 0.6433, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 3.95647700362034e-08, |
| "loss": 0.5881, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.8235805412688567e-08, |
| "loss": 0.5424, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.692950065742551e-08, |
| "loss": 0.5626, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.564585874184623e-08, |
| "loss": 0.6364, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.43848825858295e-08, |
| "loss": 0.566, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.314657505769758e-08, |
| "loss": 0.6096, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.193093897421173e-08, |
| "loss": 0.5292, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.0737977100557816e-08, |
| "loss": 0.6274, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.9567692150347384e-08, |
| "loss": 0.6057, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.8420086785611035e-08, |
| "loss": 0.6085, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.729516361678619e-08, |
| "loss": 0.5818, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.6192925202720433e-08, |
| "loss": 0.6295, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.5113374050655947e-08, |
| "loss": 0.5245, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.4056512616231764e-08, |
| "loss": 0.5504, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.3022343303474858e-08, |
| "loss": 0.6062, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.201086846479239e-08, |
| "loss": 0.6174, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.1022090400970584e-08, |
| "loss": 0.6268, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.0056011361166972e-08, |
| "loss": 0.5806, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.9112633542908156e-08, |
| "loss": 0.5393, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.8191959092078714e-08, |
| "loss": 0.6258, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.729399010292343e-08, |
| "loss": 0.5988, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.6418728618037285e-08, |
| "loss": 0.6076, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.5566176628365458e-08, |
| "loss": 0.5816, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.4736336073193358e-08, |
| "loss": 0.5648, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.3929208840146591e-08, |
| "loss": 0.6103, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.314479676518432e-08, |
| "loss": 0.5627, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.2383101632597039e-08, |
| "loss": 0.6406, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.164412517499991e-08, |
| "loss": 0.6097, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.0927869073332764e-08, |
| "loss": 0.5697, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.0234334956849002e-08, |
| "loss": 0.6413, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 9.563524403122249e-09, |
| "loss": 0.5769, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.915438938034148e-09, |
| "loss": 0.5904, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.290080035774361e-09, |
| "loss": 0.5537, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.68744911883723e-09, |
| "loss": 0.5277, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.107547558017347e-09, |
| "loss": 0.5385, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 6.550376672407321e-09, |
| "loss": 0.5094, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 6.015937729396681e-09, |
| "loss": 0.5878, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 5.5042319446629855e-09, |
| "loss": 0.6393, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 5.0152604821773755e-09, |
| "loss": 0.6481, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 4.5490244541934734e-09, |
| "loss": 0.5553, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 4.105524921252935e-09, |
| "loss": 0.5588, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.684762892176563e-09, |
| "loss": 0.5933, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.2867393240654244e-09, |
| "loss": 0.6107, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.9114551222964027e-09, |
| "loss": 0.6566, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.5589111405233124e-09, |
| "loss": 0.653, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.229108180672457e-09, |
| "loss": 0.644, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.9220469929415176e-09, |
| "loss": 0.5518, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.6377282757973344e-09, |
| "loss": 0.5531, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.3761526759747956e-09, |
| "loss": 0.5921, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.1373207884768366e-09, |
| "loss": 0.5876, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 9.212331565711108e-10, |
| "loss": 0.6151, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 7.278902717866576e-10, |
| "loss": 0.5587, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 5.572925739183443e-10, |
| "loss": 0.5808, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 4.09440451023535e-10, |
| "loss": 0.5247, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 2.843342394176496e-10, |
| "loss": 0.5328, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 1.819742236774946e-10, |
| "loss": 0.5915, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 1.0236063664126328e-10, |
| "loss": 0.5768, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 4.5493659402984224e-11, |
| "loss": 0.5871, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 1.1373421318072731e-11, |
| "loss": 0.597, |
| "step": 2147 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.0, |
| "loss": 0.5972, |
| "step": 2148 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 2148, |
| "total_flos": 3.773769285055283e+17, |
| "train_loss": 0.8857765065530381, |
| "train_runtime": 39271.2468, |
| "train_samples_per_second": 7.009, |
| "train_steps_per_second": 0.055 |
| } |
| ], |
| "max_steps": 2148, |
| "num_train_epochs": 3, |
| "total_flos": 3.773769285055283e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |