{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9985789194391228,
  "eval_steps": 500,
  "global_step": 434,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
| { | |
| "epoch": 0.0, | |
| "learning_rate": 3.3333333333333333e-06, | |
| "loss": 2154.8345, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 6.666666666666667e-06, | |
| "loss": 312.5255, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1e-05, | |
| "loss": 2138.2986, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.3333333333333333e-05, | |
| "loss": 343.9667, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.6666666666666667e-05, | |
| "loss": 8198.7002, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2e-05, | |
| "loss": 2178.5674, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.3333333333333336e-05, | |
| "loss": 23595.8965, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 2.6666666666666667e-05, | |
| "loss": 67273.3125, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3e-05, | |
| "loss": 141691.7031, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 3.3333333333333335e-05, | |
| "loss": 103845.0469, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 3.6666666666666666e-05, | |
| "loss": 468.9194, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4e-05, | |
| "loss": 1383.6638, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.3333333333333334e-05, | |
| "loss": 1525188.25, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.666666666666667e-05, | |
| "loss": 41394.4219, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 5e-05, | |
| "loss": 531693.3125, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.988066825775656e-05, | |
| "loss": 424.9092, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.976133651551313e-05, | |
| "loss": 4699.9478, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.964200477326969e-05, | |
| "loss": 3598.4495, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.952267303102625e-05, | |
| "loss": 329783.8438, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.940334128878282e-05, | |
| "loss": 108553.8594, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9284009546539385e-05, | |
| "loss": 669686.625, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9164677804295945e-05, | |
| "loss": 516.546, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.904534606205251e-05, | |
| "loss": 1165.3262, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.892601431980907e-05, | |
| "loss": 14278.1465, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.880668257756563e-05, | |
| "loss": 803169.0625, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8687350835322196e-05, | |
| "loss": 3864.5024, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.856801909307876e-05, | |
| "loss": 286386.7812, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.844868735083533e-05, | |
| "loss": 9645701.0, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.832935560859189e-05, | |
| "loss": 10830.8975, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.821002386634845e-05, | |
| "loss": 7533.4912, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.809069212410501e-05, | |
| "loss": 11029.4629, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.797136038186158e-05, | |
| "loss": 5534.8965, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.785202863961814e-05, | |
| "loss": 3849.7932, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7732696897374704e-05, | |
| "loss": 14622.9688, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.761336515513127e-05, | |
| "loss": 2258.0952, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.749403341288783e-05, | |
| "loss": 562.1243, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7374701670644396e-05, | |
| "loss": 1060.5302, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7255369928400955e-05, | |
| "loss": 94356.0781, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.713603818615752e-05, | |
| "loss": 325.8985, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.701670644391408e-05, | |
| "loss": 1542.8948, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.6897374701670647e-05, | |
| "loss": 25336.4648, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.677804295942721e-05, | |
| "loss": 8603.3447, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.665871121718377e-05, | |
| "loss": 164.9301, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.653937947494034e-05, | |
| "loss": 35805.4766, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.64200477326969e-05, | |
| "loss": 178383.0625, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.6300715990453463e-05, | |
| "loss": 756.3121, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.618138424821002e-05, | |
| "loss": 3312.0427, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.606205250596659e-05, | |
| "loss": 2473.1311, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.594272076372315e-05, | |
| "loss": 8402.5947, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.582338902147972e-05, | |
| "loss": 93.5995, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.570405727923628e-05, | |
| "loss": 235.2204, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.5584725536992847e-05, | |
| "loss": 21004.8418, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.5465393794749406e-05, | |
| "loss": 9018.7559, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.5346062052505965e-05, | |
| "loss": 31628.4824, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.522673031026253e-05, | |
| "loss": 98.9685, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.510739856801909e-05, | |
| "loss": 55139.7383, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.498806682577566e-05, | |
| "loss": 3274.8267, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.486873508353222e-05, | |
| "loss": 387.2443, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.474940334128879e-05, | |
| "loss": 115.7972, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.463007159904535e-05, | |
| "loss": 4086.0728, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.4510739856801914e-05, | |
| "loss": 13200.7227, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.4391408114558474e-05, | |
| "loss": 392.8962, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.427207637231503e-05, | |
| "loss": 1629.9041, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.41527446300716e-05, | |
| "loss": 665.3759, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.4033412887828165e-05, | |
| "loss": 1822.3533, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.391408114558473e-05, | |
| "loss": 84440.8281, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.379474940334129e-05, | |
| "loss": 156.116, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.367541766109786e-05, | |
| "loss": 33720.3906, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.3556085918854416e-05, | |
| "loss": 57585.8555, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.343675417661098e-05, | |
| "loss": 27.3209, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 4.331742243436754e-05, | |
| "loss": 1515.2338, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 4.319809069212411e-05, | |
| "loss": 360.9213, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 4.3078758949880674e-05, | |
| "loss": 44361.9766, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 4.295942720763723e-05, | |
| "loss": 2210.8513, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 4.28400954653938e-05, | |
| "loss": 1037.6666, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 4.272076372315036e-05, | |
| "loss": 4023.9438, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 4.2601431980906925e-05, | |
| "loss": 82307.1484, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 4.2482100238663484e-05, | |
| "loss": 323.2414, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 4.236276849642005e-05, | |
| "loss": 135475.5312, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 4.2243436754176616e-05, | |
| "loss": 3599.7883, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 4.2124105011933175e-05, | |
| "loss": 363409.875, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 4.200477326968974e-05, | |
| "loss": 125604.3984, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 4.18854415274463e-05, | |
| "loss": 220.2469, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 4.176610978520287e-05, | |
| "loss": 1677.0813, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 4.1646778042959426e-05, | |
| "loss": 4098.7871, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 4.152744630071599e-05, | |
| "loss": 2398.8313, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 4.140811455847255e-05, | |
| "loss": 923.5432, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 4.1288782816229125e-05, | |
| "loss": 10474.2695, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 4.1169451073985684e-05, | |
| "loss": 85930.8516, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 4.105011933174224e-05, | |
| "loss": 628.9391, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 4.093078758949881e-05, | |
| "loss": 266.4478, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 4.081145584725537e-05, | |
| "loss": 3529850.0, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 4.0692124105011935e-05, | |
| "loss": 30642.959, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 4.0572792362768494e-05, | |
| "loss": 4583143.0, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 4.045346062052506e-05, | |
| "loss": 6407.5708, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 4.0334128878281626e-05, | |
| "loss": 3839.4238, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 4.021479713603819e-05, | |
| "loss": 815.6643, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 4.009546539379475e-05, | |
| "loss": 349.929, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 3.997613365155131e-05, | |
| "loss": 371217.7812, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 3.985680190930788e-05, | |
| "loss": 176.3654, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 3.9737470167064437e-05, | |
| "loss": 64.4966, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 3.9618138424821e-05, | |
| "loss": 17530.2637, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 3.949880668257757e-05, | |
| "loss": 544.7783, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 3.9379474940334135e-05, | |
| "loss": 7955.9927, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 3.9260143198090694e-05, | |
| "loss": 197.6159, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 3.914081145584726e-05, | |
| "loss": 1884.6552, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 3.902147971360382e-05, | |
| "loss": 1102.6284, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 3.8902147971360386e-05, | |
| "loss": 52947.9766, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 3.8782816229116945e-05, | |
| "loss": 836.1002, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 3.866348448687351e-05, | |
| "loss": 8226.0859, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 3.854415274463008e-05, | |
| "loss": 671.9891, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 3.8424821002386637e-05, | |
| "loss": 3420.7861, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 3.83054892601432e-05, | |
| "loss": 6020.8076, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 3.818615751789976e-05, | |
| "loss": 9401.3975, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 3.806682577565633e-05, | |
| "loss": 888.0344, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 3.794749403341289e-05, | |
| "loss": 20902.9258, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 3.7828162291169453e-05, | |
| "loss": 33.6206, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 3.770883054892602e-05, | |
| "loss": 36034.0859, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 3.758949880668258e-05, | |
| "loss": 4315.4829, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 3.7470167064439145e-05, | |
| "loss": 41.5108, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 3.7350835322195704e-05, | |
| "loss": 118439.2344, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 3.723150357995227e-05, | |
| "loss": 56936.8984, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 3.711217183770883e-05, | |
| "loss": 70.141, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 3.6992840095465396e-05, | |
| "loss": 1669.0631, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 3.6873508353221955e-05, | |
| "loss": 993.8124, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 3.675417661097852e-05, | |
| "loss": 323.3054, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 3.663484486873509e-05, | |
| "loss": 2687.2642, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 3.651551312649165e-05, | |
| "loss": 809.3516, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 3.639618138424821e-05, | |
| "loss": 2634.0364, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 3.627684964200477e-05, | |
| "loss": 1219.66, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 3.615751789976134e-05, | |
| "loss": 340.3108, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.60381861575179e-05, | |
| "loss": 202.8932, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 3.5918854415274464e-05, | |
| "loss": 2560.3716, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.579952267303103e-05, | |
| "loss": 194.0811, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.5680190930787596e-05, | |
| "loss": 280622.0938, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.5560859188544155e-05, | |
| "loss": 1152.0107, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 3.5441527446300715e-05, | |
| "loss": 1275.4032, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.532219570405728e-05, | |
| "loss": 2975.0798, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.520286396181384e-05, | |
| "loss": 4207.3472, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.5083532219570406e-05, | |
| "loss": 1444.906, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.496420047732697e-05, | |
| "loss": 10217.4951, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 3.484486873508354e-05, | |
| "loss": 2441.9373, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.47255369928401e-05, | |
| "loss": 2693.4023, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.4606205250596664e-05, | |
| "loss": 289.4755, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.448687350835322e-05, | |
| "loss": 3141.6868, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 3.436754176610978e-05, | |
| "loss": 4827.5513, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.424821002386635e-05, | |
| "loss": 140.7513, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.4128878281622915e-05, | |
| "loss": 407.0926, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.400954653937948e-05, | |
| "loss": 11325.877, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 3.389021479713604e-05, | |
| "loss": 1355.1071, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.3770883054892606e-05, | |
| "loss": 2447.5452, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.3651551312649165e-05, | |
| "loss": 179011.7969, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.353221957040573e-05, | |
| "loss": 4771.3545, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 3.341288782816229e-05, | |
| "loss": 10228.957, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 3.329355608591885e-05, | |
| "loss": 6799.0142, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.3174224343675416e-05, | |
| "loss": 62602.7656, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.305489260143198e-05, | |
| "loss": 1877.1234, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.293556085918855e-05, | |
| "loss": 23234.3672, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.281622911694511e-05, | |
| "loss": 3270.4211, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.2696897374701674e-05, | |
| "loss": 210849.625, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.257756563245823e-05, | |
| "loss": 21.4817, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.24582338902148e-05, | |
| "loss": 440.5267, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.233890214797136e-05, | |
| "loss": 11568.3057, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 3.2219570405727925e-05, | |
| "loss": 222751.4375, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 3.210023866348449e-05, | |
| "loss": 8170.4053, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 3.198090692124105e-05, | |
| "loss": 895.7988, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 3.1861575178997616e-05, | |
| "loss": 1350.8844, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 3.1742243436754176e-05, | |
| "loss": 5618.6152, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 3.162291169451074e-05, | |
| "loss": 4986.3745, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 3.15035799522673e-05, | |
| "loss": 47760.0273, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 3.138424821002387e-05, | |
| "loss": 1268.5199, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 3.126491646778043e-05, | |
| "loss": 2829.28, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 3.1145584725537e-05, | |
| "loss": 63647.0195, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 3.102625298329356e-05, | |
| "loss": 289.5919, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 3.090692124105012e-05, | |
| "loss": 212786.1406, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 3.0787589498806684e-05, | |
| "loss": 750.4266, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.0668257756563243e-05, | |
| "loss": 439.2351, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.054892601431981e-05, | |
| "loss": 3095.3596, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.0429594272076372e-05, | |
| "loss": 1786.9099, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.031026252983294e-05, | |
| "loss": 24.2971, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.01909307875895e-05, | |
| "loss": 8940.6621, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.0071599045346067e-05, | |
| "loss": 35.1568, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 2.9952267303102627e-05, | |
| "loss": 464.3121, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.983293556085919e-05, | |
| "loss": 1286.3716, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.9713603818615755e-05, | |
| "loss": 2473.5828, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.9594272076372315e-05, | |
| "loss": 53711.5586, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.947494033412888e-05, | |
| "loss": 28963.9258, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.935560859188544e-05, | |
| "loss": 75207.0234, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.923627684964201e-05, | |
| "loss": 4702.6685, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.911694510739857e-05, | |
| "loss": 4575.6294, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.8997613365155135e-05, | |
| "loss": 651.8832, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.8878281622911694e-05, | |
| "loss": 71858.8516, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.8758949880668257e-05, | |
| "loss": 3636.6687, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.8639618138424823e-05, | |
| "loss": 76.8694, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.8520286396181382e-05, | |
| "loss": 390.3317, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.840095465393795e-05, | |
| "loss": 2925.4253, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.828162291169451e-05, | |
| "loss": 2840.2637, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.8162291169451077e-05, | |
| "loss": 19435.1816, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 2.8042959427207637e-05, | |
| "loss": 13893.8916, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 2.7923627684964203e-05, | |
| "loss": 85997.7031, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.7804295942720766e-05, | |
| "loss": 1057.1903, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.7684964200477325e-05, | |
| "loss": 222.8144, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.756563245823389e-05, | |
| "loss": 3508.3396, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.7446300715990454e-05, | |
| "loss": 77230.9922, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.732696897374702e-05, | |
| "loss": 118.6687, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.720763723150358e-05, | |
| "loss": 818.3032, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.7088305489260145e-05, | |
| "loss": 614.3881, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.6968973747016708e-05, | |
| "loss": 1442.3588, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.6849642004773274e-05, | |
| "loss": 11598.5752, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 2.6730310262529833e-05, | |
| "loss": 3279.7495, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 2.6610978520286396e-05, | |
| "loss": 9819.6895, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.6491646778042962e-05, | |
| "loss": 619.9316, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.637231503579952e-05, | |
| "loss": 43706.6953, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.6252983293556088e-05, | |
| "loss": 64.7919, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.613365155131265e-05, | |
| "loss": 3574.5085, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.6014319809069216e-05, | |
| "loss": 678.5247, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.5894988066825776e-05, | |
| "loss": 66.3897, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.5775656324582342e-05, | |
| "loss": 41925.4766, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 2.5656324582338905e-05, | |
| "loss": 190234.2188, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 2.5536992840095464e-05, | |
| "loss": 1073.3352, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 2.541766109785203e-05, | |
| "loss": 87.6374, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 2.529832935560859e-05, | |
| "loss": 26874.1582, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 2.517899761336516e-05, | |
| "loss": 410.1149, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 2.5059665871121718e-05, | |
| "loss": 7732.0552, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 2.494033412887828e-05, | |
| "loss": 1358.0614, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 2.4821002386634844e-05, | |
| "loss": 33285.3281, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.470167064439141e-05, | |
| "loss": 8909.3711, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.4582338902147972e-05, | |
| "loss": 407.9029, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.4463007159904535e-05, | |
| "loss": 912.2557, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 2.4343675417661098e-05, | |
| "loss": 11245.6484, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 2.4224343675417664e-05, | |
| "loss": 13153.9141, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 2.4105011933174227e-05, | |
| "loss": 1926.8518, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 2.398568019093079e-05, | |
| "loss": 275.4235, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 2.3866348448687352e-05, | |
| "loss": 2805.95, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 2.3747016706443915e-05, | |
| "loss": 3201.3953, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 2.3627684964200477e-05, | |
| "loss": 788.0258, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 2.350835322195704e-05, | |
| "loss": 11083.1758, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 2.3389021479713606e-05, | |
| "loss": 55374.0273, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 2.326968973747017e-05, | |
| "loss": 584374.6875, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.3150357995226732e-05, | |
| "loss": 61.7834, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.3031026252983294e-05, | |
| "loss": 16746.6934, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.291169451073986e-05, | |
| "loss": 729.4385, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 2.2792362768496423e-05, | |
| "loss": 2346.0439, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 2.2673031026252983e-05, | |
| "loss": 22444.0, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 2.2553699284009545e-05, | |
| "loss": 11688.8047, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 2.243436754176611e-05, | |
| "loss": 8438.4268, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 2.2315035799522674e-05, | |
| "loss": 513.0939, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 2.2195704057279237e-05, | |
| "loss": 145.1669, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 2.20763723150358e-05, | |
| "loss": 33718.2617, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 2.1957040572792366e-05, | |
| "loss": 5309.7217, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 2.183770883054893e-05, | |
| "loss": 1561.8147, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 2.171837708830549e-05, | |
| "loss": 401.0991, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 2.1599045346062054e-05, | |
| "loss": 691.1371, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 2.1479713603818617e-05, | |
| "loss": 5117.2085, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 2.136038186157518e-05, | |
| "loss": 441.8343, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 2.1241050119331742e-05, | |
| "loss": 4239.8047, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 2.1121718377088308e-05, | |
| "loss": 2131.5051, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 2.100238663484487e-05, | |
| "loss": 15543.0732, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 2.0883054892601433e-05, | |
| "loss": 1561.8523, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 2.0763723150357996e-05, | |
| "loss": 501.6205, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 2.0644391408114562e-05, | |
| "loss": 31348.1641, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 2.052505966587112e-05, | |
| "loss": 9407.1494, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 2.0405727923627684e-05, | |
| "loss": 5945.5815, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 2.0286396181384247e-05, | |
| "loss": 198.1411, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 2.0167064439140813e-05, | |
| "loss": 1753.7537, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 2.0047732696897376e-05, | |
| "loss": 1425.4434, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.992840095465394e-05, | |
| "loss": 417.404, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.98090692124105e-05, | |
| "loss": 3352.2693, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.9689737470167067e-05, | |
| "loss": 419649.4062, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.957040572792363e-05, | |
| "loss": 331.6387, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.9451073985680193e-05, | |
| "loss": 3271.9436, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.9331742243436756e-05, | |
| "loss": 257.6401, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.9212410501193318e-05, | |
| "loss": 77.437, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.909307875894988e-05, | |
| "loss": 4351.1445, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.8973747016706444e-05, | |
| "loss": 945.9266, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.885441527446301e-05, | |
| "loss": 863.4519, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.8735083532219573e-05, | |
| "loss": 3068.04, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.8615751789976135e-05, | |
| "loss": 152.3694, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.8496420047732698e-05, | |
| "loss": 27651.7871, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.837708830548926e-05, | |
| "loss": 1480.9608, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.8257756563245823e-05, | |
| "loss": 20972.2227, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.8138424821002386e-05, | |
| "loss": 509.0767, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.801909307875895e-05, | |
| "loss": 25930.4199, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.7899761336515515e-05, | |
| "loss": 33.3048, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.7780429594272078e-05, | |
| "loss": 12881.1221, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.766109785202864e-05, | |
| "loss": 125079.1875, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.7541766109785203e-05, | |
| "loss": 17346.0098, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.742243436754177e-05, | |
| "loss": 3085.6494, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.7303102625298332e-05, | |
| "loss": 1862.4844, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.718377088305489e-05, | |
| "loss": 584.3925, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.7064439140811457e-05, | |
| "loss": 1440.3424, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.694510739856802e-05, | |
| "loss": 10591.7715, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.6825775656324583e-05, | |
| "loss": 3629.6746, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.6706443914081145e-05, | |
| "loss": 203.7102, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.6587112171837708e-05, | |
| "loss": 1990.9237, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.6467780429594274e-05, | |
| "loss": 4698.8696, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.6348448687350837e-05, | |
| "loss": 12364.7568, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.62291169451074e-05, | |
| "loss": 3250.7644, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.6109785202863962e-05, | |
| "loss": 563.4382, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.5990453460620525e-05, | |
| "loss": 4512.2549, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.5871121718377088e-05, | |
| "loss": 356.9354, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.575178997613365e-05, | |
| "loss": 20561.3516, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.5632458233890217e-05, | |
| "loss": 1720.2273, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.551312649164678e-05, | |
| "loss": 61301.4414, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.5393794749403342e-05, | |
| "loss": 2145.4048, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.5274463007159905e-05, | |
| "loss": 32.2598, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.515513126491647e-05, | |
| "loss": 98319.1484, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.5035799522673034e-05, | |
| "loss": 41.3583, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.4916467780429595e-05, | |
| "loss": 969.0966, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.4797136038186157e-05, | |
| "loss": 11281.6221, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.467780429594272e-05, | |
| "loss": 140.3954, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4558472553699284e-05, | |
| "loss": 40839.0352, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4439140811455847e-05, | |
| "loss": 687.9666, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4319809069212412e-05, | |
| "loss": 15623.2422, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4200477326968974e-05, | |
| "loss": 36405.7031, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.4081145584725539e-05, | |
| "loss": 18.3407, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.3961813842482101e-05, | |
| "loss": 20860.0215, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.3842482100238662e-05, | |
| "loss": 136255.6094, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.3723150357995227e-05, | |
| "loss": 114715.4141, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.360381861575179e-05, | |
| "loss": 380233.5312, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.3484486873508354e-05, | |
| "loss": 92655.1797, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.3365155131264917e-05, | |
| "loss": 221169.5625, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.3245823389021481e-05, | |
| "loss": 14491.3555, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.3126491646778044e-05, | |
| "loss": 604.6326, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.3007159904534608e-05, | |
| "loss": 1070.2869, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2887828162291171e-05, | |
| "loss": 1943.536, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2768496420047732e-05, | |
| "loss": 13625.3975, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2649164677804295e-05, | |
| "loss": 18923.8242, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2529832935560859e-05, | |
| "loss": 8192.084, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2410501193317422e-05, | |
| "loss": 18687.1094, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2291169451073986e-05, | |
| "loss": 24.7262, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2171837708830549e-05, | |
| "loss": 31110.3066, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2052505966587113e-05, | |
| "loss": 22251.8145, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.1933174224343676e-05, | |
| "loss": 1738.8523, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.1813842482100239e-05, | |
| "loss": 135108.0, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.1694510739856803e-05, | |
| "loss": 421.1908, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.1575178997613366e-05, | |
| "loss": 1606.5295, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.145584725536993e-05, | |
| "loss": 2758.7969, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.1336515513126491e-05, | |
| "loss": 3688.4736, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1217183770883056e-05, | |
| "loss": 42112.6953, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1097852028639618e-05, | |
| "loss": 1432.4136, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.0978520286396183e-05, | |
| "loss": 1795.425, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.0859188544152746e-05, | |
| "loss": 7462.5249, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.0739856801909308e-05, | |
| "loss": 314273.4688, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.0620525059665871e-05, | |
| "loss": 82580.6328, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.0501193317422435e-05, | |
| "loss": 7970.0132, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.0381861575178998e-05, | |
| "loss": 34508.6797, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.026252983293556e-05, | |
| "loss": 69825.7188, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.0143198090692124e-05, | |
| "loss": 841.0046, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.0023866348448688e-05, | |
| "loss": 1370.4319, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 9.90453460620525e-06, | |
| "loss": 296.6957, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 9.785202863961815e-06, | |
| "loss": 469730.4062, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.665871121718378e-06, | |
| "loss": 25752.3867, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.54653937947494e-06, | |
| "loss": 3364.7927, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.427207637231505e-06, | |
| "loss": 2272.8479, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 9.307875894988068e-06, | |
| "loss": 6455.9248, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 9.18854415274463e-06, | |
| "loss": 1536.6981, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 9.069212410501193e-06, | |
| "loss": 1008.3629, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 8.949880668257757e-06, | |
| "loss": 5969.8877, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 8.83054892601432e-06, | |
| "loss": 3844.1497, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 8.711217183770885e-06, | |
| "loss": 5906.2695, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 8.591885441527446e-06, | |
| "loss": 22959.3652, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 8.47255369928401e-06, | |
| "loss": 356.8062, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 8.353221957040573e-06, | |
| "loss": 10640.417, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 8.233890214797137e-06, | |
| "loss": 15015.7236, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 8.1145584725537e-06, | |
| "loss": 27919.8359, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 7.995226730310263e-06, | |
| "loss": 93.8642, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 7.875894988066825e-06, | |
| "loss": 531.3182, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 7.75656324582339e-06, | |
| "loss": 44.6307, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 7.637231503579952e-06, | |
| "loss": 617.2424, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 7.517899761336517e-06, | |
| "loss": 35023.9141, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 7.398568019093079e-06, | |
| "loss": 301.8852, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 7.279236276849642e-06, | |
| "loss": 4805.9424, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 7.159904534606206e-06, | |
| "loss": 981.1636, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 7.040572792362769e-06, | |
| "loss": 2385.3809, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 6.921241050119331e-06, | |
| "loss": 17975.209, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 6.801909307875895e-06, | |
| "loss": 14639.3535, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 6.682577565632458e-06, | |
| "loss": 184.0828, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 6.563245823389022e-06, | |
| "loss": 4096.9023, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 6.4439140811455855e-06, | |
| "loss": 3484.0598, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 6.324582338902147e-06, | |
| "loss": 875.6252, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 6.205250596658711e-06, | |
| "loss": 372.1837, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 6.0859188544152745e-06, | |
| "loss": 374.8253, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 5.966587112171838e-06, | |
| "loss": 148.9297, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 5.847255369928402e-06, | |
| "loss": 2172.886, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 5.727923627684965e-06, | |
| "loss": 114176.7109, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 5.608591885441528e-06, | |
| "loss": 774.8303, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 5.489260143198091e-06, | |
| "loss": 8663.0283, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 5.369928400954654e-06, | |
| "loss": 39.113, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 5.250596658711218e-06, | |
| "loss": 569.8843, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 5.13126491646778e-06, | |
| "loss": 62.069, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 5.011933174224344e-06, | |
| "loss": 16262.8975, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.8926014319809075e-06, | |
| "loss": 386.9028, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.77326968973747e-06, | |
| "loss": 1635.613, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.653937947494034e-06, | |
| "loss": 6881.8877, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.5346062052505965e-06, | |
| "loss": 9540.7607, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 4.41527446300716e-06, | |
| "loss": 1987.6592, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 4.295942720763723e-06, | |
| "loss": 6021.2861, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.176610978520286e-06, | |
| "loss": 2370.6196, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.05727923627685e-06, | |
| "loss": 1680.244, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 3.937947494033413e-06, | |
| "loss": 82911.6641, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 3.818615751789976e-06, | |
| "loss": 767.3438, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.6992840095465393e-06, | |
| "loss": 316.2509, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.579952267303103e-06, | |
| "loss": 169716.9688, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.4606205250596656e-06, | |
| "loss": 307.0406, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.341288782816229e-06, | |
| "loss": 730.7938, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.2219570405727927e-06, | |
| "loss": 95440.8516, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 3.1026252983293554e-06, | |
| "loss": 4808.3286, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.983293556085919e-06, | |
| "loss": 1284.4513, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.8639618138424826e-06, | |
| "loss": 720.0696, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.7446300715990457e-06, | |
| "loss": 102548.2656, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.625298329355609e-06, | |
| "loss": 84.6599, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.505966587112172e-06, | |
| "loss": 1387.675, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.386634844868735e-06, | |
| "loss": 424.9204, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.2673031026252983e-06, | |
| "loss": 1309.8296, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.1479713603818614e-06, | |
| "loss": 6679.3481, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.028639618138425e-06, | |
| "loss": 14053.2295, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.909307875894988e-06, | |
| "loss": 44.5011, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.7899761336515514e-06, | |
| "loss": 4494.7183, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.6706443914081146e-06, | |
| "loss": 149.143, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.5513126491646777e-06, | |
| "loss": 821.6967, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.4319809069212413e-06, | |
| "loss": 1617.1495, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 1.3126491646778044e-06, | |
| "loss": 2006.76, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 1.1933174224343676e-06, | |
| "loss": 64.7329, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 1.0739856801909307e-06, | |
| "loss": 1603.62, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 9.54653937947494e-07, | |
| "loss": 2882.3655, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 8.353221957040573e-07, | |
| "loss": 69.2657, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 7.159904534606206e-07, | |
| "loss": 286800.9062, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 5.966587112171838e-07, | |
| "loss": 4796.5063, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 4.77326968973747e-07, | |
| "loss": 267.1339, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 3.579952267303103e-07, | |
| "loss": 1926.3707, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 2.386634844868735e-07, | |
| "loss": 7170.8643, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 1.1933174224343676e-07, | |
| "loss": 612.3254, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 0.0, | |
| "loss": 7622.6533, | |
| "step": 434 | |
| }, | |
    {
      "epoch": 2.0,
      "step": 434,
      "total_flos": 1.0362069838124974e+19,
      "train_loss": 74766.76249146792,
      "train_runtime": 78556.7962,
      "train_samples_per_second": 2.831,
      "train_steps_per_second": 0.006
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 434,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 1.0362069838124974e+19,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}