I0922 08:31:34.719542 139722080012096 train.py:80] Step 2300: loss=43.793564, tp=7376, pos=23724, pred=23739, f1=0.310811. I0922 08:33:37.700823 139722080012096 train.py:80] Step 2400: loss=42.933798, tp=7368, pos=23468, pred=23505, f1=0.313712. I0922 08:35:40.014538 139722080012096 train.py:80] Step 2500: loss=45.443451, tp=6600, pos=23299, pred=23563, f1=0.281678. I0922 08:37:43.956522 139722080012096 train.py:80] Step 2600: loss=42.634361, tp=7462, pos=23661, pred=23789, f1=0.314521. I0922 08:39:47.824223 139722080012096 train.py:80] Step 2700: loss=40.338867, tp=7919, pos=23497, pred=23653, f1=0.335907. I0922 08:41:50.759461 139722080012096 train.py:80] Step 2800: loss=39.552951, tp=8370, pos=23591, pred=23758, f1=0.353545. I0922 08:43:52.757941 139722080012096 train.py:80] Step 2900: loss=38.586103, tp=8308, pos=23383, pred=23492, f1=0.354475. I0922 08:45:56.399995 139722080012096 train.py:80] Step 3000: loss=38.142439, tp=8535, pos=23405, pred=23571, f1=0.363377. I0922 08:47:58.360316 139722080012096 train.py:80] Step 3100: loss=38.163295, tp=8504, pos=23287, pred=23417, f1=0.364166. I0922 08:50:00.923817 139722080012096 train.py:80] Step 3200: loss=38.850313, tp=8480, pos=23434, pred=23567, f1=0.360843. I0922 08:52:05.777075 139722080012096 train.py:80] Step 3300: loss=38.429292, tp=8687, pos=23690, pred=23870, f1=0.365307. I0922 08:54:09.693588 139722080012096 train.py:80] Step 3400: loss=37.665960, tp=8773, pos=23429, pred=23598, f1=0.373105. I0922 08:56:10.987399 139722080012096 train.py:80] Step 3500: loss=34.617738, tp=9198, pos=23135, pred=23325, f1=0.395954. I0922 08:58:15.131932 139722080012096 train.py:80] Step 3600: loss=32.012023, tp=9961, pos=23362, pred=23631, f1=0.423935. I0922 09:00:18.621774 139722080012096 train.py:80] Step 3700: loss=31.863533, tp=10072, pos=23527, pred=23650, f1=0.426988. I0922 09:02:23.375048 139722080012096 train.py:80] Step 3800: loss=32.815681, tp=9587, pos=23151, pred=23411, f1=0.411795. I0922 09:04:28.183915 139722080012096 train.py:80] Step 3900: loss=32.991979, tp=9691, pos=23446, pred=23612, f1=0.411875. I0922 09:06:33.910646 139722080012096 train.py:80] Step 4000: loss=33.101618, tp=9902, pos=23517, pred=23728, f1=0.419177. I0922 09:08:38.128628 139722080012096 train.py:80] Step 4100: loss=32.457481, tp=9941, pos=23516, pred=23661, f1=0.421434. I0922 09:10:42.025209 139722080012096 train.py:80] Step 4200: loss=32.382849, tp=9566, pos=23170, pred=23323, f1=0.411503. I0922 09:12:45.144205 139722080012096 train.py:80] Step 4300: loss=31.959615, tp=9736, pos=23085, pred=23273, f1=0.420035. I0922 09:14:49.615176 139722080012096 train.py:80] Step 4400: loss=32.626861, tp=10091, pos=23498, pred=23706, f1=0.427549. I0922 09:16:54.982551 139722080012096 train.py:80] Step 4500: loss=32.799677, tp=10183, pos=23741, pred=23896, f1=0.427525. I0922 09:19:01.674943 139722080012096 train.py:80] Step 4600: loss=30.075472, tp=10735, pos=23633, pred=23878, f1=0.451895. I0922 09:21:08.163668 139722080012096 train.py:80] Step 4700: loss=26.148112, tp=11357, pos=23409, pred=23753, f1=0.481617. I0922 09:23:13.057930 139722080012096 train.py:80] Step 4800: loss=27.106392, tp=11091, pos=23329, pred=23537, f1=0.473307. I0922 09:25:14.345078 139722080012096 train.py:80] Step 4900: loss=27.262423, tp=11031, pos=23218, pred=23502, f1=0.472217. I0922 09:27:16.523801 139722080012096 train.py:80] Step 5000: loss=27.715982, tp=11317, pos=23771, pred=23951, f1=0.474289. 
I0922 09:29:16.387348 139722080012096 train.py:80] Step 5100: loss=27.647513, tp=10758, pos=22997, pred=23237, f1=0.465372. I0922 09:31:17.128146 139722080012096 train.py:80] Step 5200: loss=27.571460, tp=11237, pos=23417, pred=23636, f1=0.477632. I0922 09:33:14.859896 139722080012096 train.py:80] Step 5300: loss=27.719701, tp=10930, pos=23084, pred=23333, f1=0.470948. I0922 09:35:14.110525 139722080012096 train.py:80] Step 5400: loss=28.527714, tp=10950, pos=23357, pred=23618, f1=0.466205. I0922 09:37:13.892600 139722080012096 train.py:80] Step 5500: loss=28.161608, tp=11150, pos=23402, pred=23638, f1=0.474065. I0922 09:39:14.590360 139722080012096 train.py:80] Step 5600: loss=27.935296, tp=11044, pos=23405, pred=23616, f1=0.469748. I0922 09:41:13.635112 139722080012096 train.py:80] Step 5700: loss=25.729731, tp=11559, pos=23352, pred=23510, f1=0.493321. I0922 09:43:15.426425 139722080012096 train.py:80] Step 5800: loss=21.686442, tp=12695, pos=23522, pred=23743, f1=0.537184. I0922 09:45:14.751119 139722080012096 train.py:80] Step 5900: loss=22.541377, tp=12577, pos=23523, pred=23781, f1=0.531752. I0922 09:47:13.933984 139722080012096 train.py:80] Step 6000: loss=23.281013, tp=12180, pos=23340, pred=23553, f1=0.519481. I0922 09:49:10.586908 139722080012096 train.py:80] Step 6100: loss=22.781097, tp=11906, pos=22805, pred=23047, f1=0.519323. I0922 09:51:12.319085 139722080012096 train.py:80] Step 6200: loss=24.136259, tp=11929, pos=23378, pred=23634, f1=0.507487. I0922 09:53:12.580131 139722080012096 train.py:80] Step 6300: loss=24.018486, tp=12150, pos=23500, pred=23730, f1=0.514503. I0922 09:55:13.720905 139722080012096 train.py:80] Step 6400: loss=24.290969, tp=11918, pos=23388, pred=23630, f1=0.506955. I0922 09:57:12.887026 139722080012096 train.py:80] Step 6500: loss=24.101559, tp=12042, pos=23350, pred=23534, f1=0.513693. I0922 09:59:13.966607 139722080012096 train.py:80] Step 6600: loss=24.446708, tp=11972, pos=23415, pred=23664, f1=0.508592. I0922 10:01:15.352441 139722080012096 train.py:80] Step 6700: loss=24.927423, tp=12021, pos=23651, pred=23852, f1=0.506115. I0922 10:03:15.808447 139722080012096 train.py:80] Step 6800: loss=21.462809, tp=12814, pos=23342, pred=23557, f1=0.546451. I0922 10:05:17.935793 139722080012096 train.py:80] Step 6900: loss=18.450743, tp=13478, pos=23319, pred=23594, f1=0.574596. I0922 10:07:20.342869 139722080012096 train.py:80] Step 7000: loss=18.600685, tp=13207, pos=23062, pred=23321, f1=0.569476. I0922 10:09:20.217378 139722080012096 train.py:80] Step 7100: loss=19.492598, tp=13031, pos=23164, pred=23392, f1=0.559799. I0922 10:11:19.155082 139722080012096 train.py:80] Step 7200: loss=20.075269, tp=12968, pos=23285, pred=23523, f1=0.554093. I0922 10:13:20.322952 139722080012096 train.py:80] Step 7300: loss=20.144545, tp=13061, pos=23406, pred=23645, f1=0.555185. I0922 10:15:21.796969 139722080012096 train.py:80] Step 7400: loss=21.545017, tp=12735, pos=23465, pred=23724, f1=0.539744. I0922 10:17:21.466923 139722080012096 train.py:80] Step 7500: loss=20.289755, tp=13050, pos=23340, pred=23611, f1=0.555899. I0922 10:19:23.291984 139722080012096 train.py:80] Step 7600: loss=20.829923, tp=12876, pos=23262, pred=23476, f1=0.550986. I0922 10:21:25.778093 139722080012096 train.py:80] Step 7700: loss=21.372231, tp=12915, pos=23517, pred=23773, f1=0.546204. I0922 10:23:28.832912 139722080012096 train.py:80] Step 7800: loss=21.178642, tp=12926, pos=23478, pred=23777, f1=0.547074. 
I0922 10:25:31.276920 139722080012096 train.py:80] Step 7900: loss=17.344182, tp=14323, pos=23507, pred=23714, f1=0.606637. I0922 10:27:34.921452 139722080012096 train.py:80] Step 8000: loss=15.020802, tp=14723, pos=23436, pred=23721, f1=0.624425. I0922 10:29:35.936408 139722080012096 train.py:80] Step 8100: loss=15.711227, tp=14073, pos=23025, pred=23260, f1=0.608102. I0922 10:31:40.965057 139722080012096 train.py:80] Step 8200: loss=16.582492, tp=14202, pos=23455, pred=23654, f1=0.602942. I0922 10:33:44.430129 139722080012096 train.py:80] Step 8300: loss=16.631238, tp=13929, pos=23313, pred=23529, f1=0.594723. I0922 10:35:48.898503 139722080012096 train.py:80] Step 8400: loss=17.991025, tp=13852, pos=23627, pred=23878, f1=0.583181. I0922 10:37:55.258298 139722080012096 train.py:80] Step 8500: loss=17.657923, tp=14113, pos=23685, pred=23939, f1=0.592684. I0922 10:39:58.584928 139722080012096 train.py:80] Step 8600: loss=17.480387, tp=13780, pos=23300, pred=23498, f1=0.588914. I0922 10:42:02.865999 139722080012096 train.py:80] Step 8700: loss=18.014438, tp=14024, pos=23631, pred=23861, f1=0.590584. I0922 10:44:05.465761 139722080012096 train.py:80] Step 8800: loss=18.664138, tp=13306, pos=23161, pred=23393, f1=0.571637. I0922 10:46:07.555052 139722080012096 train.py:80] Step 8900: loss=18.410030, tp=13347, pos=23126, pred=23355, f1=0.574299. I0922 10:48:12.009283 139722080012096 train.py:80] Step 9000: loss=14.688259, tp=15117, pos=23401, pred=23604, f1=0.643208. I0922 10:50:18.450331 139722080012096 train.py:80] Step 9100: loss=12.564867, tp=15667, pos=23398, pred=23607, f1=0.666610. I0922 10:52:24.708409 139722080012096 train.py:80] Step 9200: loss=12.866177, tp=15693, pos=23656, pred=23912, f1=0.659813. I0922 10:54:27.243605 139722080012096 train.py:80] Step 9300: loss=13.719843, tp=14910, pos=23318, pred=23497, f1=0.636975. I0922 10:56:31.455309 139722080012096 train.py:80] Step 9400: loss=14.534177, tp=14852, pos=23476, pred=23674, f1=0.629989. I0922 10:58:33.659047 139722080012096 train.py:80] Step 9500: loss=14.643423, tp=14617, pos=23222, pred=23449, f1=0.626385. I0922 11:00:38.039019 139722080012096 train.py:80] Step 9600: loss=15.361955, tp=14446, pos=23417, pred=23652, f1=0.613822. I0922 11:02:42.083901 139722080012096 train.py:80] Step 9700: loss=15.762690, tp=14446, pos=23366, pred=23661, f1=0.614370. I0922 11:04:45.687260 139722080012096 train.py:80] Step 9800: loss=15.678779, tp=14412, pos=23367, pred=23552, f1=0.614335. I0922 11:06:48.677884 139722080012096 train.py:80] Step 9900: loss=15.893612, tp=14192, pos=23102, pred=23378, f1=0.610671. I0922 11:08:54.164862 139722080012096 train.py:80] Step 10000: loss=16.087843, tp=14544, pos=23612, pred=23823, f1=0.613218. I0922 11:10:57.530283 139722080012096 train.py:80] Step 10100: loss=11.549378, tp=15989, pos=23248, pred=23402, f1=0.685488. I0922 11:13:01.629298 139722080012096 train.py:80] Step 10200: loss=11.098275, tp=16026, pos=23189, pred=23402, f1=0.687944. I0922 11:15:05.900809 139722080012096 train.py:80] Step 10300: loss=11.085230, tp=16067, pos=23355, pred=23532, f1=0.685350. I0922 11:17:13.201266 139722080012096 train.py:80] Step 10400: loss=11.824532, tp=16263, pos=23907, pred=24137, f1=0.677004. I0922 11:19:17.517050 139722080012096 train.py:80] Step 10500: loss=12.652596, tp=15423, pos=23294, pred=23486, f1=0.659384. I0922 11:21:21.468870 139722080012096 train.py:80] Step 10600: loss=12.597294, tp=15619, pos=23537, pred=23708, f1=0.661192. 
I0922 11:23:26.897401 139722080012096 train.py:80] Step 10700: loss=12.602532, tp=15532, pos=23447, pred=23635, f1=0.659785. I0922 11:25:32.039829 139722080012096 train.py:80] Step 10800: loss=13.065184, tp=15421, pos=23486, pred=23713, f1=0.653446. I0922 11:27:35.077148 139722080012096 train.py:80] Step 10900: loss=13.413355, tp=14938, pos=23199, pred=23438, f1=0.640607. I0922 11:29:36.273350 139722080012096 train.py:80] Step 11000: loss=13.804809, tp=14740, pos=23156, pred=23387, f1=0.633393. I0922 11:31:37.073923 139722080012096 train.py:80] Step 11100: loss=13.981135, tp=15151, pos=23482, pred=23705, f1=0.642168. I0922 11:33:36.761701 139722080012096 train.py:80] Step 11200: loss=9.245134, tp=17037, pos=23325, pred=23489, f1=0.727859. I0922 11:35:36.925587 139722080012096 train.py:80] Step 11300: loss=9.219799, tp=16775, pos=23198, pred=23384, f1=0.720235. I0922 11:37:37.086534 139722080012096 train.py:80] Step 11400: loss=9.270541, tp=16842, pos=23316, pred=23529, f1=0.719052. I0922 11:39:36.536354 139722080012096 train.py:80] Step 11500: loss=10.103694, tp=16501, pos=23441, pred=23626, f1=0.701171. I0922 11:41:36.211575 139722080012096 train.py:80] Step 11600: loss=10.791561, tp=16144, pos=23517, pred=23706, f1=0.683735. I0922 11:43:34.090583 139722080012096 train.py:80] Step 11700: loss=11.096572, tp=15750, pos=23074, pred=23246, f1=0.680052. I0922 11:45:33.771349 139722080012096 train.py:80] Step 11800: loss=11.352662, tp=15956, pos=23385, pred=23585, f1=0.679412. I0922 11:47:32.546499 139722080012096 train.py:80] Step 11900: loss=11.158924, tp=15865, pos=23348, pred=23524, f1=0.676950. I0922 11:49:32.351606 139722080012096 train.py:80] Step 12000: loss=12.149348, tp=15822, pos=23501, pred=23717, f1=0.670168. I0922 11:51:35.454318 139722080012096 train.py:80] Step 12100: loss=12.153872, tp=15891, pos=23615, pred=23858, f1=0.669475. I0922 11:53:38.807368 139722080012096 train.py:80] Step 12200: loss=11.715309, tp=16010, pos=23684, pred=23887, f1=0.673099. I0922 11:55:41.557393 139722080012096 train.py:80] Step 12300: loss=7.557527, tp=17910, pos=23519, pred=23690, f1=0.758754. I0922 11:57:42.163073 139722080012096 train.py:80] Step 12400: loss=7.142746, tp=17705, pos=23143, pred=23283, f1=0.762719. I0922 11:59:46.003489 139722080012096 train.py:80] Step 12500: loss=8.678975, tp=17250, pos=23519, pred=23700, f1=0.730638. I0922 12:01:48.834656 139722080012096 train.py:80] Step 12600: loss=8.911242, tp=17101, pos=23480, pred=23646, f1=0.725756. I0922 12:03:51.506764 139722080012096 train.py:80] Step 12700: loss=9.313433, tp=16631, pos=23282, pred=23425, f1=0.712142. I0922 12:05:55.877150 139722080012096 train.py:80] Step 12800: loss=9.815657, tp=16708, pos=23515, pred=23721, f1=0.707427. I0922 12:08:00.291095 139722080012096 train.py:80] Step 12900: loss=9.834907, tp=16707, pos=23533, pred=23711, f1=0.707264. I0922 12:10:03.706704 139722080012096 train.py:80] Step 13000: loss=9.861696, tp=16533, pos=23469, pred=23632, f1=0.702023. I0922 12:12:12.333390 139722080012096 train.py:80] Step 13100: loss=10.325450, tp=16720, pos=23818, pred=23976, f1=0.699669. I0922 12:14:17.765394 139722080012096 train.py:80] Step 13200: loss=10.768884, tp=16096, pos=23264, pred=23512, f1=0.688216. I0922 12:16:20.972403 139722080012096 train.py:80] Step 13300: loss=10.602642, tp=16058, pos=23176, pred=23387, f1=0.689732. I0922 12:18:25.961522 139722080012096 train.py:80] Step 13400: loss=6.153967, tp=18889, pos=23652, pred=23780, f1=0.796467. 
I0922 12:20:32.000822 139722080012096 train.py:80] Step 13500: loss=6.669770, tp=18463, pos=23631, pred=23788, f1=0.778717. I0922 12:22:35.792887 139722080012096 train.py:80] Step 13600: loss=7.106067, tp=18045, pos=23563, pred=23664, f1=0.764182. I0922 12:24:40.209888 139722080012096 train.py:80] Step 13700: loss=7.869253, tp=17694, pos=23544, pred=23712, f1=0.748857. I0922 12:26:44.795868 139722080012096 train.py:80] Step 13800: loss=8.115555, tp=17118, pos=23281, pred=23444, f1=0.732713. I0922 12:28:49.515308 139722080012096 train.py:80] Step 13900: loss=8.423900, tp=17130, pos=23266, pred=23427, f1=0.733729. I0922 12:30:56.149018 139722080012096 train.py:80] Step 14000: loss=8.829604, tp=16883, pos=23333, pred=23497, f1=0.721034. I0922 12:33:06.591142 139722080012096 train.py:80] Step 14100: loss=9.139616, tp=16846, pos=23406, pred=23612, f1=0.716577. I0922 12:35:16.623991 139722080012096 train.py:80] Step 14200: loss=9.423518, tp=16775, pos=23567, pred=23697, f1=0.709843. I0922 12:37:26.652253 139722080012096 train.py:80] Step 14300: loss=9.330327, tp=16587, pos=23252, pred=23455, f1=0.710258. I0922 12:39:37.668104 139722080012096 train.py:80] Step 14400: loss=9.297591, tp=16781, pos=23312, pred=23523, f1=0.716601. I0922 12:41:48.514059 139722080012096 train.py:80] Step 14500: loss=5.272758, tp=19262, pos=23639, pred=23742, f1=0.813069. I0922 12:44:00.174637 139722080012096 train.py:80] Step 14600: loss=5.726021, tp=18703, pos=23381, pred=23494, f1=0.797995. I0922 12:46:10.772682 139722080012096 train.py:80] Step 14700: loss=6.541801, tp=17826, pos=23068, pred=23196, f1=0.770621. I0922 12:48:20.254348 139722080012096 train.py:80] Step 14800: loss=6.516363, tp=17876, pos=23063, pred=23248, f1=0.771998. I0922 12:50:32.069921 139722080012096 train.py:80] Step 14900: loss=7.088716, tp=18174, pos=23687, pred=23833, f1=0.764899. I0922 12:52:43.092682 139722080012096 train.py:80] Step 15000: loss=7.657199, tp=17960, pos=23736, pred=23884, f1=0.754305. I0922 12:54:51.367013 139722080012096 train.py:80] Step 15100: loss=7.849149, tp=17402, pos=23356, pred=23521, f1=0.742454. I0922 12:56:59.254962 139722080012096 train.py:80] Step 15200: loss=7.828219, tp=17277, pos=23344, pred=23498, f1=0.737671. I0922 12:59:06.851820 139722080012096 train.py:80] Step 15300: loss=8.446103, tp=17431, pos=23662, pred=23821, f1=0.734200. I0922 13:01:11.662403 139722080012096 train.py:80] Step 15400: loss=8.810841, tp=17243, pos=23591, pred=23771, f1=0.728136. I0922 13:03:18.137265 139722080012096 train.py:80] Step 15500: loss=7.727564, tp=17614, pos=23575, pred=23729, f1=0.744715. I0922 13:05:24.996394 139722080012096 train.py:80] Step 15600: loss=4.833294, tp=19973, pos=23959, pred=24077, f1=0.831585. I0922 13:07:29.602913 139722080012096 train.py:80] Step 15700: loss=5.416218, tp=19170, pos=23599, pred=23710, f1=0.810417. I0922 13:09:32.072685 139722080012096 train.py:80] Step 15800: loss=6.031333, tp=18458, pos=23255, pred=23351, f1=0.792087. I0922 13:11:34.813993 139722080012096 train.py:80] Step 15900: loss=5.835747, tp=18188, pos=22989, pred=23131, f1=0.788725. I0922 13:13:40.865267 139722080012096 train.py:80] Step 16000: loss=6.380317, tp=18741, pos=23807, pred=23926, f1=0.785243. I0922 13:15:42.904908 139722080012096 train.py:80] Step 16100: loss=6.522398, tp=17874, pos=23139, pred=23271, f1=0.770265. I0922 13:17:46.355884 139722080012096 train.py:80] Step 16200: loss=6.858916, tp=18135, pos=23616, pred=23767, f1=0.765464. 
I0922 13:19:48.945443 139722080012096 train.py:80] Step 16300: loss=7.284055, tp=17635, pos=23299, pred=23428, f1=0.754810. I0922 13:21:54.026263 139722080012096 train.py:80] Step 16400: loss=7.390332, tp=17857, pos=23551, pred=23747, f1=0.755085. I0922 13:23:58.065810 139722080012096 train.py:80] Step 16500: loss=7.476712, tp=17405, pos=23187, pred=23358, f1=0.747878. I0922 13:26:02.906382 139722080012096 train.py:80] Step 16600: loss=6.923453, tp=18383, pos=23766, pred=23889, f1=0.771504. I0922 13:28:06.327411 139722080012096 train.py:80] Step 16700: loss=4.485097, tp=19640, pos=23479, pred=23599, f1=0.834360. I0922 13:30:11.248630 139722080012096 train.py:80] Step 16800: loss=4.405707, tp=19675, pos=23583, pred=23653, f1=0.833051. I0922 13:32:16.437488 139722080012096 train.py:80] Step 16900: loss=5.272192, tp=19251, pos=23757, pred=23846, f1=0.808815. I0922 13:34:19.840769 139722080012096 train.py:80] Step 17000: loss=5.617792, tp=18560, pos=23128, pred=23267, f1=0.800086. I0922 13:36:26.574831 139722080012096 train.py:80] Step 17100: loss=5.687082, tp=18539, pos=23255, pred=23381, f1=0.795051. I0922 13:38:35.960380 139722080012096 train.py:80] Step 17200: loss=6.131705, tp=18261, pos=23330, pred=23486, f1=0.780118. I0922 13:40:45.822063 139722080012096 train.py:80] Step 17300: loss=6.392502, tp=18296, pos=23488, pred=23625, f1=0.776686. I0922 13:42:56.918607 139722080012096 train.py:80] Step 17400: loss=6.675230, tp=18147, pos=23516, pred=23664, f1=0.769267. I0922 13:45:07.460292 139722080012096 train.py:80] Step 17500: loss=6.889245, tp=17896, pos=23290, pred=23471, f1=0.765424. I0922 13:47:19.110583 139722080012096 train.py:80] Step 17600: loss=7.197365, tp=18051, pos=23639, pred=23743, f1=0.761935. I0922 13:49:31.119685 139722080012096 train.py:80] Step 17700: loss=5.880382, tp=18917, pos=23684, pred=23847, f1=0.795986. I0922 13:51:36.733765 139722080012096 train.py:80] Step 17800: loss=4.318046, tp=19683, pos=23444, pred=23531, f1=0.838020. I0922 13:53:41.245413 139722080012096 train.py:80] Step 17900: loss=4.383097, tp=19815, pos=23646, pred=23759, f1=0.835988. I0922 13:55:45.210705 139722080012096 train.py:80] Step 18000: loss=4.840162, tp=19359, pos=23525, pred=23597, f1=0.821654. I0922 13:57:48.045739 139722080012096 train.py:80] Step 18100: loss=4.993586, tp=19076, pos=23370, pred=23503, f1=0.813944. I0922 13:59:50.998870 139722080012096 train.py:80] Step 18200: loss=5.396644, tp=18836, pos=23379, pred=23528, f1=0.803121. I0922 14:01:54.812070 139722080012096 train.py:80] Step 18300: loss=5.525679, tp=18904, pos=23580, pred=23704, f1=0.799594. I0922 14:03:57.934076 139722080012096 train.py:80] Step 18400: loss=6.157397, tp=18266, pos=23352, pred=23465, f1=0.780315. I0922 14:05:59.227272 139722080012096 train.py:80] Step 18500: loss=6.136928, tp=18120, pos=23134, pred=23309, f1=0.780311. I0922 14:08:04.499522 139722080012096 train.py:80] Step 18600: loss=6.119025, tp=18739, pos=23798, pred=23912, f1=0.785538. I0922 14:10:07.688135 139722080012096 train.py:80] Step 18700: loss=6.273952, tp=18311, pos=23451, pred=23614, f1=0.778115. I0922 14:12:09.578925 139722080012096 train.py:80] Step 18800: loss=4.895768, tp=19286, pos=23431, pred=23502, f1=0.821852. I0922 14:14:11.890070 139722080012096 train.py:80] Step 18900: loss=3.613177, tp=20079, pos=23373, pred=23451, f1=0.857637. I0922 14:16:14.376280 139722080012096 train.py:80] Step 19000: loss=3.954219, tp=19773, pos=23266, pred=23368, f1=0.848008. 
I0922 14:18:17.202012 139722080012096 train.py:80] Step 19100: loss=4.842511, tp=19374, pos=23502, pred=23611, f1=0.822448. I0922 14:20:21.407144 139722080012096 train.py:80] Step 19200: loss=4.945406, tp=19563, pos=23803, pred=23906, f1=0.820097. I0922 14:22:23.298941 139722080012096 train.py:80] Step 19300: loss=5.060172, tp=19115, pos=23422, pred=23513, f1=0.814531. I0922 14:24:27.662974 139722080012096 train.py:80] Step 19400: loss=5.258329, tp=19020, pos=23537, pred=23646, f1=0.806223. I0922 14:26:31.268561 139722080012096 train.py:80] Step 19500: loss=5.362582, tp=19086, pos=23642, pred=23731, f1=0.805775. I0922 14:28:33.090588 139722080012096 train.py:80] Step 19600: loss=5.640242, tp=19069, pos=23750, pred=23831, f1=0.801538. I0922 14:30:36.335830 139722080012096 train.py:80] Step 19700: loss=6.035873, tp=18501, pos=23391, pred=23531, f1=0.788585. I0922 14:32:39.531512 139722080012096 train.py:80] Step 19800: loss=5.948133, tp=18504, pos=23480, pred=23627, f1=0.785616. I0922 14:34:40.578260 139722080012096 train.py:80] Step 19900: loss=4.503541, tp=19530, pos=23450, pred=23557, f1=0.830940. I0922 14:36:41.542894 139722080012096 train.py:80] Step 20000: loss=3.458437, tp=20363, pos=23507, pred=23584, f1=0.864836. I0922 14:38:40.860669 139722080012096 train.py:80] Step 20100: loss=3.595032, tp=20157, pos=23456, pred=23526, f1=0.858073. I0922 14:40:41.008285 139722080012096 train.py:80] Step 20200: loss=4.436750, tp=19708, pos=23531, pred=23611, f1=0.836112. I0922 14:42:44.690171 139722080012096 train.py:80] Step 20300: loss=4.704680, tp=19646, pos=23746, pred=23872, f1=0.825150. I0922 14:44:44.750712 139722080012096 train.py:80] Step 20400: loss=4.695894, tp=19282, pos=23350, pred=23452, f1=0.823982. I0922 14:46:45.357596 139722080012096 train.py:80] Step 20500: loss=4.665674, tp=19385, pos=23506, pred=23605, f1=0.822950. I0922 14:48:46.754239 139722080012096 train.py:80] Step 20600: loss=4.846766, tp=19386, pos=23609, pred=23717, f1=0.819254. I0922 14:50:46.144184 139722080012096 train.py:80] Step 20700: loss=5.241949, tp=18739, pos=23173, pred=23300, f1=0.806447. I0922 14:52:49.017377 139722080012096 train.py:80] Step 20800: loss=5.492993, tp=19057, pos=23719, pred=23861, f1=0.801051. I0922 14:54:49.965533 139722080012096 train.py:80] Step 20900: loss=5.359367, tp=19031, pos=23526, pred=23645, f1=0.806894. I0922 14:56:52.826578 139722080012096 train.py:80] Step 21000: loss=3.917202, tp=19801, pos=23321, pred=23401, f1=0.847609. I0922 14:58:55.844699 139722080012096 train.py:80] Step 21100: loss=3.157549, tp=20834, pos=23768, pred=23858, f1=0.874900. I0922 15:00:58.311979 139722080012096 train.py:80] Step 21200: loss=3.510983, tp=20498, pos=23659, pred=23741, f1=0.864895. I0922 15:03:01.915463 139722080012096 train.py:80] Step 21300: loss=3.902180, tp=20216, pos=23751, pred=23822, f1=0.849894. I0922 15:05:05.531804 139722080012096 train.py:80] Step 21400: loss=4.464726, tp=19517, pos=23405, pred=23537, f1=0.831537. I0922 15:07:08.592176 139722080012096 train.py:80] Step 21500: loss=4.323597, tp=19661, pos=23482, pred=23574, f1=0.835643. I0922 15:09:09.948727 139722080012096 train.py:80] Step 21600: loss=4.479926, tp=19477, pos=23407, pred=23515, f1=0.830186. I0922 15:11:11.995744 139722080012096 train.py:80] Step 21700: loss=4.798186, tp=19490, pos=23782, pred=23855, f1=0.818272. I0922 15:13:12.577519 139722080012096 train.py:80] Step 21800: loss=4.996112, tp=18887, pos=23241, pred=23336, f1=0.811001. 
I0922 15:15:12.487209 139722080012096 train.py:80] Step 21900: loss=5.339246, tp=18941, pos=23490, pred=23593, f1=0.804579. I0922 15:17:11.870138 139722080012096 train.py:80] Step 22000: loss=5.173669, tp=18805, pos=23298, pred=23420, f1=0.805043. I0922 15:19:10.656210 139722080012096 train.py:80] Step 22100: loss=3.572815, tp=20093, pos=23275, pred=23349, f1=0.861917. I0922 15:21:10.699641 139722080012096 train.py:80] Step 22200: loss=3.050048, tp=20540, pos=23340, pred=23413, f1=0.878660. I0922 15:23:12.415675 139722080012096 train.py:80] Step 22300: loss=3.323514, tp=20657, pos=23744, pred=23820, f1=0.868598. I0922 15:25:13.360054 139722080012096 train.py:80] Step 22400: loss=3.506305, tp=20472, pos=23710, pred=23772, f1=0.862306. I0922 15:27:14.570581 139722080012096 train.py:80] Step 22500: loss=3.884948, tp=20345, pos=23913, pred=24008, f1=0.849106. I0922 15:29:16.694478 139722080012096 train.py:80] Step 22600: loss=4.367197, tp=20092, pos=24003, pred=24101, f1=0.835357. I0922 15:31:15.456084 139722080012096 train.py:80] Step 22700: loss=4.414113, tp=19412, pos=23291, pred=23397, f1=0.831563. I0922 15:33:14.635320 139722080012096 train.py:80] Step 22800: loss=4.310346, tp=19667, pos=23512, pred=23620, f1=0.834550. I0922 15:35:13.796667 139722080012096 train.py:80] Step 22900: loss=4.554974, tp=19236, pos=23212, pred=23317, f1=0.826839. I0922 15:37:12.435021 139722080012096 train.py:80] Step 23000: loss=4.852726, tp=19244, pos=23465, pred=23553, f1=0.818580. I0922 15:39:09.090616 139722080012096 train.py:80] Step 23100: loss=4.917673, tp=18775, pos=23125, pred=23206, f1=0.810472. I0922 15:41:07.454044 139722080012096 train.py:80] Step 23200: loss=3.134142, tp=20407, pos=23293, pred=23355, f1=0.874936. I0922 15:43:04.690736 139722080012096 train.py:80] Step 23300: loss=2.959489, tp=20666, pos=23362, pred=23422, f1=0.883464. I0922 15:45:02.614583 139722080012096 train.py:80] Step 23400: loss=3.304187, tp=20634, pos=23673, pred=23716, f1=0.870835. I0922 15:47:00.418276 139722080012096 train.py:80] Step 23500: loss=3.459288, tp=20253, pos=23444, pred=23511, f1=0.862656. I0922 15:49:00.315401 139722080012096 train.py:80] Step 23600: loss=3.806328, tp=20470, pos=23966, pred=24056, f1=0.852526. I0922 15:50:57.685954 139722080012096 train.py:80] Step 23700: loss=3.996167, tp=19732, pos=23289, pred=23388, f1=0.845470. I0922 15:52:57.215427 139722080012096 train.py:80] Step 23800: loss=4.023884, tp=20131, pos=23788, pred=23866, f1=0.844882. I0922 15:54:56.220336 139722080012096 train.py:80] Step 23900: loss=4.262775, tp=19841, pos=23647, pred=23726, f1=0.837650. I0922 15:56:55.835795 139722080012096 train.py:80] Step 24000: loss=4.252493, tp=19932, pos=23719, pred=23814, f1=0.838659. I0922 15:58:54.256676 139722080012096 train.py:80] Step 24100: loss=4.484764, tp=19358, pos=23378, pred=23458, f1=0.826629. I0922 16:00:51.861639 139722080012096 train.py:80] Step 24200: loss=4.599566, tp=19204, pos=23137, pred=23226, f1=0.828419. I0922 16:02:50.444095 139722080012096 train.py:80] Step 24300: loss=2.870588, tp=20878, pos=23511, pred=23572, f1=0.886859. I0922 16:04:47.141310 139722080012096 train.py:80] Step 24400: loss=2.776432, tp=20685, pos=23314, pred=23386, f1=0.885867. I0922 16:06:45.043409 139722080012096 train.py:80] Step 24500: loss=3.295347, tp=20313, pos=23375, pred=23426, f1=0.868058. I0922 16:08:43.451899 139722080012096 train.py:80] Step 24600: loss=3.424504, tp=20336, pos=23503, pred=23569, f1=0.864038. 
I0922 16:10:42.749999 139722080012096 train.py:80] Step 24700: loss=3.587761, tp=20419, pos=23714, pred=23823, f1=0.859078. I0922 16:12:40.964488 139722080012096 train.py:80] Step 24800: loss=3.767910, tp=20022, pos=23432, pred=23492, f1=0.853380. I0922 16:14:39.487301 139722080012096 train.py:80] Step 24900: loss=3.947854, tp=19924, pos=23500, pred=23600, f1=0.846030. I0922 16:16:37.458528 139722080012096 train.py:80] Step 25000: loss=3.973139, tp=19833, pos=23429, pred=23507, f1=0.845108. I0922 16:18:35.195328 139722080012096 train.py:80] Step 25100: loss=4.061563, tp=19748, pos=23444, pred=23544, f1=0.840555. I0922 16:20:32.207675 139722080012096 train.py:80] Step 25200: loss=4.291134, tp=19684, pos=23505, pred=23598, f1=0.835785. I0922 16:22:31.426050 139722080012096 train.py:80] Step 25300: loss=4.127831, tp=20178, pos=24011, pred=24086, f1=0.839054. I0922 16:24:29.182976 139722080012096 train.py:80] Step 25400: loss=2.432455, tp=21194, pos=23521, pred=23570, f1=0.900130. I0922 16:26:28.326140 139722080012096 train.py:80] Step 25500: loss=2.399236, tp=21481, pos=23787, pred=23826, f1=0.902317. I0922 16:28:24.903785 139722080012096 train.py:80] Step 25600: loss=2.692479, tp=20568, pos=23209, pred=23261, f1=0.885216. I0922 16:30:22.082204 139722080012096 train.py:80] Step 25700: loss=3.181994, tp=20450, pos=23446, pred=23524, f1=0.870769. I0922 16:32:21.844676 139722080012096 train.py:80] Step 25800: loss=3.379774, tp=20291, pos=23471, pred=23550, f1=0.863061. I0922 16:34:19.365159 139722080012096 train.py:80] Step 25900: loss=3.520479, tp=20308, pos=23530, pred=23599, f1=0.861805. I0922 16:36:15.368479 139722080012096 train.py:80] Step 26000: loss=4.032663, tp=19896, pos=23487, pred=23576, f1=0.845505. I0922 16:38:11.647282 139722080012096 train.py:80] Step 26100: loss=4.354989, tp=19460, pos=23373, pred=23456, f1=0.831109. I0922 16:40:10.563739 139722080012096 train.py:80] Step 26200: loss=4.164825, tp=20128, pos=23909, pred=23999, f1=0.840277. I0922 16:42:07.133680 139722080012096 train.py:80] Step 26300: loss=4.110914, tp=19667, pos=23393, pred=23511, f1=0.838607. I0922 16:44:04.625323 139722080012096 train.py:80] Step 26400: loss=4.255870, tp=19850, pos=23602, pred=23707, f1=0.839164. I0922 16:46:01.926784 139722080012096 train.py:80] Step 26500: loss=2.318759, tp=21321, pos=23597, pred=23627, f1=0.902973. I0922 16:47:58.290596 139722080012096 train.py:80] Step 26600: loss=2.400992, tp=21330, pos=23538, pred=23595, f1=0.905098. I0922 16:49:58.350761 139722080012096 train.py:80] Step 26700: loss=2.756897, tp=21248, pos=23889, pred=23978, f1=0.887793. I0922 16:51:54.524178 139722080012096 train.py:80] Step 26800: loss=2.995750, tp=20542, pos=23417, pred=23469, f1=0.876253. I0922 16:53:50.478338 139722080012096 train.py:80] Step 26900: loss=3.091266, tp=20417, pos=23321, pred=23402, f1=0.873959. I0922 16:55:47.941388 139722080012096 train.py:80] Step 27000: loss=3.278067, tp=20421, pos=23474, pred=23556, f1=0.868424. I0922 16:57:44.772850 139722080012096 train.py:80] Step 27100: loss=3.553158, tp=20189, pos=23458, pred=23561, f1=0.858759. I0922 16:59:42.190196 139722080012096 train.py:80] Step 27200: loss=3.832465, tp=20293, pos=23748, pred=23827, f1=0.853095. I0922 17:01:40.541589 139722080012096 train.py:80] Step 27300: loss=3.743458, tp=20156, pos=23669, pred=23773, f1=0.849711. I0922 17:03:36.769371 139722080012096 train.py:80] Step 27400: loss=3.885659, tp=19747, pos=23306, pred=23396, f1=0.845660. 
I0922 17:05:33.133034 139722080012096 train.py:80] Step 27500: loss=3.711789, tp=19969, pos=23413, pred=23488, f1=0.851538. I0922 17:07:30.683795 139722080012096 train.py:80] Step 27600: loss=2.258423, tp=21590, pos=23742, pred=23800, f1=0.908250. I0922 17:09:27.808927 139722080012096 train.py:80] Step 27700: loss=2.303616, tp=21413, pos=23633, pred=23697, f1=0.904838. I0922 17:11:25.878992 139722080012096 train.py:80] Step 27800: loss=2.567873, tp=21073, pos=23554, pred=23619, f1=0.893435. I0922 17:13:23.274307 139722080012096 train.py:80] Step 27900: loss=2.907984, tp=20887, pos=23670, pred=23739, f1=0.881141. I0922 17:15:19.967203 139722080012096 train.py:80] Step 28000: loss=3.145130, tp=20434, pos=23432, pred=23495, f1=0.870885. I0922 17:17:17.028085 139722080012096 train.py:80] Step 28100: loss=3.362016, tp=20245, pos=23351, pred=23456, f1=0.865042. I0922 17:19:12.510951 139722080012096 train.py:80] Step 28200: loss=3.411171, tp=20106, pos=23281, pred=23343, f1=0.862474. I0922 17:21:10.256052 139722080012096 train.py:80] Step 28300: loss=3.783188, tp=20287, pos=23669, pred=23737, f1=0.855883. I0922 17:23:07.965845 139722080012096 train.py:80] Step 28400: loss=3.724092, tp=20109, pos=23519, pred=23568, f1=0.854121. I0922 17:25:05.913057 139722080012096 train.py:80] Step 28500: loss=3.683648, tp=20241, pos=23665, pred=23739, f1=0.853979. I0922 17:27:01.163508 139722080012096 train.py:80] Step 28600: loss=3.357618, tp=20199, pos=23264, pred=23316, f1=0.867282. I0922 17:28:59.106593 139722080012096 train.py:80] Step 28700: loss=2.241036, tp=21770, pos=23909, pred=23958, f1=0.909604. I0922 17:30:55.330132 139722080012096 train.py:80] Step 28800: loss=2.414306, tp=20967, pos=23269, pred=23313, f1=0.900219. I0922 17:32:52.875425 139722080012096 train.py:80] Step 28900: loss=2.341939, tp=21446, pos=23676, pred=23720, f1=0.904971. I0922 17:34:51.778535 139722080012096 train.py:80] Step 29000: loss=2.761002, tp=21019, pos=23686, pred=23748, f1=0.886242. I0922 17:36:48.382598 139722080012096 train.py:80] Step 29100: loss=3.141259, tp=20525, pos=23479, pred=23563, f1=0.872624. I0922 17:38:46.310171 139722080012096 train.py:80] Step 29200: loss=3.216306, tp=20675, pos=23713, pred=23783, f1=0.870600. I0922 17:40:43.088771 139722080012096 train.py:80] Step 29300: loss=3.311541, tp=20291, pos=23387, pred=23454, f1=0.866378. I0922 17:42:40.772479 139722080012096 train.py:80] Step 29400: loss=3.476274, tp=20199, pos=23403, pred=23489, f1=0.861512. I0922 17:44:36.902869 139722080012096 train.py:80] Step 29500: loss=3.607994, tp=19931, pos=23260, pred=23344, f1=0.855334. I0922 17:46:34.878532 139722080012096 train.py:80] Step 29600: loss=3.446960, tp=20384, pos=23573, pred=23657, f1=0.863180. I0922 17:48:32.967514 139722080012096 train.py:80] Step 29700: loss=3.190063, tp=20696, pos=23665, pred=23729, f1=0.873359. I0922 17:50:29.804758 139722080012096 train.py:80] Step 29800: loss=2.184171, tp=21151, pos=23272, pred=23328, f1=0.907768. I0922 17:52:27.410047 139722080012096 train.py:80] Step 29900: loss=2.181815, tp=21564, pos=23707, pred=23746, f1=0.908857. I0922 17:54:23.319416 139722080012096 train.py:80] Step 30000: loss=2.362074, tp=21073, pos=23372, pred=23409, f1=0.900921. I0922 17:56:21.328358 139722080012096 train.py:80] Step 30100: loss=2.685347, tp=21104, pos=23671, pred=23729, f1=0.890464. I0922 17:58:18.940948 139722080012096 train.py:80] Step 30200: loss=2.762422, tp=20916, pos=23576, pred=23625, f1=0.886252. 
I0922 18:00:16.053990 139722080012096 train.py:80] Step 30300: loss=2.909456, tp=20744, pos=23487, pred=23537, f1=0.882273. I0922 18:02:12.294785 139722080012096 train.py:80] Step 30400: loss=3.155341, tp=20401, pos=23434, pred=23497, f1=0.869404. I0922 18:04:08.900497 139722080012096 train.py:80] Step 30500: loss=3.243251, tp=20451, pos=23452, pred=23509, f1=0.870978. I0922 18:06:07.861154 139722080012096 train.py:80] Step 30600: loss=3.440232, tp=20493, pos=23795, pred=23870, f1=0.859876. I0922 18:08:06.850351 139722080012096 train.py:80] Step 30700: loss=3.564115, tp=20474, pos=23681, pred=23757, f1=0.863190. I0922 18:10:05.131218 139722080012096 train.py:80] Step 30800: loss=2.903747, tp=20912, pos=23704, pred=23759, f1=0.881192. I0922 18:12:03.755684 139722080012096 train.py:80] Step 30900: loss=2.031064, tp=21587, pos=23546, pred=23598, f1=0.915790. I0922 18:13:58.765058 139722080012096 train.py:80] Step 31000: loss=2.256378, tp=20984, pos=23143, pred=23203, f1=0.905537. I0922 18:15:56.248424 139722080012096 train.py:80] Step 31100: loss=2.436910, tp=21235, pos=23620, pred=23689, f1=0.897715. I0922 18:17:52.681980 139722080012096 train.py:80] Step 31200: loss=2.758680, tp=20804, pos=23390, pred=23459, f1=0.888130. I0922 18:19:48.756643 139722080012096 train.py:80] Step 31300: loss=2.811844, tp=20701, pos=23454, pred=23474, f1=0.882245. I0922 18:21:44.802607 139722080012096 train.py:80] Step 31400: loss=2.819270, tp=20847, pos=23583, pred=23657, f1=0.882599. I0922 18:23:42.494478 139722080012096 train.py:80] Step 31500: loss=3.013733, tp=20732, pos=23591, pred=23664, f1=0.877452. I0922 18:25:39.604561 139722080012096 train.py:80] Step 31600: loss=3.252626, tp=20429, pos=23515, pred=23583, f1=0.867510. I0922 18:27:37.990322 139722080012096 train.py:80] Step 31700: loss=3.373880, tp=20637, pos=23753, pred=23855, f1=0.866955. I0922 18:29:34.454563 139722080012096 train.py:80] Step 31800: loss=3.093449, tp=20647, pos=23618, pred=23699, f1=0.872710. I0922 18:31:29.891309 139722080012096 train.py:80] Step 31900: loss=2.637674, tp=20821, pos=23313, pred=23371, f1=0.891997. I0922 18:33:29.052736 139722080012096 train.py:80] Step 32000: loss=1.890607, tp=21957, pos=23802, pred=23841, f1=0.921730. I0922 18:35:24.548783 139722080012096 train.py:80] Step 32100: loss=2.016861, tp=21268, pos=23297, pred=23332, f1=0.912222. I0922 18:37:22.265837 139722080012096 train.py:80] Step 32200: loss=2.104430, tp=21395, pos=23523, pred=23553, f1=0.908956. I0922 18:39:20.207607 139722080012096 train.py:80] Step 32300: loss=2.438707, tp=21119, pos=23517, pred=23594, f1=0.896563. I0922 18:41:19.012568 139722080012096 train.py:80] Step 32400: loss=2.935081, tp=21056, pos=23793, pred=23853, f1=0.883852. I0922 18:43:17.207766 139722080012096 train.py:80] Step 32500: loss=3.017653, tp=20734, pos=23599, pred=23669, f1=0.877295. I0922 18:45:14.218416 139722080012096 train.py:80] Step 32600: loss=2.935437, tp=20666, pos=23511, pred=23588, f1=0.877556. I0922 18:47:11.044766 139722080012096 train.py:80] Step 32700: loss=3.160859, tp=20512, pos=23477, pred=23540, f1=0.872535. I0922 18:49:07.378190 139722080012096 train.py:80] Step 32800: loss=3.147726, tp=20521, pos=23446, pred=23510, f1=0.874052. I0922 18:51:05.031163 139722080012096 train.py:80] Step 32900: loss=3.082069, tp=20773, pos=23648, pred=23720, f1=0.877090. I0922 18:53:02.775974 139722080012096 train.py:80] Step 33000: loss=2.493605, tp=21176, pos=23538, pred=23577, f1=0.898907. 
I0922 18:54:59.274927 139722080012096 train.py:80] Step 33100: loss=1.852720, tp=21741, pos=23622, pred=23652, f1=0.919787. I0922 18:56:56.723226 139722080012096 train.py:80] Step 33200: loss=1.858671, tp=21819, pos=23665, pred=23699, f1=0.921333. I0922 18:58:54.575496 139722080012096 train.py:80] Step 33300: loss=2.175428, tp=21575, pos=23718, pred=23738, f1=0.909263. I0922 19:00:52.558569 139722080012096 train.py:80] Step 33400: loss=2.532050, tp=21221, pos=23685, pred=23725, f1=0.895212. I0922 19:02:50.225729 139722080012096 train.py:80] Step 33500: loss=2.688211, tp=20965, pos=23565, pred=23632, f1=0.888404. I0922 19:04:46.411787 139722080012096 train.py:80] Step 33600: loss=2.811771, tp=20846, pos=23480, pred=23531, f1=0.886856. I0922 19:06:42.800776 139722080012096 train.py:80] Step 33700: loss=2.895598, tp=20590, pos=23367, pred=23422, f1=0.880121. I0922 19:08:39.292445 139722080012096 train.py:80] Step 33800: loss=2.974457, tp=20446, pos=23335, pred=23391, f1=0.875144. I0922 19:10:36.564690 139722080012096 train.py:80] Step 33900: loss=3.057090, tp=20555, pos=23454, pred=23513, f1=0.875295. I0922 19:12:34.697710 139722080012096 train.py:80] Step 34000: loss=3.208687, tp=20709, pos=23732, pred=23810, f1=0.871188. I0922 19:14:31.148051 139722080012096 train.py:80] Step 34100: loss=2.360720, tp=21240, pos=23442, pred=23475, f1=0.905429. I0922 19:16:27.072999 139722080012096 train.py:80] Step 34200: loss=1.751135, tp=21729, pos=23481, pred=23535, f1=0.924324. I0922 19:18:22.858381 139722080012096 train.py:80] Step 34300: loss=2.055458, tp=21354, pos=23367, pred=23415, f1=0.912915. I0922 19:20:21.315719 139722080012096 train.py:80] Step 34400: loss=2.014383, tp=21540, pos=23556, pred=23588, f1=0.913796. I0922 19:22:17.270477 139722080012096 train.py:80] Step 34500: loss=2.370240, tp=21080, pos=23390, pred=23455, f1=0.899989. I0922 19:24:15.284617 139722080012096 train.py:80] Step 34600: loss=2.575506, tp=21254, pos=23770, pred=23831, f1=0.893006. I0922 19:26:14.398033 139722080012096 train.py:80] Step 34700: loss=2.814119, tp=21114, pos=23786, pred=23879, f1=0.885933. I0922 19:28:12.207666 139722080012096 train.py:80] Step 34800: loss=2.751083, tp=20985, pos=23580, pred=23646, f1=0.888705. I0922 19:30:07.569925 139722080012096 train.py:80] Step 34900: loss=2.821998, tp=20792, pos=23434, pred=23485, f1=0.886293. I0922 19:32:04.030603 139722080012096 train.py:80] Step 35000: loss=2.919215, tp=20769, pos=23538, pred=23608, f1=0.881050. I0922 19:34:01.581532 139722080012096 train.py:80] Step 35100: loss=2.847411, tp=20804, pos=23555, pred=23616, f1=0.882067. I0922 19:35:57.048288 139722080012096 train.py:80] Step 35200: loss=2.090442, tp=21331, pos=23333, pred=23387, f1=0.913142. I0922 19:37:52.562891 139722080012096 train.py:80] Step 35300: loss=1.842545, tp=21638, pos=23417, pred=23465, f1=0.923083. I0922 19:39:49.170089 139722080012096 train.py:80] Step 35400: loss=2.019406, tp=21442, pos=23507, pred=23549, f1=0.911340. I0922 19:41:46.515129 139722080012096 train.py:80] Step 35500: loss=2.055690, tp=21660, pos=23673, pred=23720, f1=0.914059. I0922 19:43:43.475849 139722080012096 train.py:80] Step 35600: loss=2.195783, tp=21515, pos=23655, pred=23698, f1=0.908707. I0922 19:45:41.060224 139722080012096 train.py:80] Step 35700: loss=2.551981, tp=21200, pos=23707, pred=23758, f1=0.893290. I0922 19:47:37.428096 139722080012096 train.py:80] Step 35800: loss=2.737623, tp=20922, pos=23612, pred=23681, f1=0.884782. 
I0922 19:49:34.151488 139722080012096 train.py:80] Step 35900: loss=2.753441, tp=21000, pos=23660, pred=23694, f1=0.886937. I0922 19:51:31.604121 139722080012096 train.py:80] Step 36000: loss=2.874670, tp=20752, pos=23524, pred=23579, f1=0.881133. I0922 19:53:27.273145 139722080012096 train.py:80] Step 36100: loss=2.738421, tp=20740, pos=23388, pred=23451, f1=0.885587. I0922 19:55:24.078660 139722080012096 train.py:80] Step 36200: loss=2.757435, tp=20939, pos=23650, pred=23699, f1=0.884454. I0922 19:57:20.009840 139722080012096 train.py:80] Step 36300: loss=1.743726, tp=21781, pos=23551, pred=23590, f1=0.924079. I0922 19:59:15.614767 139722080012096 train.py:80] Step 36400: loss=1.634340, tp=21613, pos=23267, pred=23305, f1=0.928154. I0922 20:01:13.214782 139722080012096 train.py:80] Step 36500: loss=1.890989, tp=21785, pos=23659, pred=23697, f1=0.920052. I0922 20:03:10.103165 139722080012096 train.py:80] Step 36600: loss=2.108875, tp=21543, pos=23633, pred=23701, f1=0.910255. I0922 20:05:06.807495 139722080012096 train.py:80] Step 36700: loss=2.193000, tp=21241, pos=23416, pred=23457, f1=0.906321. I0922 20:07:03.406619 139722080012096 train.py:80] Step 36800: loss=2.426746, tp=21217, pos=23593, pred=23662, f1=0.897979. I0922 20:08:59.882698 139722080012096 train.py:80] Step 36900: loss=2.704317, tp=21168, pos=23680, pred=23737, f1=0.892844. I0922 20:10:54.686652 139722080012096 train.py:80] Step 37000: loss=2.609244, tp=20882, pos=23365, pred=23413, f1=0.892813. I0922 20:12:51.437878 139722080012096 train.py:80] Step 37100: loss=2.742430, tp=21086, pos=23686, pred=23752, f1=0.888992. I0922 20:14:46.446369 139722080012096 train.py:80] Step 37200: loss=2.686276, tp=20758, pos=23411, pred=23461, f1=0.885731. I0922 20:16:44.399066 139722080012096 train.py:80] Step 37300: loss=2.901816, tp=21048, pos=23828, pred=23892, f1=0.882146. I0922 20:18:40.948658 139722080012096 train.py:80] Step 37400: loss=1.731014, tp=21890, pos=23635, pred=23665, f1=0.925581. I0922 20:20:37.973158 139722080012096 train.py:80] Step 37500: loss=1.706668, tp=22074, pos=23703, pred=23731, f1=0.930725. I0922 20:22:35.012994 139722080012096 train.py:80] Step 37600: loss=1.616280, tp=21977, pos=23617, pred=23652, f1=0.929869. I0922 20:24:30.936511 139722080012096 train.py:80] Step 37700: loss=2.035565, tp=21463, pos=23478, pred=23519, f1=0.913377. I0922 20:26:27.404545 139722080012096 train.py:80] Step 37800: loss=2.421985, tp=21234, pos=23568, pred=23632, f1=0.899746. I0922 20:28:21.690614 139722080012096 train.py:80] Step 37900: loss=2.463049, tp=20702, pos=23106, pred=23170, f1=0.894719. I0922 20:30:19.263201 139722080012096 train.py:80] Step 38000: loss=2.546905, tp=21245, pos=23709, pred=23775, f1=0.894828. I0922 20:32:15.648373 139722080012096 train.py:80] Step 38100: loss=2.509341, tp=21091, pos=23577, pred=23624, f1=0.893668. I0922 20:34:11.544095 139722080012096 train.py:80] Step 38200: loss=2.645018, tp=20983, pos=23574, pred=23618, f1=0.889261. I0922 20:36:08.941649 139722080012096 train.py:80] Step 38300: loss=2.769072, tp=21203, pos=23817, pred=23897, f1=0.888754. I0922 20:38:03.740140 139722080012096 train.py:80] Step 38400: loss=2.583583, tp=20801, pos=23326, pred=23363, f1=0.891045. I0922 20:40:02.021755 139722080012096 train.py:80] Step 38500: loss=1.788088, tp=21889, pos=23596, pred=23633, f1=0.926930. I0922 20:41:58.158561 139722080012096 train.py:80] Step 38600: loss=1.614459, tp=22070, pos=23620, pred=23658, f1=0.933627. 
I0922 20:43:53.981730 139722080012096 train.py:80] Step 38700: loss=1.852038, tp=21645, pos=23478, pred=23528, f1=0.920946. I0922 20:45:49.237992 139722080012096 train.py:80] Step 38800: loss=1.915086, tp=21507, pos=23395, pred=23428, f1=0.918651. I0922 20:47:45.839364 139722080012096 train.py:80] Step 38900: loss=2.309537, tp=21309, pos=23538, pred=23606, f1=0.903996. I0922 20:49:42.754822 139722080012096 train.py:80] Step 39000: loss=2.264461, tp=21558, pos=23770, pred=23803, f1=0.906312. I0922 20:51:40.769817 139722080012096 train.py:80] Step 39100: loss=2.439788, tp=21390, pos=23746, pred=23796, f1=0.899836. I0922 20:53:35.789091 139722080012096 train.py:80] Step 39200: loss=2.479535, tp=20766, pos=23172, pred=23206, f1=0.895511. I0922 20:55:31.687414 139722080012096 train.py:80] Step 39300: loss=2.403602, tp=21155, pos=23507, pred=23544, f1=0.899237. I0922 20:57:27.371106 139722080012096 train.py:80] Step 39400: loss=2.351266, tp=21356, pos=23676, pred=23731, f1=0.900964. I0922 20:59:22.641347 139722080012096 train.py:80] Step 39500: loss=2.410857, tp=21171, pos=23604, pred=23645, f1=0.896146. I0922 21:01:18.023620 139722080012096 train.py:80] Step 39600: loss=1.618961, tp=21753, pos=23367, pred=23392, f1=0.930431. I0922 21:03:15.379673 139722080012096 train.py:80] Step 39700: loss=1.724146, tp=22023, pos=23685, pred=23708, f1=0.929378. I0922 21:05:12.380249 139722080012096 train.py:80] Step 39800: loss=1.834786, tp=21709, pos=23530, pred=23575, f1=0.921728. I0922 21:07:07.806297 139722080012096 train.py:80] Step 39900: loss=2.122008, tp=21362, pos=23472, pred=23515, f1=0.909273. I0922 21:09:04.518204 139722080012096 train.py:80] Step 40000: loss=2.303140, tp=21394, pos=23691, pred=23728, f1=0.902339. I0922 21:11:01.199511 139722080012096 train.py:80] Step 40100: loss=2.267313, tp=21440, pos=23705, pred=23746, f1=0.903669. I0922 21:12:58.330373 139722080012096 train.py:80] Step 40200: loss=2.289715, tp=21295, pos=23530, pred=23585, f1=0.903958. I0922 21:14:55.144844 139722080012096 train.py:80] Step 40300: loss=2.432203, tp=21252, pos=23639, pred=23692, f1=0.898016. I0922 21:16:50.457982 139722080012096 train.py:80] Step 40400: loss=2.437162, tp=21086, pos=23426, pred=23469, f1=0.899286. I0922 21:18:46.349906 139722080012096 train.py:80] Step 40500: loss=2.451800, tp=21136, pos=23528, pred=23593, f1=0.897095. I0922 21:20:42.624283 139722080012096 train.py:80] Step 40600: loss=2.417839, tp=21358, pos=23689, pred=23730, f1=0.900820. I0922 21:22:40.160539 139722080012096 train.py:80] Step 40700: loss=1.488959, tp=22233, pos=23705, pred=23728, f1=0.937449. I0922 21:24:34.860013 139722080012096 train.py:80] Step 40800: loss=1.544930, tp=21707, pos=23308, pred=23333, f1=0.930812. I0922 21:26:33.285969 139722080012096 train.py:80] Step 40900: loss=1.816422, tp=22047, pos=23875, pred=23926, f1=0.922449. I0922 21:28:30.132064 139722080012096 train.py:80] Step 41000: loss=1.881417, tp=21948, pos=23822, pred=23844, f1=0.920908. I0922 21:30:26.319512 139722080012096 train.py:80] Step 41100: loss=2.184518, tp=21417, pos=23520, pred=23560, f1=0.909813. I0922 21:32:21.778195 139722080012096 train.py:80] Step 41200: loss=2.281538, tp=21166, pos=23376, pred=23424, f1=0.904530. I0922 21:34:18.051622 139722080012096 train.py:80] Step 41300: loss=2.297533, tp=21265, pos=23536, pred=23588, f1=0.902513. I0922 21:36:14.068974 139722080012096 train.py:80] Step 41400: loss=2.269145, tp=21148, pos=23402, pred=23454, f1=0.902681. 
I0922 21:38:09.704584 139722080012096 train.py:80] Step 41500: loss=2.307619, tp=21274, pos=23532, pred=23606, f1=0.902626. I0922 21:40:06.020519 139722080012096 train.py:80] Step 41600: loss=2.427654, tp=21132, pos=23522, pred=23566, f1=0.897554. I0922 21:42:01.573604 139722080012096 train.py:80] Step 41700: loss=2.362397, tp=21163, pos=23392, pred=23439, f1=0.903803. I0922 21:43:59.839003 139722080012096 train.py:80] Step 41800: loss=1.580474, tp=22227, pos=23777, pred=23814, f1=0.934084. I0922 21:45:55.072053 139722080012096 train.py:80] Step 41900: loss=1.389283, tp=21865, pos=23284, pred=23316, f1=0.938412. I0922 21:47:51.341249 139722080012096 train.py:80] Step 42000: loss=1.684796, tp=21827, pos=23498, pred=23523, f1=0.928394. I0922 21:49:49.576179 139722080012096 train.py:80] Step 42100: loss=1.831985, tp=21871, pos=23695, pred=23734, f1=0.922263. I0922 21:51:45.699917 139722080012096 train.py:80] Step 42200: loss=2.027069, tp=21617, pos=23627, pred=23680, f1=0.913903. I0922 21:53:41.597305 139722080012096 train.py:80] Step 42300: loss=2.365096, tp=21079, pos=23408, pred=23462, f1=0.899467. I0922 21:55:37.689641 139722080012096 train.py:80] Step 42400: loss=2.161641, tp=21407, pos=23557, pred=23619, f1=0.907538. I0922 21:57:35.609071 139722080012096 train.py:80] Step 42500: loss=2.382056, tp=21492, pos=23767, pred=23808, f1=0.903500. I0922 21:59:30.257740 139722080012096 train.py:80] Step 42600: loss=2.219754, tp=21209, pos=23388, pred=23402, f1=0.906561. I0922 22:01:27.654164 139722080012096 train.py:80] Step 42700: loss=2.346234, tp=21540, pos=23809, pred=23846, f1=0.903997. I0922 22:03:23.117834 139722080012096 train.py:80] Step 42800: loss=2.429908, tp=21079, pos=23345, pred=23417, f1=0.901544. I0922 22:05:18.117623 139722080012096 train.py:80] Step 42900: loss=1.600518, tp=21883, pos=23471, pred=23493, f1=0.931905. I0922 22:07:15.399984 139722080012096 train.py:80] Step 43000: loss=1.821340, tp=21939, pos=23651, pred=23680, f1=0.927046. I0922 22:09:13.315048 139722080012096 train.py:80] Step 43100: loss=1.708589, tp=22144, pos=23878, pred=23903, f1=0.926896. I0922 22:11:08.838260 139722080012096 train.py:80] Step 43200: loss=1.901294, tp=21569, pos=23388, pred=23419, f1=0.921614. I0922 22:13:03.795510 139722080012096 train.py:80] Step 43300: loss=2.394507, tp=20981, pos=23309, pred=23355, f1=0.899237. I0922 22:14:58.849686 139722080012096 train.py:80] Step 43400: loss=1.990914, tp=21356, pos=23343, pred=23360, f1=0.914545. I0922 22:16:57.224859 139722080012096 train.py:80] Step 43500: loss=2.035816, tp=21735, pos=23795, pred=23839, f1=0.912583. I0922 22:18:51.862408 139722080012096 train.py:80] Step 43600: loss=2.067438, tp=21231, pos=23268, pred=23298, f1=0.911867. I0922 22:20:48.841709 139722080012096 train.py:80] Step 43700: loss=2.262154, tp=21484, pos=23710, pred=23754, f1=0.905276. I0922 22:22:45.219715 139722080012096 train.py:80] Step 43800: loss=2.272359, tp=21428, pos=23642, pred=23689, f1=0.905453. I0922 22:24:43.817417 139722080012096 train.py:80] Step 43900: loss=1.843738, tp=22133, pos=23931, pred=23965, f1=0.924211. I0922 22:26:40.186981 139722080012096 train.py:80] Step 44000: loss=1.289251, tp=22334, pos=23625, pred=23648, f1=0.944895. I0922 22:28:37.043147 139722080012096 train.py:80] Step 44100: loss=1.361315, tp=22218, pos=23615, pred=23652, f1=0.940106. I0922 22:30:32.717700 139722080012096 train.py:80] Step 44200: loss=1.465553, tp=21833, pos=23314, pred=23346, f1=0.935834. 
I0922 22:32:28.169255 139722080012096 train.py:80] Step 44300: loss=1.914948, tp=21553, pos=23477, pred=23517, f1=0.917266. I0922 22:34:23.694560 139722080012096 train.py:80] Step 44400: loss=2.111603, tp=21408, pos=23417, pred=23457, f1=0.913427. I0922 22:36:21.669081 139722080012096 train.py:80] Step 44500: loss=2.111464, tp=21586, pos=23623, pred=23693, f1=0.912419. I0922 22:38:20.170239 139722080012096 train.py:80] Step 44600: loss=2.086740, tp=21230, pos=23331, pred=23369, f1=0.909208. I0922 22:40:19.799649 139722080012096 train.py:80] Step 44700: loss=2.180204, tp=21712, pos=23888, pred=23930, f1=0.908110. I0922 22:42:16.919229 139722080012096 train.py:80] Step 44800: loss=2.157387, tp=21512, pos=23643, pred=23655, f1=0.909637. I0922 22:44:12.516728 139722080012096 train.py:80] Step 44900: loss=2.210505, tp=21431, pos=23618, pred=23677, f1=0.906269. I0922 22:46:10.200824 139722080012096 train.py:80] Step 45000: loss=1.853728, tp=22112, pos=23941, pred=23992, f1=0.922621. I0922 22:48:06.875610 139722080012096 train.py:80] Step 45100: loss=1.448872, tp=22112, pos=23577, pred=23597, f1=0.937466. I0922 22:50:03.216469 139722080012096 train.py:80] Step 45200: loss=1.619548, tp=21819, pos=23434, pred=23478, f1=0.930210. I0922 22:51:59.670019 139722080012096 train.py:80] Step 45300: loss=1.538832, tp=22064, pos=23653, pred=23693, f1=0.932032. I0922 22:53:55.955003 139722080012096 train.py:80] Step 45400: loss=1.722548, tp=21847, pos=23599, pred=23633, f1=0.925093. I0922 22:55:50.966265 139722080012096 train.py:80] Step 45500: loss=1.793514, tp=21521, pos=23354, pred=23408, f1=0.920448. I0922 22:57:46.481579 139722080012096 train.py:80] Step 45600: loss=1.899625, tp=21470, pos=23378, pred=23403, f1=0.917894. I0922 22:59:43.365891 139722080012096 train.py:80] Step 45700: loss=1.989732, tp=21586, pos=23567, pred=23598, f1=0.915340. I0922 23:01:40.740860 139722080012096 train.py:80] Step 45800: loss=2.183514, tp=21575, pos=23740, pred=23782, f1=0.908001. I0922 23:03:35.818718 139722080012096 train.py:80] Step 45900: loss=2.211823, tp=21192, pos=23365, pred=23433, f1=0.905680. I0922 23:05:33.687392 139722080012096 train.py:80] Step 46000: loss=2.458560, tp=21321, pos=23676, pred=23736, f1=0.899393. I0922 23:07:28.640605 139722080012096 train.py:80] Step 46100: loss=1.938977, tp=21434, pos=23313, pred=23335, f1=0.918968. I0922 23:09:23.888250 139722080012096 train.py:80] Step 46200: loss=1.345455, tp=21921, pos=23248, pred=23281, f1=0.942251. I0922 23:11:19.409335 139722080012096 train.py:80] Step 46300: loss=1.345691, tp=22008, pos=23396, pred=23433, f1=0.939930. I0922 23:13:14.525092 139722080012096 train.py:80] Step 46400: loss=1.479798, tp=21897, pos=23415, pred=23436, f1=0.934751. I0922 23:15:10.569641 139722080012096 train.py:80] Step 46500: loss=1.657627, tp=21705, pos=23415, pred=23450, f1=0.926278. I0922 23:17:08.474158 139722080012096 train.py:80] Step 46600: loss=1.862154, tp=21868, pos=23723, pred=23762, f1=0.921049. I0922 23:19:05.562349 139722080012096 train.py:80] Step 46700: loss=1.785061, tp=21847, pos=23632, pred=23675, f1=0.923627. I0922 23:21:03.759367 139722080012096 train.py:80] Step 46800: loss=1.939975, tp=22058, pos=24013, pred=24067, f1=0.917554. I0922 23:23:01.499115 139722080012096 train.py:80] Step 46900: loss=2.184041, tp=21621, pos=23747, pred=23810, f1=0.909267. I0922 23:24:58.083437 139722080012096 train.py:80] Step 47000: loss=2.206805, tp=21244, pos=23444, pred=23493, f1=0.905213. 
I0922 23:26:55.615074 139722080012096 train.py:80] Step 47100: loss=2.212248, tp=21597, pos=23800, pred=23823, f1=0.906999. I0922 23:28:51.800131 139722080012096 train.py:80] Step 47200: loss=1.649838, tp=22014, pos=23653, pred=23665, f1=0.930470. I0922 23:30:49.348733 139722080012096 train.py:80] Step 47300: loss=1.416905, tp=22472, pos=23878, pred=23904, f1=0.940605. I0922 23:32:45.367013 139722080012096 train.py:80] Step 47400: loss=1.459745, tp=22048, pos=23463, pred=23490, f1=0.939152. I0922 23:34:41.213082 139722080012096 train.py:80] Step 47500: loss=1.777848, tp=21664, pos=23407, pred=23457, f1=0.924548. I0922 23:36:39.498347 139722080012096 train.py:80] Step 47600: loss=1.898916, tp=22097, pos=24009, pred=24060, f1=0.919387. I0922 23:38:36.321722 139722080012096 train.py:80] Step 47700: loss=1.966263, tp=21570, pos=23519, pred=23550, f1=0.916527. I0922 23:40:33.627482 139722080012096 train.py:80] Step 47800: loss=2.007213, tp=21886, pos=23840, pred=23893, f1=0.917018. I0922 23:42:29.993461 139722080012096 train.py:80] Step 47900: loss=2.012154, tp=21377, pos=23391, pred=23414, f1=0.913449. I0922 23:44:26.254154 139722080012096 train.py:80] Step 48000: loss=1.975992, tp=21775, pos=23749, pred=23801, f1=0.915878. I0922 23:46:20.831911 139722080012096 train.py:80] Step 48100: loss=1.989483, tp=21260, pos=23240, pred=23275, f1=0.914114. I0922 23:48:14.685851 139722080012096 train.py:80] Step 48200: loss=2.105638, tp=21103, pos=23154, pred=23192, f1=0.910672. I0922 23:50:11.454644 139722080012096 train.py:80] Step 48300: loss=1.329615, tp=22305, pos=23697, pred=23715, f1=0.940901. I0922 23:52:07.461753 139722080012096 train.py:80] Step 48400: loss=1.257412, tp=22121, pos=23388, pred=23423, f1=0.945120. I0922 23:54:03.607336 139722080012096 train.py:80] Step 48500: loss=1.437824, tp=22046, pos=23492, pred=23523, f1=0.937828. I0922 23:55:58.843617 139722080012096 train.py:80] Step 48600: loss=1.562701, tp=21823, pos=23405, pred=23425, f1=0.932009. I0922 23:57:57.178575 139722080012096 train.py:80] Step 48700: loss=1.610100, tp=22276, pos=23906, pred=23925, f1=0.931446. I0922 23:59:54.109217 139722080012096 train.py:80] Step 48800: loss=1.790428, tp=21815, pos=23625, pred=23652, f1=0.922859. I0923 00:01:50.159121 139722080012096 train.py:80] Step 48900: loss=2.105634, tp=21503, pos=23530, pred=23575, f1=0.912982. I0923 00:03:47.778075 139722080012096 train.py:80] Step 49000: loss=2.217825, tp=21479, pos=23665, pred=23728, f1=0.906421. I0923 00:05:44.190512 139722080012096 train.py:80] Step 49100: loss=2.226650, tp=21310, pos=23446, pred=23491, f1=0.908026. I0923 00:07:41.092906 139722080012096 train.py:80] Step 49200: loss=2.075071, tp=21624, pos=23697, pred=23767, f1=0.911175. I0923 00:09:36.302082 139722080012096 train.py:80] Step 49300: loss=2.072321, tp=21200, pos=23277, pred=23316, f1=0.910008. I0923 00:11:35.910051 139722080012096 train.py:80] Step 49400: loss=1.415787, tp=22473, pos=23913, pred=23947, f1=0.939114. I0923 00:13:29.340352 139722080012096 train.py:80] Step 49500: loss=1.185631, tp=21826, pos=23068, pred=23092, f1=0.945667. I0923 00:15:25.544230 139722080012096 train.py:80] Step 49600: loss=1.255130, tp=22401, pos=23650, pred=23673, f1=0.946728. I0923 00:17:20.281785 139722080012096 train.py:80] Step 49700: loss=1.326323, tp=21885, pos=23297, pred=23317, f1=0.938988. I0923 00:19:18.096424 139722080012096 train.py:80] Step 49800: loss=1.504418, tp=22194, pos=23714, pred=23732, f1=0.935548. 
I0923 00:21:15.192996 139722080012096 train.py:80] Step 49900: loss=1.736493, tp=22078, pos=23837, pred=23866, f1=0.925644. I0923 00:23:12.288068 139722080012096 train.py:80] Step 50000: loss=2.063477, tp=21701, pos=23705, pred=23754, f1=0.914516. I0923 00:25:09.319669 139722080012096 train.py:80] Step 50100: loss=2.118725, tp=21563, pos=23687, pred=23717, f1=0.909754. I0923 00:27:04.558828 139722080012096 train.py:80] Step 50200: loss=1.973400, tp=21406, pos=23337, pred=23385, f1=0.916314. I0923 00:29:01.302694 139722080012096 train.py:80] Step 50300: loss=1.924167, tp=21659, pos=23597, pred=23659, f1=0.916667. I0923 00:30:57.402799 139722080012096 train.py:80] Step 50400: loss=1.891913, tp=21392, pos=23290, pred=23347, f1=0.917383. I0923 00:32:54.139666 139722080012096 train.py:80] Step 50500: loss=1.237766, tp=22498, pos=23734, pred=23755, f1=0.947504. I0923 00:34:49.166386 139722080012096 train.py:80] Step 50600: loss=1.239165, tp=22037, pos=23314, pred=23345, f1=0.944598. I0923 00:36:45.508683 139722080012096 train.py:80] Step 50700: loss=1.313654, tp=22290, pos=23640, pred=23661, f1=0.942475. I0923 00:38:41.522229 139722080012096 train.py:80] Step 50800: loss=1.433224, tp=22054, pos=23508, pred=23555, f1=0.937212. I0923 00:40:37.495916 139722080012096 train.py:80] Step 50900: loss=1.690961, tp=21738, pos=23430, pred=23483, f1=0.926737. I0923 00:42:33.200320 139722080012096 train.py:80] Step 51000: loss=1.793067, tp=21801, pos=23550, pred=23586, f1=0.925025. I0923 00:44:29.998516 139722080012096 train.py:80] Step 51100: loss=1.958194, tp=21820, pos=23758, pred=23791, f1=0.917790. I0923 00:46:27.026798 139722080012096 train.py:80] Step 51200: loss=1.865947, tp=21677, pos=23585, pred=23638, f1=0.918070. I0923 00:48:24.434689 139722080012096 train.py:80] Step 51300: loss=1.935943, tp=21857, pos=23780, pred=23801, f1=0.918728. I0923 00:50:20.027256 139722080012096 train.py:80] Step 51400: loss=2.066346, tp=21281, pos=23344, pred=23395, f1=0.910631. I0923 00:52:17.109414 139722080012096 train.py:80] Step 51500: loss=2.015636, tp=21552, pos=23591, pred=23637, f1=0.912679. I0923 00:54:13.899835 139722080012096 train.py:80] Step 51600: loss=1.164544, tp=22385, pos=23592, pred=23601, f1=0.948658. I0923 00:56:10.158940 139722080012096 train.py:80] Step 51700: loss=1.246441, tp=22386, pos=23635, pred=23669, f1=0.946474. I0923 00:58:06.848661 139722080012096 train.py:80] Step 51800: loss=1.435584, tp=22313, pos=23726, pred=23767, f1=0.939633. I0923 01:00:01.235809 139722080012096 train.py:80] Step 51900: loss=1.567976, tp=21788, pos=23316, pred=23363, f1=0.933525. I0923 01:02:00.337482 139722080012096 train.py:80] Step 52000: loss=1.871942, tp=22162, pos=24010, pred=24056, f1=0.922149. I0923 01:03:56.550767 139722080012096 train.py:80] Step 52100: loss=1.855348, tp=21883, pos=23699, pred=23720, f1=0.922963. I0923 01:05:51.836970 139722080012096 train.py:80] Step 52200: loss=1.839221, tp=21620, pos=23457, pred=23483, f1=0.921176. I0923 01:07:47.700764 139722080012096 train.py:80] Step 52300: loss=1.826153, tp=21780, pos=23583, pred=23615, f1=0.922920. I0923 01:09:43.388832 139722080012096 train.py:80] Step 52400: loss=1.710327, tp=21797, pos=23495, pred=23538, f1=0.926881. I0923 01:11:39.128820 139722080012096 train.py:80] Step 52500: loss=1.734904, tp=21788, pos=23534, pred=23570, f1=0.925102. I0923 01:13:34.606553 139722080012096 train.py:80] Step 52600: loss=1.751171, tp=21500, pos=23231, pred=23266, f1=0.924791. 
I0923 01:15:31.123380 139722080012096 train.py:80] Step 52700: loss=1.345770, tp=22334, pos=23632, pred=23663, f1=0.944455. I0923 01:17:26.089269 139722080012096 train.py:80] Step 52800: loss=1.249157, tp=22022, pos=23292, pred=23329, f1=0.944724. I0923 01:19:24.368716 139722080012096 train.py:80] Step 52900: loss=1.265448, tp=22588, pos=23857, pred=23886, f1=0.946233. I0923 01:21:21.922422 139722080012096 train.py:80] Step 53000: loss=1.463976, tp=22291, pos=23758, pred=23804, f1=0.937345. I0923 01:23:18.607440 139722080012096 train.py:80] Step 53100: loss=1.615459, tp=21937, pos=23534, pred=23570, f1=0.931428. I0923 01:25:15.667654 139722080012096 train.py:80] Step 53200: loss=1.686865, tp=21830, pos=23530, pred=23564, f1=0.927082. I0923 01:27:10.726319 139722080012096 train.py:80] Step 53300: loss=1.766300, tp=21506, pos=23278, pred=23316, f1=0.923123. I0923 01:29:07.360636 139722080012096 train.py:80] Step 53400: loss=1.913283, tp=21629, pos=23530, pred=23569, f1=0.918448.
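Note on the training metrics above: the logged f1 is consistent with a micro-averaged F1 computed directly from the logged counts, f1 = 2 * tp / (pos + pred), i.e. the harmonic mean of precision (tp / pred) and recall (tp / pos). This is inferred from the numbers rather than quoted from train.py; the snippet below is a minimal sketch of that relationship, spot-checked against Step 51600 above.

# Minimal sketch (assumption): recover the logged f1 from the tp/pos/pred counts,
# where tp, pos, pred are taken to be true-positive, gold-positive, and
# predicted-positive counts. f1 = 2 * tp / (pos + pred).
def micro_f1(tp, pos, pred):
    return 2.0 * tp / (pos + pred) if (pos + pred) else 0.0

# Spot-check against Step 51600 above: tp=22385, pos=23592, pred=23601, logged f1=0.948658.
assert abs(micro_f1(22385, 23592, 23601) - 0.948658) < 1e-5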
I0922 08:50:57.638213 140620692096832 test.py:99] Evalute ckpt-2800: tp=10250, pred=43373, pos=64486, f1=0.190063. 900 3200 I0922 08:50:58.382886 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-3200 I0922 08:58:40.593705 140620692096832 test.py:99] Evalute ckpt-3200: tp=18091, pred=58129, pos=64486, f1=0.295086. 900 3600 I0922 08:58:41.248566 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-3600 I0922 09:06:26.246458 140620692096832 test.py:99] Evalute ckpt-3600: tp=18724, pred=55451, pos=64486, f1=0.312231. 900 3900 I0922 09:06:26.923317 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-3900 I0922 09:14:04.269442 140620692096832 test.py:99] Evalute ckpt-3900: tp=19620, pred=59746, pos=64486, f1=0.315861. 900 4300 I0922 09:14:04.965903 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-4300 I0922 09:21:49.183579 140620692096832 test.py:99] Evalute ckpt-4300: tp=21712, pred=61013, pos=64486, f1=0.346011. 900 4700 I0922 09:21:49.900783 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-4700 I0922 09:29:34.361210 140620692096832 test.py:99] Evalute ckpt-4700: tp=21373, pred=58274, pos=64486, f1=0.348208. 900 5100 I0922 09:29:35.047265 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-5100 I0922 09:37:35.211915 140620692096832 test.py:99] Evalute ckpt-5100: tp=23487, pred=61900, pos=64486, f1=0.371671. 900 5500 I0922 09:37:35.891408 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-5500 I0922 09:45:58.538736 140620692096832 test.py:99] Evalute ckpt-5500: tp=24205, pred=63416, pos=64486, f1=0.378493. 900 5900 I0922 09:45:59.239472 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-5900 I0922 09:53:46.696364 140620692096832 test.py:99] Evalute ckpt-5900: tp=23303, pred=59542, pos=64486, f1=0.375770. 900 6300 I0922 09:53:47.371476 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-6300 I0922 10:01:48.694832 140620692096832 test.py:99] Evalute ckpt-6300: tp=25385, pred=64409, pos=64486, f1=0.393886. 900 6700 I0922 10:01:49.389987 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-6700 I0922 10:09:42.717122 140620692096832 test.py:99] Evalute ckpt-6700: tp=26372, pred=65044, pos=64486, f1=0.407195. 900 7100 I0922 10:09:43.444682 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-7100 I0922 10:17:47.884603 140620692096832 test.py:99] Evalute ckpt-7100: tp=23763, pred=58361, pos=64486, f1=0.386871. 900 7500 I0922 10:17:48.555839 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-7500 I0922 10:25:35.228708 140620692096832 test.py:99] Evalute ckpt-7500: tp=25483, pred=61659, pos=64486, f1=0.404027. 900 7900 I0922 10:25:35.973917 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-7900 I0922 10:33:20.480946 140620692096832 test.py:99] Evalute ckpt-7900: tp=25972, pred=62245, pos=64486, f1=0.409876. 900 8200 I0922 10:33:21.185399 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-8200 I0922 10:41:09.172681 140620692096832 test.py:99] Evalute ckpt-8200: tp=26576, pred=65003, pos=64486, f1=0.410475. 900 8600 I0922 10:41:09.939060 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-8600 I0922 10:48:59.070025 140620692096832 test.py:99] Evalute ckpt-8600: tp=26980, pred=64035, pos=64486, f1=0.419854. 
9000 I0922 10:48:59.800483 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-9000 I0922 10:56:42.722292 140620692096832 test.py:99] Evalute ckpt-9000: tp=28034, pred=65981, pos=64486, f1=0.429749. 9400 I0922 10:56:43.412718 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-9400 I0922 11:04:23.225572 140620692096832 test.py:99] Evalute ckpt-9400: tp=25881, pred=62155, pos=64486, f1=0.408730. 9700 I0922 11:04:23.949986 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-9700 I0922 11:12:14.731304 140620692096832 test.py:99] Evalute ckpt-9700: tp=26441, pred=63476, pos=64486, f1=0.413263. 9900 10100 I0922 11:12:15.505815 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-10100 I0922 11:19:50.114764 140620692096832 test.py:99] Evalute ckpt-10100: tp=27382, pred=64968, pos=64486, f1=0.423038. 9900 10500 I0922 11:19:50.788130 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-10500 I0922 11:27:26.188684 140620692096832 test.py:99] Evalute ckpt-10500: tp=24759, pred=58932, pos=64486, f1=0.401222. 9900 10800 I0922 11:27:26.953408 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-10800 I0922 11:35:08.393223 140620692096832 test.py:99] Evalute ckpt-10800: tp=26024, pred=59819, pos=64486, f1=0.418712. 9900 11200 I0922 11:35:09.136953 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-11200 I0922 11:42:56.894840 140620692096832 test.py:99] Evalute ckpt-11200: tp=27163, pred=63133, pos=64486, f1=0.425689. 9900 11600 I0922 11:42:57.602924 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-11600 I0922 11:50:45.426377 140620692096832 test.py:99] Evalute ckpt-11600: tp=26966, pred=64093, pos=64486, f1=0.419446. 9900 12000 I0922 11:50:46.157343 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-12000 I0922 11:58:30.098991 140620692096832 test.py:99] Evalute ckpt-12000: tp=26661, pred=61856, pos=64486, f1=0.422045. 9900 12400 I0922 11:58:30.803871 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-12400 I0922 12:06:22.785790 140620692096832 test.py:99] Evalute ckpt-12400: tp=27398, pred=63648, pos=64486, f1=0.427646. 9900 12800 I0922 12:06:23.498459 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-12800 I0922 12:14:11.448567 140620692096832 test.py:99] Evalute ckpt-12800: tp=26742, pred=61388, pos=64486, f1=0.424901. 9900 13100 I0922 12:14:12.212456 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-13100 I0922 12:22:06.970767 140620692096832 test.py:99] Evalute ckpt-13100: tp=27232, pred=63726, pos=64486, f1=0.424796. 9900 13500 I0922 12:22:07.669660 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-13500 I0922 12:29:46.171024 140620692096832 test.py:99] Evalute ckpt-13500: tp=27694, pred=63364, pos=64486, f1=0.433226. 9900 13900 I0922 12:29:46.907524 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-13900 I0922 12:37:35.694589 140620692096832 test.py:99] Evalute ckpt-13900: tp=27224, pred=64595, pos=64486, f1=0.421813. 9900 14300 I0922 12:37:36.427401 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-14300 I0922 12:45:28.209046 140620692096832 test.py:99] Evalute ckpt-14300: tp=27481, pred=63534, pos=64486, f1=0.429324. 
9900 14600 I0922 12:45:28.929951 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-14600 I0922 12:53:18.954976 140620692096832 test.py:99] Evalute ckpt-14600: tp=26695, pred=61343, pos=64486, f1=0.424306. 9900 15000 I0922 12:53:19.727662 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-15000 I0922 13:01:08.524445 140620692096832 test.py:99] Evalute ckpt-15000: tp=26599, pred=62254, pos=64486, f1=0.419741. 9900 15300 I0922 13:01:09.342399 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-15300 I0922 13:09:00.108871 140620692096832 test.py:99] Evalute ckpt-15300: tp=26936, pred=61064, pos=64486, f1=0.429088. 9900 15700 I0922 13:09:00.924031 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-15700 I0922 13:16:51.868468 140620692096832 test.py:99] Evalute ckpt-15700: tp=27583, pred=63085, pos=64486, f1=0.432434. 9900 16100 I0922 13:16:52.653878 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-16100 I0922 13:24:43.650179 140620692096832 test.py:99] Evalute ckpt-16100: tp=27401, pred=63682, pos=64486, f1=0.427579. 9900 16500 I0922 13:24:44.384232 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-16500 I0922 13:32:28.842751 140620692096832 test.py:99] Evalute ckpt-16500: tp=27824, pred=63744, pos=64486, f1=0.433970. 9900 16900 I0922 13:32:29.586617 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-16900 I0922 13:40:26.894575 140620692096832 test.py:99] Evalute ckpt-16900: tp=27584, pred=62384, pos=64486, f1=0.434839. 9900 17200 I0922 13:40:27.679574 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-17200 I0922 13:48:26.435041 140620692096832 test.py:99] Evalute ckpt-17200: tp=27335, pred=61786, pos=64486, f1=0.432954. 9900 17600 I0922 13:48:27.216406 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-17600 I0922 13:56:11.526912 140620692096832 test.py:99] Evalute ckpt-17600: tp=27157, pred=61480, pos=64486, f1=0.431180. 9900 18000 I0922 13:56:12.280215 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-18000 I0922 14:04:04.302159 140620692096832 test.py:99] Evalute ckpt-18000: tp=28202, pred=63358, pos=64486, f1=0.441194. 9900 18400 I0922 14:04:05.046144 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-18400 I0922 14:12:03.307739 140620692096832 test.py:99] Evalute ckpt-18400: tp=27353, pred=63425, pos=64486, f1=0.427688. 9900 18700 I0922 14:12:04.057051 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-18700 I0922 14:20:00.457583 140620692096832 test.py:99] Evalute ckpt-18700: tp=28061, pred=63701, pos=64486, f1=0.437814. 9900 19100 I0922 14:20:01.205011 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-19100 I0922 14:27:50.818572 140620692096832 test.py:99] Evalute ckpt-19100: tp=26863, pred=60778, pos=64486, f1=0.428902. 9900 19500 I0922 14:27:51.531259 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-19500 I0922 14:35:46.271463 140620692096832 test.py:99] Evalute ckpt-19500: tp=28553, pred=64367, pos=64486, f1=0.443187. 9900 19900 I0922 14:35:46.996145 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-19900 I0922 14:44:00.369218 140620692096832 test.py:99] Evalute ckpt-19900: tp=28263, pred=63602, pos=64486, f1=0.441306. 
9900 20300 I0922 14:44:01.102273 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-20300 I0922 14:51:47.630226 140620692096832 test.py:99] Evalute ckpt-20300: tp=28139, pred=64830, pos=64486, f1=0.435198. 9900 20700 I0922 14:51:48.383799 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-20700 I0922 14:59:50.895764 140620692096832 test.py:99] Evalute ckpt-20700: tp=27412, pred=62690, pos=64486, f1=0.431088. 9900 21100 I0922 14:59:51.673949 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-21100 I0922 15:07:38.346491 140620692096832 test.py:99] Evalute ckpt-21100: tp=28035, pred=63122, pos=64486, f1=0.439393. 9900 21500 I0922 15:07:39.100256 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-21500 I0922 15:15:29.281116 140620692096832 test.py:99] Evalute ckpt-21500: tp=28405, pred=63945, pos=64486, f1=0.442339. 9900 21900 I0922 15:15:30.082395 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-21900 I0922 15:23:32.320494 140620692096832 test.py:99] Evalute ckpt-21900: tp=26724, pred=60999, pos=64486, f1=0.425931. 9900 22300 I0922 15:23:33.123024 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-22300 I0922 15:31:27.301458 140620692096832 test.py:99] Evalute ckpt-22300: tp=28401, pred=64663, pos=64486, f1=0.439818. 9900 22700 I0922 15:31:28.101658 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-22700 I0922 15:39:31.708056 140620692096832 test.py:99] Evalute ckpt-22700: tp=27887, pred=62008, pos=64486, f1=0.440922. 9900 23100 I0922 15:39:32.502882 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-23100 I0922 15:47:57.293337 140620692096832 test.py:99] Evalute ckpt-23100: tp=28370, pred=63921, pos=64486, f1=0.441876. 9900 23500 I0922 15:47:58.071724 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-23500 I0922 15:56:02.742829 140620692096832 test.py:99] Evalute ckpt-23500: tp=28841, pred=65286, pos=64486, f1=0.444487. 9900 23900 I0922 15:56:03.512205 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-23900 I0922 16:04:10.608257 140620692096832 test.py:99] Evalute ckpt-23900: tp=28954, pred=65740, pos=64486, f1=0.444673. 9900 24300 I0922 16:04:11.375652 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-24300 I0922 16:12:14.688378 140620692096832 test.py:99] Evalute ckpt-24300: tp=29136, pred=64616, pos=64486, f1=0.451364. 9900 24700 I0922 16:12:15.413062 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-24700 I0922 16:20:17.474199 140620692096832 test.py:99] Evalute ckpt-24700: tp=28710, pred=64910, pos=64486, f1=0.443754. 9900 25100 I0922 16:20:18.211266 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-25100 I0922 16:28:26.275006 140620692096832 test.py:99] Evalute ckpt-25100: tp=27803, pred=62275, pos=64486, f1=0.438668. 9900 25600 I0922 16:28:27.005051 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-25600 I0922 16:36:40.287298 140620692096832 test.py:99] Evalute ckpt-25600: tp=28816, pred=62927, pos=64486, f1=0.452324. 9900 26000 I0922 16:36:41.017691 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-26000 I0922 16:44:59.412069 140620692096832 test.py:99] Evalute ckpt-26000: tp=28633, pred=63611, pos=64486, f1=0.447052. 
9900 26400 I0922 16:45:00.164333 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-26400 I0922 16:53:16.352422 140620692096832 test.py:99] Evalute ckpt-26400: tp=27751, pred=60896, pos=64486, f1=0.442663. 9900 26800 I0922 16:53:17.090637 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-26800 I0922 17:01:41.433361 140620692096832 test.py:99] Evalute ckpt-26800: tp=28051, pred=62974, pos=64486, f1=0.440154. 9900 27300 I0922 17:01:42.196517 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-27300 I0922 17:10:08.457556 140620692096832 test.py:99] Evalute ckpt-27300: tp=28353, pred=64131, pos=64486, f1=0.440890. 9900 27700 I0922 17:10:09.198982 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-27700 I0922 17:18:27.479816 140620692096832 test.py:99] Evalute ckpt-27700: tp=28684, pred=62999, pos=64486, f1=0.449998. 9900 28100 I0922 17:18:28.229852 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-28100 I0922 17:26:49.261812 140620692096832 test.py:99] Evalute ckpt-28100: tp=27868, pred=62674, pos=64486, f1=0.438314. 9900 28500 I0922 17:26:50.011560 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-28500 I0922 17:35:07.422685 140620692096832 test.py:99] Evalute ckpt-28500: tp=28242, pred=62654, pos=64486, f1=0.444266. 9900 29000 I0922 17:35:08.206881 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-29000 I0922 17:43:20.188046 140620692096832 test.py:99] Evalute ckpt-29000: tp=28345, pred=62729, pos=64486, f1=0.445624. 9900 29400 I0922 17:43:20.962562 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-29400 I0922 17:51:33.572405 140620692096832 test.py:99] Evalute ckpt-29400: tp=28472, pred=63418, pos=64486, f1=0.445209. 9900 29800 I0922 17:51:34.327511 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-29800 I0922 17:59:47.643919 140620692096832 test.py:99] Evalute ckpt-29800: tp=28203, pred=61707, pos=64486, f1=0.446982. 9900 30200 I0922 17:59:48.435814 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-30200 I0922 18:07:59.004852 140620692096832 test.py:99] Evalute ckpt-30200: tp=28343, pred=62011, pos=64486, f1=0.448121. 9900 30600 I0922 18:07:59.796730 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-30600 I0922 18:16:03.834593 140620692096832 test.py:99] Evalute ckpt-30600: tp=28504, pred=64298, pos=64486, f1=0.442664. 9900 31100 I0922 18:16:04.587454 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-31100 I0922 18:24:09.346575 140620692096832 test.py:99] Evalute ckpt-31100: tp=28905, pred=63898, pos=64486, f1=0.450290. 9900 31500 I0922 18:24:10.122289 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-31500 I0922 18:32:14.435449 140620692096832 test.py:99] Evalute ckpt-31500: tp=28442, pred=61983, pos=64486, f1=0.449786. 9900 31900 I0922 18:32:15.202054 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-31900 I0922 18:40:22.829989 140620692096832 test.py:99] Evalute ckpt-31900: tp=29259, pred=63484, pos=64486, f1=0.457279. 9900 32300 I0922 18:40:23.577794 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-32300 I0922 18:48:28.717645 140620692096832 test.py:99] Evalute ckpt-32300: tp=28537, pred=63120, pos=64486, f1=0.447267. 
9900 32700 I0922 18:48:29.489849 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-32700 I0922 18:56:33.811380 140620692096832 test.py:99] Evalute ckpt-32700: tp=28995, pred=64036, pos=64486, f1=0.451207. 9900 33100 I0922 18:56:34.582968 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-33100 I0922 19:04:45.934556 140620692096832 test.py:99] Evalute ckpt-33100: tp=28865, pred=63691, pos=64486, f1=0.450393. 9900 33600 I0922 19:04:46.758604 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-33600 I0922 19:12:38.910947 140620692096832 test.py:99] Evalute ckpt-33600: tp=28029, pred=60833, pos=64486, f1=0.447322. 9900 34000 I0922 19:12:39.695165 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-34000 I0922 19:20:36.897920 140620692096832 test.py:99] Evalute ckpt-34000: tp=27974, pred=60934, pos=64486, f1=0.446085. 9900 34400 I0922 19:20:37.705825 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-34400 I0922 19:28:33.927686 140620692096832 test.py:99] Evalute ckpt-34400: tp=29640, pred=64803, pos=64486, f1=0.458508. 9900 34800 I0922 19:28:34.775827 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-34800 I0922 19:37:01.985765 140620692096832 test.py:99] Evalute ckpt-34800: tp=28828, pred=63681, pos=64486, f1=0.449851. 9900 35200 I0922 19:37:02.881550 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-35200 I0922 19:45:17.516208 140620692096832 test.py:99] Evalute ckpt-35200: tp=29129, pred=63375, pos=64486, f1=0.455635. 9900 35600 I0922 19:45:18.276942 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-35600 I0922 19:53:20.880673 140620692096832 test.py:99] Evalute ckpt-35600: tp=28540, pred=62419, pos=64486, f1=0.449785. 9900 36000 I0922 19:53:21.673001 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-36000 I0922 20:01:28.272580 140620692096832 test.py:99] Evalute ckpt-36000: tp=28441, pred=62770, pos=64486, f1=0.446989. 9900 36500 I0922 20:01:29.058200 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-36500 I0922 20:09:30.443087 140620692096832 test.py:99] Evalute ckpt-36500: tp=28793, pred=62229, pos=64486, f1=0.454453. 9900 36900 I0922 20:09:31.220275 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-36900 I0922 20:17:41.148846 140620692096832 test.py:99] Evalute ckpt-36900: tp=28696, pred=63569, pos=64486, f1=0.448182. 9900 37300 I0922 20:17:41.929607 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-37300 I0922 20:25:56.507954 140620692096832 test.py:99] Evalute ckpt-37300: tp=28427, pred=63622, pos=64486, f1=0.443797. 9900 37700 I0922 20:25:57.300596 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-37700 I0922 20:33:59.325290 140620692096832 test.py:99] Evalute ckpt-37700: tp=29168, pred=64559, pos=64486, f1=0.452059. 9900 38100 I0922 20:34:00.088083 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-38100 I0922 20:42:13.238403 140620692096832 test.py:99] Evalute ckpt-38100: tp=28913, pred=63716, pos=64486, f1=0.451054. 9900 38600 I0922 20:42:14.043648 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-38600 I0922 20:50:33.686656 140620692096832 test.py:99] Evalute ckpt-38600: tp=28701, pred=62709, pos=64486, f1=0.451291. 
9900 39000 I0922 20:50:34.501354 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-39000 I0922 20:58:48.195917 140620692096832 test.py:99] Evalute ckpt-39000: tp=28997, pred=63225, pos=64486, f1=0.454103. 9900 39400 I0922 20:58:49.048009 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-39400 I0922 21:07:03.064852 140620692096832 test.py:99] Evalute ckpt-39400: tp=28908, pred=63753, pos=64486, f1=0.450846. 9900 39800 I0922 21:07:03.870476 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-39800 I0922 21:15:12.217784 140620692096832 test.py:99] Evalute ckpt-39800: tp=28837, pred=64452, pos=64486, f1=0.447300. 9900 40300 I0922 21:15:13.051947 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-40300 I0922 21:23:28.535184 140620692096832 test.py:99] Evalute ckpt-40300: tp=29209, pred=64125, pos=64486, f1=0.454222. 9900 40700 I0922 21:23:29.367932 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-40700 I0922 21:31:46.906393 140620692096832 test.py:99] Evalute ckpt-40700: tp=29545, pred=63080, pos=64486, f1=0.463211. 9900 41100 I0922 21:31:47.765123 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-41100 I0922 21:40:02.599488 140620692096832 test.py:99] Evalute ckpt-41100: tp=29670, pred=64073, pos=64486, f1=0.461578. 9900 41500 I0922 21:40:03.402255 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-41500 I0922 21:48:10.362937 140620692096832 test.py:99] Evalute ckpt-41500: tp=29197, pred=63786, pos=64486, f1=0.455236. 9900 42000 I0922 21:48:11.204966 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-42000 I0922 21:56:07.126764 140620692096832 test.py:99] Evalute ckpt-42000: tp=29513, pred=63402, pos=64486, f1=0.461544. 9900 42400 I0922 21:56:08.005571 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-42400 I0922 22:04:26.610547 140620692096832 test.py:99] Evalute ckpt-42400: tp=29553, pred=64173, pos=64486, f1=0.459400. 9900 42800 I0922 22:04:27.462499 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-42800 I0922 22:12:44.403953 140620692096832 test.py:99] Evalute ckpt-42800: tp=28957, pred=64009, pos=64486, f1=0.450710. 9900 43200 I0922 22:12:45.231481 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-43200 I0922 22:20:53.992176 140620692096832 test.py:99] Evalute ckpt-43200: tp=27759, pred=62303, pos=64486, f1=0.437877. 9900 43700 I0922 22:20:54.831311 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-43700 I0922 22:29:05.801457 140620692096832 test.py:99] Evalute ckpt-43700: tp=29368, pred=65422, pos=64486, f1=0.452135. 9900 44100 I0922 22:29:06.656390 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-44100 I0922 22:37:18.344208 140620692096832 test.py:99] Evalute ckpt-44100: tp=29626, pred=64244, pos=64486, f1=0.460281. 9900 44500 I0922 22:37:19.240568 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-44500 I0922 22:45:52.320563 140620692096832 test.py:99] Evalute ckpt-44500: tp=29411, pred=63519, pos=64486, f1=0.459529. 9900 44900 I0922 22:45:53.173660 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-44900 I0922 22:54:04.948967 140620692096832 test.py:99] Evalute ckpt-44900: tp=29454, pred=64950, pos=64486, f1=0.455113. 
9900 45400 I0922 22:54:05.748335 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-45400 I0922 23:02:10.994948 140620692096832 test.py:99] Evalute ckpt-45400: tp=29258, pred=64225, pos=64486, f1=0.454631. 9900 45800 I0922 23:02:11.776811 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-45800 I0922 23:10:26.883400 140620692096832 test.py:99] Evalute ckpt-45800: tp=29373, pred=64266, pos=64486, f1=0.456273. 9900 46200 I0922 23:10:27.690825 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-46200 I0922 23:18:42.989511 140620692096832 test.py:99] Evalute ckpt-46200: tp=29022, pred=62436, pos=64486, f1=0.457320. 9900 46600 I0922 23:18:43.809975 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-46600 I0922 23:27:04.350923 140620692096832 test.py:99] Evalute ckpt-46600: tp=29071, pred=63022, pos=64486, f1=0.455987. 9900 47100 I0922 23:27:05.220773 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-47100 I0922 23:35:11.828565 140620692096832 test.py:99] Evalute ckpt-47100: tp=28245, pred=60936, pos=64486, f1=0.450399. 9900 47500 I0922 23:35:12.652924 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-47500 I0922 23:43:26.605526 140620692096832 test.py:99] Evalute ckpt-47500: tp=29095, pred=64544, pos=64486, f1=0.450980. 9900 47900 I0922 23:43:27.415037 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-47900 I0922 23:51:44.902137 140620692096832 test.py:99] Evalute ckpt-47900: tp=29764, pred=64323, pos=64486, f1=0.462142. 9900 48300 I0922 23:51:45.758519 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-48300 I0923 00:00:00.694782 140620692096832 test.py:99] Evalute ckpt-48300: tp=29892, pred=63687, pos=64486, f1=0.466432. 9900 48800 I0923 00:00:01.571675 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-48800 I0923 00:08:19.855266 140620692096832 test.py:99] Evalute ckpt-48800: tp=28683, pred=62910, pos=64486, f1=0.450297. 9900 49200 I0923 00:08:20.722337 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-49200 I0923 00:16:39.884711 140620692096832 test.py:99] Evalute ckpt-49200: tp=29126, pred=63037, pos=64486, f1=0.456796. 9900 49600 I0923 00:16:40.758100 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-49600 I0923 00:24:51.932706 140620692096832 test.py:99] Evalute ckpt-49600: tp=29129, pred=62362, pos=64486, f1=0.459274. 9900 50000 I0923 00:24:52.823449 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-50000 I0923 00:33:02.519163 140620692096832 test.py:99] Evalute ckpt-50000: tp=27574, pred=59199, pos=64486, f1=0.445875. 9900 50500 I0923 00:33:03.342244 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-50500 I0923 00:41:26.004486 140620692096832 test.py:99] Evalute ckpt-50500: tp=29345, pred=62551, pos=64486, f1=0.461991. 9900 50900 I0923 00:41:26.841905 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-50900 I0923 00:49:50.686286 140620692096832 test.py:99] Evalute ckpt-50900: tp=29543, pred=64497, pos=64486, f1=0.458091. 9900 51300 I0923 00:49:51.527196 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-51300 I0923 00:58:09.272156 140620692096832 test.py:99] Evalute ckpt-51300: tp=29402, pred=65168, pos=64486, f1=0.453546. 
9900 51800 I0923 00:58:10.208889 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-51800 I0923 01:06:18.989921 140620692096832 test.py:99] Evalute ckpt-51800: tp=29682, pred=63509, pos=64486, f1=0.463799. 9900 52200 I0923 01:06:19.843469 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-52200 I0923 01:14:41.353330 140620692096832 test.py:99] Evalute ckpt-52200: tp=28547, pred=61897, pos=64486, f1=0.451754. 9900 52600 I0923 01:14:42.274654 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-52600 I0923 01:22:54.907652 140620692096832 test.py:99] Evalute ckpt-52600: tp=29512, pred=63869, pos=64486, f1=0.459850. 9900 53000 I0923 01:22:55.813140 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-53000 I0923 01:31:01.558587 140620692096832 test.py:99] Evalute ckpt-53000: tp=30152, pred=65010, pos=64486, f1=0.465682. 9900 53500 I0923 01:31:02.414990 140620692096832 saver.py:1280] Restoring parameters from model/v28/ckpt-53500
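The test.py lines above interleave checkpoint restores (saver.py) with one evaluation per checkpoint, so the test-set F1 for each ckpt-N can be read back out of the log. The helper below is a minimal sketch (not part of test.py) that parses those lines and reports the best checkpoint; the regex matches the "Evalute" spelling exactly as emitted by test.py:99. Over the excerpt above, the highest test F1 so far is ckpt-48300 at f1=0.466432, far below the ~0.91-0.95 training F1 logged around the same steps, i.e. a sizable train/test gap.

import re

# Minimal sketch (assumption): pull (checkpoint step, test F1) pairs out of the
# eval log text and report the best one. The pattern mirrors the logged format,
# including the "Evalute" spelling emitted by test.py:99.
EVAL_RE = re.compile(r"Evalute ckpt-(\d+): tp=\d+, pred=\d+, pos=\d+, f1=(\d+\.\d+)")

def best_checkpoint(log_text):
    results = [(int(step), float(f1)) for step, f1 in EVAL_RE.findall(log_text)]
    return max(results, key=lambda r: r[1]) if results else None

# Example (eval.log is a hypothetical path holding the log text above):
#   best_checkpoint(open("eval.log").read())  ->  (48300, 0.466432)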