trainlog-1250M-61k-to-143k.txt (forked from l4rz/gpt-2-training)
stopped at [1,0]<stdout>:[62000 | 499687.48] loss=2.4035 avg=2.5381 lr=4.35e-06 // 27.14, 310 wallclock hours, 1800 gpu-hours
dataset has 415743691 tokens
[1,0]<stdout>:[62300 | 5368.86] loss=3.1489 avg=2.9729 lr=4.99e-06
[1,0]<stdout>:[67000 | 89234.46] loss=2.9279 avg=2.9132 lr=4.88e-06
[1,0]<stdout>:[68000 | 107082.76] loss=3.1647 avg=2.9074 lr=4.85e-06
[1,0]<stdout>:[69000 | 124926.69] loss=2.6821 avg=2.9026 lr=4.83e-06
[1,0]<stdout>:[70000 | 142775.03] loss=2.6493 avg=2.8998 lr=4.80e-06
[1,0]<stdout>:[71000 | 160617.63] loss=2.6533 avg=2.8792 lr=4.78e-06
[1,0]<stdout>:[72000 | 178460.71] loss=2.7168 avg=2.8767 lr=4.76e-06
[1,0]<stdout>:[73000 | 196299.36] loss=2.9205 avg=2.8551 lr=4.73e-06
[1,0]<stdout>:[74000 | 214138.83] loss=3.0825 avg=2.8608 lr=4.71e-06
[1,0]<stdout>:[75000 | 231977.65] loss=2.6536 avg=2.8824 lr=4.69e-06
[1,0]<stdout>:[76000 | 249818.57] loss=2.7183 avg=2.8601 lr=4.66e-06
[1,0]<stdout>:[80000 | 321217.83] loss=2.5388 avg=2.8325 lr=4.57e-06
[1,0]<stdout>:[81100 | 1807.08] loss=2.8175 avg=2.9542 lr=2.00e-05
[1,0]<stdout>:[85000 | 71446.78] loss=2.9087 avg=2.8819 lr=1.96e-05
[1,0]<stdout>:[86000 | 89304.34] loss=3.0069 avg=2.8857 lr=1.95e-05
[1,0]<stdout>:[94900 | 248182.19] loss=2.8641 avg=2.7955 lr=1.87e-05
[1,0]<stdout>:[96000 | 267825.89] loss=2.5209 avg=2.7825 lr=1.86e-05
[1,0]<stdout>:[97000 | 285683.04] loss=2.6479 avg=2.7541 lr=1.85e-05
[1,0]<stdout>:[98000 | 303539.72] loss=2.5208 avg=2.7567 lr=1.84e-05
[1,0]<stdout>:[99000 | 321394.62] loss=2.9387 avg=2.7499 lr=1.83e-05
[1,0]<stdout>:[100000 | 339250.51] loss=2.9402 avg=2.7324 lr=1.82e-05
[1,0]<stdout>:[106546 | 456094.67] loss=2.4365 avg=2.6818 lr=1.76e-05
[1,0]<stdout>:[109186 | 503211.14] loss=2.8078 avg=2.6695 lr=1.74e-05
[1,0]<stdout>:[128549 | 849028.18] loss=2.2812 avg=2.5300 lr=1.58e-05
[1,0]<stdout>:[130001 | 33.34] loss=2.5390 avg=2.5390 lr=1.00e-05
[1,0]<stdout>:[133000 | 53627.85] loss=2.3714 avg=2.4903 lr=9.85e-06
[1,0]<stdout>:[134000 | 71492.94] loss=2.3166 avg=2.4974 lr=9.80e-06
[1,0]<stdout>:[134815 | 86045.02] loss=2.3925 avg=2.4929 lr=9.76e-06
[1,0]<stdout>:[135001 | 34.39] loss=2.3089 avg=2.3089 lr=7.00e-06
[1,0]<stdout>:[135317 | 5675.87] loss=2.4351 avg=2.4669 lr=6.99e-06
[1,0]<stdout>:[135404 | 7231.47] loss=2.5243 avg=2.4590 lr=6.99e-06
[1,0]<stdout>:[138000 | 53602.84] loss=2.5463 avg=2.4539 lr=6.90e-06
[1,0]<stdout>:[138248 | 4438.38] loss=2.2507 avg=2.4496 lr=4.99e-06
[1,0]<stdout>:[139401 | 25044.11] loss=2.2435 avg=2.4482 lr=4.97e-06
[1,0]<stdout>:[142483 | 80071.18] loss=2.3628 avg=2.4207 lr=4.89e-06
[1,0]<stdout>:[142940 | 88237.79] loss=2.5682 avg=2.4468 lr=4.88e-06
[1,0]<stdout>:[142964 | 88659.18] loss=2.3248 avg=2.4390 lr=4.88e-06 // 12.5 epochs on the new DS, additional 1600 gpu-hours
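
The log lines above follow a fixed format: [step | elapsed_seconds] loss=... avg=... lr=..., with occasional trailing comments after //. The snippet below is a minimal parsing sketch, assuming only that format; the file name, function name, and field names are illustrative and not part of the original training code.

import re

# Matches the step/elapsed/loss/avg/lr fields of lines like:
#   [1,0]<stdout>:[62000 | 499687.48] loss=2.4035 avg=2.5381 lr=4.35e-06
LINE_RE = re.compile(
    r"\[(?P<step>\d+)\s*\|\s*(?P<elapsed>[\d.]+)\]\s*"
    r"loss=(?P<loss>[\d.]+)\s+avg=(?P<avg>[\d.]+)\s+lr=(?P<lr>[\d.eE+-]+)"
)

def parse_log(path):
    """Yield (step, elapsed_seconds, loss, avg_loss, lr) tuples from the log."""
    with open(path) as f:
        for line in f:
            m = LINE_RE.search(line)
            if m:  # non-matching lines (e.g. "dataset has ... tokens") are skipped
                yield (
                    int(m.group("step")),
                    float(m.group("elapsed")),
                    float(m.group("loss")),
                    float(m.group("avg")),
                    float(m.group("lr")),
                )

if __name__ == "__main__":
    for step, elapsed, loss, avg, lr in parse_log("trainlog-1250M-61k-to-143k.txt"):
        print(f"step {step:>6}  loss {loss:.4f}  avg {avg:.4f}  lr {lr:.2e}")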