lapp0 committed on
Commit
2063507
·
verified ·
1 Parent(s): 9bd4885

End of training

Browse files
README.md CHANGED
@@ -16,14 +16,14 @@ This student model is distilled from the teacher model [gpt2](https://huggingfac
16
  The [Distily](https://github.com/lapp0/distily) library was used for this distillation.
17
 
18
  It achieves the following results on the evaluation set:
19
- - eval_enwikippl: 653.3577
20
- - eval_frwikippl: 986.1998
21
- - eval_zhwikippl: 379.8699
22
- - eval_tinystoriesppl: 1082.1683
23
- - eval_loss: 1.3023
24
- - eval_runtime: 12.5969
25
- - eval_samples_per_second: 47.631
26
- - eval_steps_per_second: 11.908
27
 
28
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
29
  should probably proofread and complete it, then remove this comment.
@@ -64,47 +64,47 @@ Peak GPU Memory: 3.9293 GB
64
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | tinystoriesppl | zhwikippl |
65
  | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
66
  | **teacher eval** | | 270.2348 | 76.8142 | | | | | 671.1238 | 22.8030 |
67
- | 0 | 0 | 147374.6094 | 4251118206976.0 | 19.8108 | 12.5898 | 47.658 | 11.914 | 74.6838 | 6171058503680.0 |
68
- | 1500 | 0.0253 | 995.8284 | 4478.0557 | 2.2057 | 12.629 | 47.51 | 11.877 | 1054.7445 | 39317.4570 |
69
- | 3000 | 0.0505 | 759.2491 | 2876.1150 | 1.7221 | 12.6775 | 47.328 | 11.832 | 930.6636 | 1598.6740 |
70
- | 4500 | 0.0758 | 679.3580 | 1449.2272 | 1.5342 | 12.6534 | 47.418 | 11.855 | 954.7816 | 415.1080 |
71
- | 6000 | 0.1010 | 706.9536 | 1264.4604 | 1.4442 | 12.6336 | 47.492 | 11.873 | 1114.5806 | 874.3105 |
72
- | 7500 | 0.1263 | 581.0081 | 953.5186 | 1.3672 | 12.5682 | 47.74 | 11.935 | 860.4433 | 287.9040 |
73
- | 9000 | 0.1515 | 653.3577 | 986.1998 | 1.3023 | 12.5969 | 47.631 | 11.908 | 1082.1683 | 379.8699 |
74
- | 10500 | 0.1768 | 634.6018 | 878.6852 | 1.2366 | 12.5486 | 47.814 | 11.954 | 1111.3147 | 267.4301 |
75
- | 12000 | 0.2020 | 543.3941 | 782.5607 | 1.1708 | 12.6162 | 47.558 | 11.889 | 914.1931 | 280.9046 |
76
- | 13500 | 0.2273 | 621.1537 | 751.0798 | 1.1457 | 12.6507 | 47.428 | 11.857 | 1146.2101 | 287.0221 |
77
- | 15000 | 0.2525 | 576.3350 | 773.9283 | 1.1070 | 12.6882 | 47.288 | 11.822 | 1048.3120 | 244.8425 |
78
- | 16500 | 0.2778 | 524.7780 | 686.7684 | 1.0660 | 12.6142 | 47.565 | 11.891 | 963.1450 | 180.7172 |
79
- | 18000 | 0.3030 | 547.1536 | 748.9669 | 1.0617 | 12.6351 | 47.487 | 11.872 | 1048.8325 | 393.3814 |
80
- | 19500 | 0.3283 | 521.4248 | 608.5453 | 1.0117 | 12.6667 | 47.368 | 11.842 | 1005.0343 | 194.0343 |
81
- | 21000 | 0.3535 | 492.6230 | 757.1074 | 0.9890 | 12.6396 | 47.47 | 11.867 | 925.2551 | 316.0413 |
82
- | 22500 | 0.3788 | 508.8848 | 631.0673 | 0.9599 | 12.5581 | 47.778 | 11.944 | 1014.2992 | 269.3275 |
83
- | 24000 | 0.4040 | 448.4678 | 634.5434 | 0.9540 | 12.6193 | 47.546 | 11.887 | 838.1882 | 182.7780 |
84
- | 25500 | 0.4293 | 465.3311 | 685.5602 | 0.9076 | 12.6325 | 47.497 | 11.874 | 941.0688 | 236.3699 |
85
- | 27000 | 0.4545 | 455.5760 | 536.7122 | 0.8543 | 12.6616 | 47.387 | 11.847 | 944.9666 | 158.6557 |
86
- | 28500 | 0.4798 | 422.2133 | 444.7551 | 0.7497 | 12.7174 | 47.179 | 11.795 | 918.8527 | 161.5927 |
87
- | 30000 | 0.5051 | 404.8533 | 401.2530 | 0.7146 | 12.5557 | 47.787 | 11.947 | 903.7859 | 159.8987 |
88
- | 31500 | 0.5303 | 401.0141 | 391.1385 | 0.6968 | 12.5584 | 47.777 | 11.944 | 901.9575 | 144.2610 |
89
- | 33000 | 0.5556 | 414.6530 | 376.1317 | 0.6896 | 12.6093 | 47.584 | 11.896 | 957.7856 | 160.5613 |
90
- | 34500 | 0.5808 | 403.2803 | 388.9411 | 0.6821 | 12.5399 | 47.847 | 11.962 | 924.6055 | 165.9398 |
91
- | 36000 | 0.6061 | 394.4821 | 343.9616 | 0.6697 | 12.5519 | 47.801 | 11.95 | 889.5546 | 170.7110 |
92
- | 37500 | 0.6313 | 400.1528 | 363.8464 | 0.6703 | 12.5536 | 47.795 | 11.949 | 920.4871 | 147.2159 |
93
- | 39000 | 0.6566 | 391.2865 | 364.2054 | 0.6676 | 12.5746 | 47.715 | 11.929 | 891.6525 | 156.6264 |
94
- | 40500 | 0.6818 | 388.4776 | 368.1123 | 0.6612 | 12.5571 | 47.782 | 11.945 | 888.4889 | 139.5851 |
95
- | 42000 | 0.7071 | 400.2923 | 352.6450 | 0.6593 | 12.5709 | 47.729 | 11.932 | 929.3182 | 138.6479 |
96
- | 43500 | 0.7323 | 387.7111 | 360.0483 | 0.6497 | 12.6167 | 47.556 | 11.889 | 881.3199 | 138.9349 |
97
- | 45000 | 0.7576 | 380.8126 | 334.1832 | 0.6313 | 12.6877 | 47.29 | 11.822 | 876.7783 | 125.0634 |
98
- | 46500 | 0.7828 | 380.8054 | 327.5193 | 0.6242 | 12.5708 | 47.73 | 11.932 | 882.1217 | 129.8663 |
99
- | 48000 | 0.8081 | 377.8082 | 338.2561 | 0.6204 | 12.6081 | 47.589 | 11.897 | 877.0321 | 131.2159 |
100
- | 49500 | 0.8333 | 379.1130 | 327.4732 | 0.6185 | 12.5502 | 47.808 | 11.952 | 883.5084 | 123.8266 |
101
- | 51000 | 0.8586 | 377.6328 | 326.7014 | 0.6177 | 12.6001 | 47.619 | 11.905 | 880.3737 | 123.1512 |
102
- | 52500 | 0.8838 | 376.4498 | 325.6333 | 0.6136 | 12.7004 | 47.242 | 11.811 | 876.8870 | 121.4464 |
103
- | 54000 | 0.9091 | 377.0334 | 324.0776 | 0.6123 | 12.7392 | 47.099 | 11.775 | 879.5005 | 121.6815 |
104
- | 55500 | 0.9343 | 377.6328 | 325.2666 | 0.6112 | 12.661 | 47.39 | 11.847 | 881.6116 | 121.6897 |
105
- | 57000 | 0.9596 | 376.8437 | 323.6670 | 0.6106 | 12.6149 | 47.563 | 11.891 | 879.0644 | 121.3654 |
106
- | 58500 | 0.9848 | 376.7562 | 324.3744 | 0.6101 | 12.5659 | 47.748 | 11.937 | 879.3189 | 121.1148 |
107
- | 59400 | 1.0 | 376.9021 | 324.4201 | 0.6100 | 12.5762 | 47.709 | 11.927 | 880.1915 | 121.0986 |
108
 
109
  ### Framework versions
110
  - Distily 0.2.0
 
16
  The [Distily](https://github.com/lapp0/distily) library was used for this distillation.
17
 
18
  It achieves the following results on the evaluation set:
19
+ - eval_enwikippl: 665.9925
20
+ - eval_frwikippl: 995.4457
21
+ - eval_zhwikippl: 405.3946
22
+ - eval_tinystoriesppl: 1100.5725
23
+ - eval_loss: 1.3024
24
+ - eval_runtime: 12.5753
25
+ - eval_samples_per_second: 47.713
26
+ - eval_steps_per_second: 11.928
27
 
28
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
29
  should probably proofread and complete it, then remove this comment.
 
64
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | tinystoriesppl | zhwikippl |
65
  | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
66
  | **teacher eval** | | 270.2348 | 76.8142 | | | | | 671.1238 | 22.8030 |
67
+ | 0 | 0 | 147374.6094 | 4251118206976.0 | 19.8108 | 12.6652 | 47.374 | 11.843 | 74.6838 | 6171058503680.0 |
68
+ | 1500 | 0.0253 | 1012.5726 | 4501.9321 | 2.2064 | 12.5479 | 47.817 | 11.954 | 1084.7205 | 39061.2969 |
69
+ | 3000 | 0.0505 | 761.3547 | 2880.7776 | 1.7218 | 12.6141 | 47.566 | 11.891 | 932.5889 | 1552.8525 |
70
+ | 4500 | 0.0758 | 682.1792 | 1444.0309 | 1.5343 | 12.6458 | 47.447 | 11.862 | 963.2644 | 421.1599 |
71
+ | 6000 | 0.1010 | 673.6849 | 1216.2458 | 1.4424 | 12.6927 | 47.271 | 11.818 | 1035.7787 | 983.8034 |
72
+ | 7500 | 0.1263 | 630.5226 | 924.8793 | 1.3688 | 12.561 | 47.767 | 11.942 | 971.2607 | 351.8923 |
73
+ | 9000 | 0.1515 | 665.9925 | 995.4457 | 1.3024 | 12.5753 | 47.713 | 11.928 | 1100.5725 | 405.3946 |
74
+ | 10500 | 0.1768 | 649.4595 | 870.4929 | 1.2363 | 12.5912 | 47.652 | 11.913 | 1147.8689 | 379.8699 |
75
+ | 12000 | 0.2020 | 552.0709 | 756.2815 | 1.1687 | 12.5514 | 47.804 | 11.951 | 915.4786 | 247.3208 |
76
+ | 13500 | 0.2273 | 574.5076 | 775.2103 | 1.1446 | 12.6584 | 47.399 | 11.85 | 1022.3383 | 258.0553 |
77
+ | 15000 | 0.2525 | 570.0630 | 872.7639 | 1.1033 | 12.573 | 47.721 | 11.93 | 1034.7090 | 205.1337 |
78
+ | 16500 | 0.2778 | 524.1483 | 695.0405 | 1.0708 | 12.5445 | 47.83 | 11.957 | 960.6801 | 179.8155 |
79
+ | 18000 | 0.3030 | 558.0261 | 722.4153 | 1.0562 | 12.6414 | 47.463 | 11.866 | 1092.5500 | 238.2534 |
80
+ | 19500 | 0.3283 | 535.8491 | 646.8846 | 1.0133 | 12.5343 | 47.869 | 11.967 | 1038.2650 | 224.3871 |
81
+ | 21000 | 0.3535 | 498.7090 | 643.3860 | 0.9866 | 12.6044 | 47.602 | 11.901 | 945.8655 | 325.0199 |
82
+ | 22500 | 0.3788 | 501.5469 | 612.7169 | 0.9680 | 12.5367 | 47.86 | 11.965 | 979.3635 | 253.6864 |
83
+ | 24000 | 0.4040 | 376.6320 | 629.0483 | 0.9542 | 12.5557 | 47.787 | 11.947 | 639.3351 | 209.0216 |
84
+ | 25500 | 0.4293 | 481.3532 | 705.2970 | 0.9196 | 12.6849 | 47.3 | 11.825 | 966.3749 | 375.7875 |
85
+ | 27000 | 0.4545 | 459.1099 | 522.3182 | 0.8577 | 12.5747 | 47.715 | 11.929 | 958.1420 | 189.4054 |
86
+ | 28500 | 0.4798 | 413.4502 | 431.4271 | 0.7560 | 12.5416 | 47.841 | 11.96 | 891.3210 | 176.5119 |
87
+ | 30000 | 0.5051 | 403.5616 | 415.3713 | 0.7195 | 12.548 | 47.817 | 11.954 | 882.3771 | 152.6556 |
88
+ | 31500 | 0.5303 | 406.3142 | 383.7035 | 0.7008 | 12.7238 | 47.156 | 11.789 | 912.3057 | 155.9905 |
89
+ | 33000 | 0.5556 | 424.4844 | 373.8076 | 0.6957 | 12.5614 | 47.765 | 11.941 | 974.8803 | 171.0759 |
90
+ | 34500 | 0.5808 | 403.1555 | 398.5213 | 0.6867 | 12.5658 | 47.748 | 11.937 | 913.2111 | 178.8704 |
91
+ | 36000 | 0.6061 | 399.7424 | 356.4906 | 0.6771 | 12.5757 | 47.711 | 11.928 | 904.7578 | 169.4632 |
92
+ | 37500 | 0.6313 | 398.5905 | 372.6379 | 0.6750 | 12.652 | 47.423 | 11.856 | 912.7961 | 158.8251 |
93
+ | 39000 | 0.6566 | 392.1436 | 371.0796 | 0.6723 | 12.6742 | 47.34 | 11.835 | 882.8148 | 176.4061 |
94
+ | 40500 | 0.6818 | 393.4750 | 371.6812 | 0.6672 | 12.6703 | 47.355 | 11.839 | 901.9575 | 134.3779 |
95
+ | 42000 | 0.7071 | 399.2395 | 357.3452 | 0.6651 | 12.6545 | 47.414 | 11.853 | 913.0604 | 135.6295 |
96
+ | 43500 | 0.7323 | 391.1350 | 370.6879 | 0.6558 | 12.6748 | 47.338 | 11.834 | 896.4939 | 156.0113 |
97
+ | 45000 | 0.7576 | 382.1500 | 345.0898 | 0.6354 | 12.6893 | 47.284 | 11.821 | 884.7507 | 140.7350 |
98
+ | 46500 | 0.7828 | 379.9360 | 334.1126 | 0.6281 | 12.6503 | 47.43 | 11.857 | 877.5396 | 127.1069 |
99
+ | 48000 | 0.8081 | 379.3625 | 342.2339 | 0.6241 | 12.6749 | 47.338 | 11.834 | 882.8514 | 128.6507 |
100
+ | 49500 | 0.8333 | 379.1130 | 333.6659 | 0.6222 | 12.6951 | 47.262 | 11.816 | 881.2473 | 125.1969 |
101
+ | 51000 | 0.8586 | 378.2769 | 332.6569 | 0.6217 | 12.6252 | 47.524 | 11.881 | 883.0703 | 128.0856 |
102
+ | 52500 | 0.8838 | 377.0043 | 335.4331 | 0.6182 | 12.6655 | 47.373 | 11.843 | 880.3371 | 128.4364 |
103
+ | 54000 | 0.9091 | 376.5811 | 333.1023 | 0.6165 | 12.6459 | 47.446 | 11.862 | 877.0681 | 129.0633 |
104
+ | 55500 | 0.9343 | 377.9547 | 333.2431 | 0.6157 | 12.6412 | 47.464 | 11.866 | 883.1432 | 127.1832 |
105
+ | 57000 | 0.9596 | 378.2183 | 332.4462 | 0.6147 | 12.6477 | 47.439 | 11.86 | 884.0200 | 126.3209 |
106
+ | 58500 | 0.9848 | 377.9839 | 333.1023 | 0.6146 | 12.6522 | 47.422 | 11.856 | 883.7274 | 126.2198 |
107
+ | 59400 | 1.0 | 378.0425 | 333.0085 | 0.6147 | 12.651 | 47.427 | 11.857 | 883.7274 | 126.2198 |
108
 
109
  ### Framework versions
110
  - Distily 0.2.0
logs/batch_size=1, learning_rate=0.0001, warmup_ratio=0.1/events.out.tfevents.1724080697.5f530b1cf724 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe64b1314c2bb18a0cba461ffbedc549b1379583a9006761bb2e5e01c6e0bfc3
3
+ size 312