Datasets:

Modalities:
Text
Formats:
parquet
ArXiv:
Libraries:
Datasets
Dask
License:
juliehunter committed on
Commit
99e495b
·
verified ·
1 Parent(s): 0051f07

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +33 -42
README.md CHANGED
@@ -627,8 +627,8 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
627
  <table>
628
  <thead>
629
  <tr>
630
- <th><a href="#subset"><strong>subset</strong></a></th>
631
- <th><strong>language</strong></th>
632
  <th><strong>M docs</strong></th>
633
  <th><strong>B words</strong></th>
634
  <th><strong>B tokens</strong></th>
@@ -640,20 +640,20 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
640
  <tr>
641
  <td rowspan="11" style="vertical-align: top;"><strong>TOTAL</strong></td>
642
  <td></td>
643
- <td>2187.656</td>
644
- <td>1359.271</td>
645
- <td>2320.616</td>
646
- <td>8862.514</td>
647
  <td></td>
648
  </tr>
649
  <tr>
650
 
651
  <td><strong>French (fr)</strong></td>
652
- <td>654.906</td>
653
- <td>586.937</td>
654
- <td>934.372</td>
655
- <td>3639.986</td>
656
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_french_pie.png">composition details</a></td>
657
  </tr>
658
  <tr>
659
 
@@ -662,7 +662,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
662
  <td>412.202</td>
663
  <td>611.894</td>
664
  <td>2553.541</td>
665
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_english_pie.png">composition details</a></td>
666
  </tr>
667
  <tr>
668
 
@@ -671,7 +671,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
671
  <td>51.306</td>
672
  <td>228.954</td>
673
  <td>630.749</td>
674
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_code_pie.png">composition details</a></td>
675
  </tr>
676
  <tr>
677
 
@@ -680,7 +680,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
680
  <td>105.609</td>
681
  <td>206.610</td>
682
  <td>764.779</td>
683
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_german_pie.png">composition details</a></td>
684
  </tr>
685
  <tr>
686
 
@@ -689,7 +689,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
689
  <td>123.857</td>
690
  <td>200.825</td>
691
  <td>759.457</td>
692
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_spanish_pie.png">composition details</a></td>
693
  </tr>
694
  <tr>
695
 
@@ -698,7 +698,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
698
  <td>62.051</td>
699
  <td>112.031</td>
700
  <td>404.454</td>
701
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_italian_pie.png">composition details</a></td>
702
  </tr>
703
  <tr>
704
 
@@ -707,7 +707,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
707
  <td>17.016</td>
708
  <td>25.494</td>
709
  <td>107.658</td>
710
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_fr-en_pie.png">composition details</a></td>
711
  </tr>
712
  <tr>
713
 
@@ -745,7 +745,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
745
  <td>477.758</td>
746
  <td>741.023</td>
747
  <td>2974.596</td>
748
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_redpajama-french_histogram.png">composition details</a></td>
749
  </tr>
750
  <tr>
751
 
@@ -754,7 +754,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
754
  <td>103.078</td>
755
  <td>201.371</td>
756
  <td>747.631</td>
757
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_redpajama-german_histogram.png">composition details</a></td>
758
  </tr>
759
  <tr>
760
 
@@ -763,7 +763,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
763
  <td>121.751</td>
764
  <td>197.125</td>
765
  <td>746.984</td>
766
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_redpajama-spanish_histogram.png">composition details</a></td>
767
  </tr>
768
  <tr>
769
 
@@ -772,7 +772,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
772
  <td>60.194</td>
773
  <td>108.416</td>
774
  <td>393.012</td>
775
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_redpajama-italian_histogram.png">composition details</a></td>
776
  </tr>
777
  <tr>
778
  <td><a href="#finewebedu"><strong>FineWebEdu</strong></a></td>
@@ -781,7 +781,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
781
  <td>327.453</td>
782
  <td>467.837</td>
783
  <td>2018.215</td>
784
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_finewebedu-english_histogram.png">composition details</a></td>
785
  </tr>
786
  <tr>
787
  <td colspan="7"><h4 id="category-newspaper">Category: Newspaper</h4></td></tr>
@@ -801,12 +801,12 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
801
  <td>8.902</td>
802
  <td>14.313</td>
803
  <td>50.844</td>
804
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_americanstories-english_histogram.png">composition details</a></td>
805
  </tr>
806
  <tr>
807
  <td colspan="7"><h4 id="category-technical">Category: Technical</h4></td></tr>
808
  <tr>
809
- <td><a href="#pes2o"><strong>PeS2o</strong></a></td>
810
  <td><strong>English (en)</strong></td>
811
  <td>38.972</td>
812
  <td>42.296</td>
@@ -833,15 +833,6 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
833
  <td></td>
834
  </tr>
835
  <tr>
836
- <td><a href="#persee"><strong>Persee</strong></a></td>
837
- <td><strong>French (fr)</strong></td>
838
- <td>1.094</td>
839
- <td>3.250</td>
840
- <td>5.754</td>
841
- <td>20.314</td>
842
- <td></td>
843
- </tr>
844
- <tr>
845
  <td><a href="#pile-uncopyrighted"><strong>Pile (USPTO_Backgrounds)</strong></a></td>
846
  <td><strong>English (en)</strong></td>
847
  <td>5.139</td>
@@ -1019,7 +1010,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1019
  <tr>
1020
  <td colspan="7"><h4 id="category-legislative-transcripts">Category: Legislative Transcripts</h4></td></tr>
1021
  <tr>
1022
- <td rowspan="4" style="vertical-align: top;"><a href="#europarl-monolingual-and-parallel"><strong>Europarl</strong></a></td>
1023
  <td><strong>German (de)</strong></td>
1024
  <td>0.0102</td>
1025
  <td>0.0451</td>
@@ -1140,7 +1131,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1140
  <tr>
1141
  <td colspan="7"><h4 id="category-math">Category: Math</h4></td></tr>
1142
  <tr>
1143
- <td><a href="#mathpile"><strong>MathPile</strong></a></td>
1144
  <td><strong>English (en)</strong></td>
1145
  <td>0.737</td>
1146
  <td>3.408</td>
@@ -1186,7 +1177,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1186
  <td>0.818</td>
1187
  <td>1.161</td>
1188
  <td>4.709</td>
1189
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_claire-english_pie.png">composition details</a></td>
1190
  </tr>
1191
  <tr>
1192
 
@@ -1195,7 +1186,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1195
  <td>0.210</td>
1196
  <td>0.311</td>
1197
  <td>1.314</td>
1198
- <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/pie_distribution_claire-french_pie.png">composition details</a></td>
1199
  </tr>
1200
  <tr>
1201
  <td><a href="#youtube"><strong>YouTube</strong></a></td>
@@ -1207,7 +1198,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1207
  <td></td>
1208
  </tr>
1209
  <tr>
1210
- <td><a href="#stac"><strong>Stac</strong></a></td>
1211
  <td><strong>English (en)</strong></td>
1212
  <td>0.0000450</td>
1213
  <td>0.0000529</td>
@@ -1216,7 +1207,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1216
  <td></td>
1217
  </tr>
1218
  <tr>
1219
- <td colspan="7"><h4 id="category-multilingual-parallel">Category: Multilingual Parallel</h4></td></tr>
1220
  <tr>
1221
  <td><a href="#croissantaligned"><strong>CroissantAligned</strong></a></td>
1222
  <td><strong>fr-en</strong></td>
@@ -1227,7 +1218,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1227
  <td></td>
1228
  </tr>
1229
  <tr>
1230
- <td rowspan="4" style="vertical-align: top;"><a href="#europarl-monolingual-and-parallel"><strong>EuroparlAligned</strong></a></td>
1231
  <td><strong>it-en</strong></td>
1232
  <td>1.901</td>
1233
  <td>0.100</td>
@@ -1265,7 +1256,7 @@ Token counts are computed using the tokenizer for [Lucie-7B](https://huggingface
1265
  <tr>
1266
  <td colspan="7"><h4 id="category-programming">Category: Programming</h4></td></tr>
1267
  <tr>
1268
- <td rowspan="30" style="vertical-align: top;"><a href="#thestack"><strong>TheStack</strong></a></td>
1269
  <td><strong>JAVASCRIPT</strong></td>
1270
  <td>21.109</td>
1271
  <td>8.526</td>
@@ -1918,4 +1909,4 @@ Data storage and significant parts of the data processing were made possible thr
1918
 
1919
  ## Contact
1920
 
1921
- <pre>[email protected]</pre>
 
627
  <table>
628
  <thead>
629
  <tr>
630
+ <th><strong>Subset</strong></th>
631
+ <th><strong>Language</strong></th>
632
  <th><strong>M docs</strong></th>
633
  <th><strong>B words</strong></th>
634
  <th><strong>B tokens</strong></th>
 
640
  <tr>
641
  <td rowspan="11" style="vertical-align: top;"><strong>TOTAL</strong></td>
642
  <td></td>
643
+ <td>2186.562</td>
644
+ <td>1356.021</td>
645
+ <td>2314.862</td>
646
+ <td>8842.200</td>
647
  <td></td>
648
  </tr>
649
  <tr>
650
 
651
  <td><strong>French (fr)</strong></td>
652
+ <td>653.812</td>
653
+ <td>583.687</td>
654
+ <td>928.618</td>
655
+ <td>3619.672</td>
656
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_french_pie.png">composition details</a></td>
657
  </tr>
658
  <tr>
659
 
 
662
  <td>412.202</td>
663
  <td>611.894</td>
664
  <td>2553.541</td>
665
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_english_pie.png">composition details</a></td>
666
  </tr>
667
  <tr>
668
 
 
671
  <td>51.306</td>
672
  <td>228.954</td>
673
  <td>630.749</td>
674
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_code_pie.png">composition details</a></td>
675
  </tr>
676
  <tr>
677
 
 
680
  <td>105.609</td>
681
  <td>206.610</td>
682
  <td>764.779</td>
683
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_german_pie.png">composition details</a></td>
684
  </tr>
685
  <tr>
686
 
 
689
  <td>123.857</td>
690
  <td>200.825</td>
691
  <td>759.457</td>
692
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_spanish_pie.png">composition details</a></td>
693
  </tr>
694
  <tr>
695
 
 
698
  <td>62.051</td>
699
  <td>112.031</td>
700
  <td>404.454</td>
701
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_italian_pie.png">composition details</a></td>
702
  </tr>
703
  <tr>
704
 
 
707
  <td>17.016</td>
708
  <td>25.494</td>
709
  <td>107.658</td>
710
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_fr-en_pie.png">composition details</a></td>
711
  </tr>
712
  <tr>
713
 
 
745
  <td>477.758</td>
746
  <td>741.023</td>
747
  <td>2974.596</td>
748
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_redpajama-french_histogram.png">composition details</a></td>
749
  </tr>
750
  <tr>
751
 
 
754
  <td>103.078</td>
755
  <td>201.371</td>
756
  <td>747.631</td>
757
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_redpajama-german_histogram.png">composition details</a></td>
758
  </tr>
759
  <tr>
760
 
 
763
  <td>121.751</td>
764
  <td>197.125</td>
765
  <td>746.984</td>
766
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_redpajama-spanish_histogram.png">composition details</a></td>
767
  </tr>
768
  <tr>
769
 
 
772
  <td>60.194</td>
773
  <td>108.416</td>
774
  <td>393.012</td>
775
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_redpajama-italian_histogram.png">composition details</a></td>
776
  </tr>
777
  <tr>
778
  <td><a href="#finewebedu"><strong>FineWebEdu</strong></a></td>
 
781
  <td>327.453</td>
782
  <td>467.837</td>
783
  <td>2018.215</td>
784
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_finewebedu-english_histogram.png">composition details</a></td>
785
  </tr>
786
  <tr>
787
  <td colspan="7"><h4 id="category-newspaper">Category: Newspaper</h4></td></tr>
 
801
  <td>8.902</td>
802
  <td>14.313</td>
803
  <td>50.844</td>
804
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_americanstories-english_histogram.png">composition details</a></td>
805
  </tr>
806
  <tr>
807
  <td colspan="7"><h4 id="category-technical">Category: Technical</h4></td></tr>
808
  <tr>
809
+ <td><a href="#pes2o-v2"><strong>PeS2o</strong></a></td>
810
  <td><strong>English (en)</strong></td>
811
  <td>38.972</td>
812
  <td>42.296</td>
 
833
  <td></td>
834
  </tr>
835
  <tr>
 
 
 
 
 
 
 
 
 
836
  <td><a href="#pile-uncopyrighted"><strong>Pile (USPTO_Backgrounds)</strong></a></td>
837
  <td><strong>English (en)</strong></td>
838
  <td>5.139</td>
 
1010
  <tr>
1011
  <td colspan="7"><h4 id="category-legislative-transcripts">Category: Legislative Transcripts</h4></td></tr>
1012
  <tr>
1013
+ <td rowspan="4" style="vertical-align: top;"><a href="#europarl-and-europarlaligned"><strong>Europarl</strong></a></td>
1014
  <td><strong>German (de)</strong></td>
1015
  <td>0.0102</td>
1016
  <td>0.0451</td>
 
1131
  <tr>
1132
  <td colspan="7"><h4 id="category-math">Category: Math</h4></td></tr>
1133
  <tr>
1134
+ <td><a href="#mathpile-commercial"><strong>MathPile</strong></a></td>
1135
  <td><strong>English (en)</strong></td>
1136
  <td>0.737</td>
1137
  <td>3.408</td>
 
1177
  <td>0.818</td>
1178
  <td>1.161</td>
1179
  <td>4.709</td>
1180
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_claire-english_pie.png">composition details</a></td>
1181
  </tr>
1182
  <tr>
1183
 
 
1186
  <td>0.210</td>
1187
  <td>0.311</td>
1188
  <td>1.314</td>
1189
+ <td><a href="https://huggingface.co/datasets/OpenLLM-France/Lucie-Training-Dataset/blob/main/figures/fig_distribution_claire-french_pie.png">composition details</a></td>
1190
  </tr>
1191
  <tr>
1192
  <td><a href="#youtube"><strong>YouTube</strong></a></td>
 
1198
  <td></td>
1199
  </tr>
1200
  <tr>
1201
+ <td><a href="#stac"><strong>STAC</strong></a></td>
1202
  <td><strong>English (en)</strong></td>
1203
  <td>0.0000450</td>
1204
  <td>0.0000529</td>
 
1207
  <td></td>
1208
  </tr>
1209
  <tr>
1210
+ <td colspan="7"><h4 id="category-multilingual-parallel-corpora">Category: Multilingual Parallel Corpora</h4></td></tr>
1211
  <tr>
1212
  <td><a href="#croissantaligned"><strong>CroissantAligned</strong></a></td>
1213
  <td><strong>fr-en</strong></td>
 
1218
  <td></td>
1219
  </tr>
1220
  <tr>
1221
+ <td rowspan="4" style="vertical-align: top;"><a href="#europarl-and-europarlaligned"><strong>EuroparlAligned</strong></a></td>
1222
  <td><strong>it-en</strong></td>
1223
  <td>1.901</td>
1224
  <td>0.100</td>
 
1256
  <tr>
1257
  <td colspan="7"><h4 id="category-programming">Category: Programming</h4></td></tr>
1258
  <tr>
1259
+ <td rowspan="30" style="vertical-align: top;"><a href="#thestack-v12"><strong>TheStack</strong></a></td>
1260
  <td><strong>JAVASCRIPT</strong></td>
1261
  <td>21.109</td>
1262
  <td>8.526</td>
 
1909
 
1910
  ## Contact
1911
 
1912
+ <pre>[email protected]</pre>