Update megawika-test.py
megawika-test.py  CHANGED  (+2 -2)
@@ -118,7 +118,7 @@ class MegaWika(datasets.GeneratorBasedBuilder):
     "text": [datasets.Value("string")],
     "parse": datasets.Value("string"),
     "en_tokens": [datasets.Value("string")],
-
+    "lang_tokens": [datasets.Value("string")],
     "en_lang_token_map": [[datasets.Value("int32")]] # list of pairs
 },
 
@@ -225,7 +225,7 @@ class MegaWika(datasets.GeneratorBasedBuilder):
         for token in tokens
     }
 ).values()),
-
+"lang_tokens": list(entry['passage'].get("lang_tokens", {}).values()),
 "en_lang_token_map": [
     (int(item[0]), int(item[1]))
     for item