main.py
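"""Build pruned Wikipedia category trees and export them as JSON.

For every language in ``default_languages``, this script loads the current dump
assets via ``wiki_categories``/``wiki_data_dump``, prunes the category tree
(excluded maintenance branches, a depth limit, and a page-count percentile cut),
and writes one JSON file per remaining category plus a gzipped index and a
``_meta.json`` summary under ``./data/<lang>/``.
"""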
import argparse
import datetime
import gzip
import json
import logging
import os
import pathlib
import sys
import time
import traceback
from typing import Collection, Tuple

# Allow importing ``wiki_categories`` from the local ./wiki_categories directory.
sys.path.append(str(pathlib.Path("./wiki_categories").absolute()))

import networkx as nx
import wiki_data_dump.mirrors
from wiki_data_dump.mirrors import MirrorType

from wiki_categories.core import CategoryTree, Assets
from wiki_categories.core.category_tree import less_than_page_count_percentile
from wiki_categories.core.wiki_utils import id_for_category_str_by_lang, CategoryNotFound


def _split_lines(text: str) -> Tuple[str, ...]:
    """Split ``text`` into a tuple of stripped, non-empty lines."""
    return tuple(x.strip() for x in text.splitlines() if x.strip())
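

# Wikipedia language editions to process, given as subdomain/database prefixes
# (e.g. "en" -> en.wikipedia.org), roughly ordered by article count.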
default_languages = _split_lines("""
en
ceb
de
sv
fr
nl
ru
es
it
arz
ja
zh
vi
uk
ar
pt
fa
ca
sr
ko
no
ce
fi
hu
cs
tt
sh
ro
eu
ms
eo
""")
_preferred_excluded_parents = _split_lines("""
Category:Hidden categories
Category:Tracking categories
Category:Container categories
Category:Noindexed pages
Category:Wikipedia 1.0 assessments
Category:Wikipedia administration
Category:Articles by importance
Category:Articles by quality
Category:Wikipedia categories
Category:Stub categories
Category:WikiProject templates
Category:All redirect categories
""")


def save_graph_run(
        src_tree: CategoryTree,
        save_dir: pathlib.Path,
        root_id: int,
        lang: str,
        excluded_branches: Collection[str],
        page_percentile: int,
        max_depth: int,
        mutate_src: bool = True):
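    """Prune ``src_tree`` and write the result to ``save_dir``.

    Each excluded branch and its direct successors are removed, the tree is
    restricted to nodes reachable within ``max_depth`` of ``root_id``, and
    categories below the ``page_percentile``-th page-count percentile are
    removed with ``remove_node_reconstruct`` (the root is always kept). Every
    remaining category is written to ``<id>.json`` and listed in
    ``_index.txt.gz``.
    """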
    if not mutate_src:
        src_tree = src_tree.copy()
        # Shallow copy is sufficient. Node attributes may be shared.

    total_excluded = set()

    for excluded in excluded_branches:
        try:
            excluded_id = id_for_category_str_by_lang(lang, excluded, "en")
        except CategoryNotFound:
            continue

        total_excluded.update(src_tree.successors(excluded_id))
        total_excluded.add(excluded_id)

    src_tree.remove_nodes_from(total_excluded)

    reachable = nx.dfs_tree(src_tree, source=root_id, depth_limit=max_depth)
    src_tree.remove_nodes_from([x for x in src_tree if x not in reachable])

    to_remove = less_than_page_count_percentile(src_tree, page_percentile)

    for n in to_remove:
        if n == root_id:
            continue
        src_tree.remove_node_reconstruct(n)
    for x in src_tree.nodes:
        attr_dict = src_tree.nodes[x]

        output_dict = {
            "name": attr_dict["name"],
            "id": x,
            "predecessors": [
                {"name": src_tree.nodes[n]["name"], "id": n} for n in src_tree.predecessors(x)
            ],
            "successors": [
                {"name": src_tree.nodes[n]["name"], "id": n} for n in src_tree.successors(x)
            ]
        }

        with open(save_dir.joinpath(f"{x}.json"), 'w', encoding="utf-8") as f:
            json.dump(output_dict, f, ensure_ascii=False)

    with gzip.open(save_dir.joinpath("_index.txt.gz"), 'wt', encoding="utf-8") as f:
        for n in src_tree.nodes:
            f.write(f"{n} {src_tree.nodes[n]['name']}\n")
def process_language(lang: str, save_dir: pathlib.Path, force: bool = False) -> bool:
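    """Build and save the category tree for ``lang`` into ``save_dir``.

    Returns ``True`` when a fresh run was written, or ``False`` when the
    existing output is already up to date with the current dump jobs and
    settings (unless ``force`` is set).
    """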
    started = time.time()

    assets = Assets(lang, wiki_dump=wiki_data_dump.WikiDump(mirror=MirrorType.WIKIMEDIA))

    page_table_updated = assets.page_table_job.updated
    category_links_updated = assets.category_links_job.updated
    category_table_updated = assets.category_table_job.updated

    page_percentile = 70
    max_depth = 100

    if not force and save_dir.joinpath("_meta.json").exists():
        try:
            with open(save_dir.joinpath("_meta.json"), 'r', encoding="utf-8") as f:
                meta_json = json.load(f)

            assert meta_json["page_table_updated"] == page_table_updated
            assert meta_json["category_links_updated"] == category_links_updated
            assert meta_json["category_table_updated"] == category_table_updated
            assert meta_json["page_percentile"] == page_percentile
            assert meta_json["max_depth"] == max_depth

            # Existing output matches the current dumps and settings: skip this run.
            return False
        except (KeyError, AssertionError, json.JSONDecodeError):
            pass

    save_dir.mkdir(exist_ok=True)

    # Clear stale output from any previous run.
    for file_name in os.listdir(save_dir):
        file_name = str(file_name)
        if file_name.endswith(".json") or file_name.endswith(".txt.gz"):
            os.unlink(save_dir.joinpath(file_name))
    save_graph_run(
        CategoryTree(assets),
        save_dir,
        root_id=id_for_category_str_by_lang(lang, "Category:Contents", "en"),
        lang=lang,
        excluded_branches=_preferred_excluded_parents,
        page_percentile=page_percentile,
        max_depth=max_depth,
        mutate_src=True
    )

    with open(save_dir.joinpath("_meta.json"), 'w', encoding="utf-8") as f:
        duration = int(time.time() - started)

        json.dump({
            "page_table_updated": page_table_updated,
            "category_links_updated": category_links_updated,
            "category_table_updated": category_table_updated,
            "page_percentile": page_percentile,
            "max_depth": max_depth,
            "run_duration_seconds": duration,
            "finished": datetime.datetime.now().isoformat()
        }, f)

    return True


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Process the languages defined by `default_languages` and save the results to ./data/"
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help="Skip the up-to-date check on existing output and force the run to execute."
    )

    _force = parser.parse_args().force

    logging.getLogger().setLevel(logging.INFO)

    started = datetime.datetime.now()

    root = pathlib.Path("./data")
    root.mkdir(exist_ok=True)

    languages_processed = []
    incomplete_languages = []
    for index, language in enumerate(default_languages):
        logging.info(
            f"Starting {language}wiki at {datetime.datetime.now()}. "
            f"{index + 1} of {len(default_languages)}"
        )

        try:
            finished = process_language(language, root.joinpath(language), force=_force)
            if finished:
                languages_processed.append(language)
        except Exception:
            # Record the failure and continue with the remaining languages.
            incomplete_languages.append(language)
            logging.error(traceback.format_exc())

        logging.info(f"Finished {language}wiki at {datetime.datetime.now()}.")
    with open(root.joinpath("_meta.json"), 'w', encoding="utf-8") as f:
        json.dump({
            "started": started.isoformat(),
            "finished": datetime.datetime.now().isoformat(),
            "languages_processed": languages_processed,
            "incomplete_languages": incomplete_languages
        }, f)