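"""Pywikibot maintenance tasks for the Oxygen Not Included (ONI) wikis:
list pages and categories, mirror Duplicant and fair-use images from the
English wiki, and push exported Lua data files to "module:" pages on the
Chinese wiki."""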
import itertools
import os
import os.path as path
import pathlib
import re
from typing import Dict

import pywikibot

import utils  # local helper module: logger setup, output dir, tag helpers

logger = utils.getLogger('meta bot')


def all_page_titles(lang="zh", site="oni"):
    """Return the titles of all pages on the given wiki."""
    site = pywikibot.Site(lang, site)
    return [p.title() for p in site.allpages()]


def all_cate_titles(lang="zh", site="oni"):
    """Return the titles of all categories on the given wiki."""
    site = pywikibot.Site(lang, site)
    return [p.title() for p in site.allcategories()]
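# Usage sketch (assumes an "oni" wiki family is configured for pywikibot):
#   zh_pages = all_page_titles("zh")
#   en_cates = all_cate_titles("en")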


def download_en_images():
    """Download Duplicant and fair-use images from the English wiki."""
    s = pywikibot.Site("en", "oni")
    dest = path.join(utils.DIR_OUT, "en images")
    pathlib.Path(dest).mkdir(parents=True, exist_ok=True)
    files = itertools.chain(
        s.categorymembers(pywikibot.Category(s, "Duplicant images")),
        s.categorymembers(pywikibot.Category(s, "Copyright Fairuse")),
    )
    for f in files:
        if isinstance(f, pywikibot.FilePage):
            t = f.title(with_ns=False)
            logger.info(t)
            f.download(path.join(dest, t))
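# Note: non-file category members (e.g. subcategories) are skipped; each file
# is saved under its wiki title (no namespace) in <utils.DIR_OUT>/en images.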


def get_data_file_list() -> Dict[str, str]:
    """Return local file stem -> wiki page pairs; each pair is a candidate upload."""
    # fixed pairs
    name_map = {  # local data file name -> wiki page suffix
        # "building": "Data/Buildings",
        # "critter": "Data/Critters",
        "Elements": "Data/Elements",
        "TextAsset/Personalities": "Data/TextAsset/Personalities",
        "codex": "Data/Codex",
        "temperatures": "Data/Worldgen/Temperatures",
    }
    # files starting with "i18n_strings_"
    i18_prefix = "i18n_strings_"
    for i18_file in os.listdir(utils.DIR_OUT):
        if not path.isfile(path.join(utils.DIR_OUT, i18_file)):
            continue
        m = re.match(rf"{i18_prefix}(\w+)\.lua$", i18_file)
        if m is None:
            continue
        # key is the file name with the ".lua" extension stripped
        name_map[i18_file[:-4]] = f"i18n/{m.group(1).capitalize()}"
    # files starting with "worldgen" or "templates"
    for prefix in ['worldgen', 'templates']:
        for lua_file in os.listdir(utils.DIR_OUT):
            if not path.isfile(path.join(utils.DIR_OUT, lua_file)):
                continue
            m = re.match(rf"({prefix}-(?:\w|-)+)\.lua$", lua_file)
            if m is None:
                continue
            # "worldgen-foo-bar" -> "Worldgen/Foo/Bar"
            name = '/'.join(s.capitalize() for s in m.group(1).split('-'))
            logger.info(name)
            name_map[lua_file[:-4]] = f"data/{name}"
    return name_map
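# Illustrative entries produced by get_data_file_list() (hypothetical file
# names; the actual keys depend on what sits in utils.DIR_OUT):
#   "i18n_strings_buildings"   -> "i18n/Buildings"
#   "worldgen-subworlds-space" -> "data/Worldgen/Subworlds/Space"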


def update_data(try_tag='bot-data-update', comment=None):
    """Upload local Lua data files to their "module:" pages on the zh wiki."""
    site = pywikibot.Site("zh", "oni")
    site_tags = utils.get_tags(site)
    if try_tag not in site_tags:
        logger.warning(f'Tag "{try_tag}" does not exist on "{site}"')
    data_files = get_data_file_list()
    for local_file in data_files:
        f_path = path.join(utils.DIR_OUT, local_file) + ".lua"
        if not path.exists(f_path):
            logger.warning(f'"{f_path}" does not exist.')
            continue
        page = pywikibot.Page(site, f"module:{data_files[local_file]}")
        with open(f_path, "rb") as f:
            new_text = f.read().decode('utf-8')
        if not (page.exists() and page.text == new_text):
            page.text = new_text
            if comment is None:
                comment = input("Edit comment: ")
            utils.try_tags_save(page, [try_tag], f"Pywikibot: {comment}")
        # doc page: tag data modules with the game-copyright template
        doc_page = pywikibot.Page(site, f"module:{data_files[local_file]}/doc")
        new_doc = "{{游戏版权}}"
        if not (doc_page.exists() and doc_page.text == new_doc):
            doc_page.text = new_doc
            if comment is None:
                comment = input("Edit comment: ")
            doc_page.save(f"Pywikibot: {comment}")
    logger.info("Done")


if __name__ == '__main__':
    # update_data()
    pass
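    # Example invocation (a sketch; assumes a pywikibot user-config.py with
    # bot credentials for the zh ONI wiki):
    # update_data(comment="sync exported game data")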