
Create subticket and patch for new wikis

Create DNS and Wikimedia Messages patches for the new wikis

Most of the gerrit code is copied from the libraryupgrader library:
https://gerrit.wikimedia.org/r/plugins/gitiles/labs/libraryupgrader

Bug: T253439
Amir Sarabadani 2020-08-15 19:11:06 +02:00
parent edc2501274
commit 1d784027bb
4 changed files with 254 additions and 90 deletions
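
For orientation, here is the end-to-end flow this commit introduces, as a minimal sketch. The class names and signatures are taken from the diffs below; the concrete wiki values and the standalone invocation are made up for illustration:

```python
# Sketch only: DnsPatchMaker and WikimediaMessagesPatchMaker are defined in
# patch_makers.py below; the wiki values here are hypothetical.
from patch_makers import DnsPatchMaker, WikimediaMessagesPatchMaker

# If the requested subdomain does not resolve yet, push a Gerrit patch that
# adds the language code to the operations/dns langlist helper template:
DnsPatchMaker('atj', 'T253439').run()

# If WikimediaMessages lacks the localized project-name messages for the new
# wiki, push a patch adding them (plus interwiki search messages for Wikipedias):
WikimediaMessagesPatchMaker('atjwiki', 'Atikamekw Wikipedia',
                            'atj.wikipedia.org', 'atj', 'T253439').run()
```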

gerrit.py

@@ -1,18 +1,38 @@
-import requests
+"""
+Copyright (C) 2019 Kunal Mehta <legoktm@member.fsf.org>
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program.  If not, see <https://www.gnu.org/licenses/>.
+"""
 import json
+import os
 import subprocess
 import tempfile
 import urllib
-import os
 from contextlib import contextmanager
 
+import requests
+
 with open('gerrit-creds.json', 'r') as f:
     creds = json.loads(f.read())
 
 
 def load_ssh_key():
     mixin = ShellMixin()
-    mixin.check_call(['ssh-add', '/home/amsa/Phabricator-maintenance-bot/private_key'])
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    mixin.check_call(
+        ['ssh-add', os.path.join(dir_path, 'private_key')])
 
+
 @contextmanager
 def cd(dirname):
@@ -23,22 +43,23 @@ def cd(dirname):
     finally:
         os.chdir(cwd)
 
 
 def gerrit_url(repo: str, user=None, ssh=False) -> str:
     if user is not None:
         prefix = user + '@'
     else:
         prefix = ''
     if ssh:
-        return f'ssh://{prefix}gerrit.wikimedia.org:29418/{repo}'
+        return 'ssh://{}gerrit.wikimedia.org:29418/{}'.format(prefix, repo)
     else:
-        return f'https://{prefix}gerrit.wikimedia.org/r/{repo}.git'
+        return 'https://{}gerrit.wikimedia.org/r/{}.git'.format(prefix, repo)
 
 
 class ShellMixin:
     def check_call(self, args: list, stdin='', env=None,
                    ignore_returncode=False) -> str:
         debug = self.log if hasattr(self, 'log') else print
-        debug('$ ' + ' '.join(args))
+        # debug('$ ' + ' '.join(args))
         res = subprocess.run(
             args,
             input=stdin.encode(),
@@ -46,10 +67,11 @@ class ShellMixin:
             stderr=subprocess.STDOUT,
             env=env
         )
-        debug(res.stdout.decode())
+        # debug(res.stdout.decode())
         if not ignore_returncode:
             res.check_returncode()
         return res.stdout.decode()
 
     def clone(self, repo):
         url = gerrit_url(repo, user=creds['name'])
         self.check_call(['git', 'clone', url, 'repo', '--depth=1'])
@@ -57,7 +79,8 @@ class ShellMixin:
         self.check_call(['git', 'config', 'user.name', creds['name']])
         self.check_call(['git', 'config', 'user.email', creds['email']])
         self.check_call(['git', 'submodule', 'update', '--init'])
-        self.check_call(['scp', '-p', '-P', '29418', creds['name'] + '@gerrit.wikimedia.org:hooks/commit-msg', '.git/hooks/'])
+        self.check_call(['scp', '-p', '-P', '29418', creds['name'] +
+                         '@gerrit.wikimedia.org:hooks/commit-msg', '.git/hooks/'])
 
     def build_push_command(self, options: dict) -> list:
         per = '%topic=new-wikis-patches'
@@ -72,6 +95,7 @@ class ShellMixin:
             gerrit_url(options['repo'], creds['name'], ssh=True),
             'HEAD:refs/for/master' + per]
 
+
 class GerritBot(ShellMixin):
     def __init__(self, name, commit_message):
         self.name = name
@@ -84,17 +108,17 @@ class GerritBot(ShellMixin):
         self.changes()
         self.commit()
 
     def changes(self):
         files = [
-            'i18n/wikimediainterwikisearchresults/en.json',
-            'i18n/wikimediainterwikisearchresults/qqq.json'
+            'i18n/wikimediaprojectnames/en.json',
+            'i18n/wikimediaprojectnames/qqq.json'
         ]
         for file_ in files:
             with open(file_, 'r') as f:
                 result = json.load(f)
             with open(file_, 'w') as f:
-                f.write(json.dumps(result, ensure_ascii=False, indent='\t', sort_keys=True))
+                f.write(json.dumps(result, ensure_ascii=False,
+                                   indent='\t', sort_keys=True))
 
     def commit(self):
         self.check_call(['git', 'add', '.'])
@@ -102,7 +126,11 @@ class GerritBot(ShellMixin):
             f.write(self.commit_message)
         self.check_call(['git', 'commit', '-F', '.git/COMMIT_EDITMSG'])
         load_ssh_key()
-        self.check_call(self.build_push_command({'hashtags': ['automated-wiki-creation'], 'repo': self.name}))
+        self.check_call(self.build_push_command(
+            {'hashtags': ['automated-wiki-creation'], 'repo': self.name}))
 
-gerritbot = GerritBot('mediawiki/extensions/WikimediaMessages', "Order entries by alphabetical order\n\nThis would make creating automated patches easier")
-gerritbot.run()
+
+if __name__ == "__main__":
+    gerritbot = GerritBot('mediawiki/extensions/WikimediaMessages',
+                          "Order entries by alphabetical order\n\nThis would make creating automated patches easier")
+    gerritbot.run()
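
The `build_push_command` tail visible above pushes to Gerrit's magic ref with per-change options appended after `%`. A sketch of the command it ends up assembling; the hashtag handling (`t=`) sits in lines this diff does not show, so that part is inferred from Gerrit's push-option syntax rather than quoted:

```python
# Inferred reconstruction, not a quote of the hidden lines of build_push_command.
options = {'hashtags': ['automated-wiki-creation'],
           'repo': 'mediawiki/extensions/WikimediaMessages'}
per = '%topic=new-wikis-patches'
for hashtag in options.get('hashtags', []):   # assumed loop over the hashtags option
    per += ',t=' + hashtag
push_command = [
    'git', 'push',
    'ssh://bot@gerrit.wikimedia.org:29418/' + options['repo'],
    'HEAD:refs/for/master' + per]
# -> git push ssh://... HEAD:refs/for/master%topic=new-wikis-patches,t=automated-wiki-creation
```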

lib.py

@@ -1,6 +1,6 @@
 import json
-import time
 import sys
+import time
 
 import requests
@@ -71,6 +71,27 @@ class Client(object):
             }]
         })
 
+    def createSubtask(self, desc, project_phids, parent_phid, title):
+        self.post('maniphest.edit', {
+            'objectIdentifier': '',
+            'transactions': [{
+                'type': 'parent',
+                'value': parent_phid
+            },
+            {
+                'type': 'title',
+                'value': title
+            },
+            {
+                'type': 'description',
+                'value': desc,
+            },
+            {
+                'type': 'projects.add',
+                'value': project_phids
+            }]
+        })
+
     def taskDetails(self, phid):
         """Lookup details of a Maniphest task."""
         r = self.post('maniphest.query', {'phids': [phid]})
@@ -97,7 +118,8 @@ class Client(object):
         })
 
     def getTasksWithProject(self, project_phid, continue_=None, statuses=None):
-        r = self._getTasksWithProjectContinue(project_phid, continue_, statuses=statuses)
+        r = self._getTasksWithProjectContinue(
+            project_phid, continue_, statuses=statuses)
         cursor = r['cursor']
         for case in r['data']:
             if case['type'] != 'TASK':
@@ -133,3 +155,13 @@ class Client(object):
         }
         return self.post('maniphest.search', params)[
             'data'][0]['attachments']['columns']
+
+    def getTaskSubtasks(self, phid):
+        params = {
+            "constraints": {
+                "phids": [phid],
+                "hasSubtasks": True
+            }
+        }
+        return self.post('maniphest.search', params)[
+            'data']
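
`createSubtask` wraps Phabricator's `maniphest.edit` Conduit endpoint: with an empty `objectIdentifier` the call creates a new task, and the `parent` transaction attaches it under the given task. A usage sketch with placeholder PHIDs (`Client.newFromCreds()` is assumed to be the existing credential-loading constructor in lib.py; it is not part of this diff):

```python
# Placeholder PHIDs for illustration only.
client = Client.newFromCreds()  # assumed existing constructor in lib.py
client.createSubtask(
    'The new wiki is going to be **public**.',
    ['PHID-PROJ-aaaaaaaaaaaaaaaaaaaa'],   # projects to tag on the subtask
    'PHID-TASK-bbbbbbbbbbbbbbbbbbbb',     # parent wiki-creation task
    'Prepare and check storage layer for examplewiki')
```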

new_wikis_handler.py

@@ -6,6 +6,7 @@ import socket
 import requests
 
 from lib import Client
+from patch_makers import DnsPatchMaker, WikimediaMessagesPatchMaker
 
 final_text = ''
 gerrit_path = 'https://gerrit.wikimedia.org/g/'
@@ -17,6 +18,13 @@ def add_text(a):
     final_text += a + '\n'
 
 
+def add_checklist(url, text, checked):
+    if checked:
+        add_text(' [x] [[{}|{}]]'.format(url, text))
+    else:
+        add_text(' [] [[{}|{}]]'.format(url, text))
+
+
 def get_file_from_gerrit(path):
     gerrit_url = 'https://gerrit.wikimedia.org/g/'
     url = gerrit_url + '{0}?format=TEXT'.format(path)
@@ -34,8 +42,10 @@ def hostname_resolves(hostname):
         return False
     return True
 
 
 def handle_special_wiki_apache(parts):
-    apache_file = get_file_from_gerrit('operations/puppet/+/production/modules/mediawiki/manifests/web/prod_sites.pp')
+    apache_file = get_file_from_gerrit(
+        'operations/puppet/+/production/modules/mediawiki/manifests/web/prod_sites.pp')
     url = '.'.join(parts)
     return url in apache_file
@@ -46,24 +56,16 @@ def post_a_comment(comment):
     pass
 
 
-def create_subticket(text, projects, task_phid):
-    pass
-
-
-def create_non_special_wikis_dns_subticket(parts, task_details):
-    pass
-
-
-def create_special_wikis_dns_subticket(parts, task_details):
-    pass
-
-
-def handle_subticket_for_cloud(ticket_phid, task_details, db_name):
-    pass
-
-
-def create_apache_config_subticket(parts, task_details):
-    pass
+def handle_subticket_for_cloud(task_details, db_name):
+    hasSubtasks = client.getTaskSubtasks(task_details['phid'])
+    if hasSubtasks:
+        return
+    client.createSubtask('The new wiki is going to be **public**.', [
+        'PHID-PROJ-hwibeuyzizzy4xzunfsk',  # DBA
+        'PHID-PROJ-bj6y6ks7ampcwcignhce'  # Data services
+    ], task_details['phid'], 'Prepare and check storage layer for ' + db_name)
 
 
 def get_dummy_wiki(shard, family):
     if family == "wiktionary":
@@ -78,27 +80,52 @@ def get_dummy_wiki(shard, family):
     }.get(shard, "?????")
 
 
+def create_patch_for_wikimedia_messages(db_name, english_name, url, lang, bug_id):
+    if not english_name:
+        return
+    r = requests.get(
+        'https://gerrit.wikimedia.org/r/changes/?q=bug:{}+project:mediawiki/extensions/WikimediaMessages'.format(bug_id))
+    b = json.loads('\n'.join(r.text.split('\n')[1:]))
+    if b:
+        return
+    maker = WikimediaMessagesPatchMaker(
+        db_name, english_name, url, lang, bug_id)
+    maker.run()
+
+
+def create_patch_for_dns(lang, bug_id):
+    r = requests.get(
+        'https://gerrit.wikimedia.org/r/changes/?q=bug:{}+project:operations/dns'.format(bug_id))
+    b = json.loads('\n'.join(r.text.split('\n')[1:]))
+    if b:
+        return
+    maker = DnsPatchMaker(lang, bug_id)
+    maker.run()
+
+
 def hande_task(phid):
     global final_text
     final_text = ''
     task_details = client.taskDetails(phid)
     print('Checking T%s' % task_details['id'])
+    task_tid = 'T' + task_details['id']
     add_text('\n\n------\n**Pre-install automatic checklist:**')
-    language_code = re.findall(r'\n- *?\*\*Language code:\*\* *?(\S+)', task_details['description'])
+    wiki_spec = {}
+    for case in re.findall(r'\n- *?\*\*(.+?):\*\* *?(.+)', task_details['description']):
+        wiki_spec[case[0].strip()] = case[1].strip()
+    language_code = wiki_spec.get('Language code')
     if not language_code:
         print('lang code not found, skipping')
         return
-    language_code = language_code[0]
-    url = re.findall(r'\n- *?\*\*Site URL:\*\* *?(\S+)', task_details['description'])
+    url = wiki_spec.get('Site URL')
     if not url:
         print('url not found, skipping')
         return
-    url = url[0]
     parts = url.split('.')
     if len(parts) != 3 or parts[2] != 'org':
         print('the url looks weird, skipping')
         return
-    shard = re.findall(r'\n- *?\*\*Shard:\*\* *?(\S+)', task_details['description'])[0]
+    shard = wiki_spec.get('Shard', 'TBD')
     shardDecided = shard != "TBD"
     if shardDecided:
@@ -107,36 +134,31 @@ def hande_task(phid):
         add_text(' [] #DBA decided about the shard')
 
     special = parts[1] == 'wikimedia'
-    dns_url = gerrit_path + 'operations/dns/+/master/templates/wikimedia.org' if special else gerrit_path + 'operations/dns/+/master/templates/helpers/langlist.tmpl'
+    dns_url = gerrit_path + 'operations/dns/+/master/templates/wikimedia.org' if special else gerrit_path + \
+        'operations/dns/+/master/templates/helpers/langlist.tmpl'
     dns = hostname_resolves(url)
     if not dns:
-        add_text(' [] [[{}|DNS]]'.format(dns_url))
-        if special:
-            create_special_wikis_dns_subticket(parts, task_details)
-        else:
-            create_non_special_wikis_dns_subticket(parts, task_details)
-        post_a_comment('It seems that there is not DNS entry for this wiki, '
-                       'I am creaing a subticket, Please make a patch.')
-    else:
-        add_text(' [x] [[{}|DNS]]'.format(dns_url))
+        if not special:
+            create_patch_for_dns(language_code, task_tid)
+    add_checklist(dns_url, 'DNS', dns)
 
     if parts[1] == 'wikipedia':
         db_name = parts[0].replace('-', '_') + 'wiki'
     else:
         db_name = parts[0].replace('-', '_') + parts[1]
 
-    handle_subticket_for_cloud(client.lookupPhid('T251371'), task_details, db_name)
+    if not special and wiki_spec.get('Special', '').lower() != 'yes':
+        handle_subticket_for_cloud(task_details, db_name)
 
     if special:
-        apache_url = gerrit_path + 'operations/puppet/+/master/modules/mediawiki/manifests/web/prod_sites.pp'
+        apache_url = gerrit_path + \
+            'operations/puppet/+/master/modules/mediawiki/manifests/web/prod_sites.pp'
         if not handle_special_wiki_apache(parts):
             apache = False
-            add_text(' [] [[{}|Apache config]]'.format(apache_url))
-            create_apache_config_subticket(parts, task_details)
         else:
             apache = True
-            add_text(' [x] [[{}|Apache config]]'.format(apache_url))
+        add_checklist(apache_url, 'Apache config', apache)
     else:
         apache = True
         add_text(' [x] Apache config (Not needed)')
@@ -145,10 +167,12 @@ def hande_task(phid):
     r = requests.get(langdb_url)
     if re.search(r'\n *?' + language_code + ':', r.text):
         langdb = True
-        add_text(' [x] [[{}|Language configuration in language data repo]]'.format(langdb_url))
+        add_text(
+            ' [x] [[{}|Language configuration in language data repo]]'.format(langdb_url))
     else:
         langdb = False
-        add_text(' [] [[{}|Language configuration in language data repo]]'.format(langdb_url))
+        add_text(
+            ' [] [[{}|Language configuration in language data repo]]'.format(langdb_url))
 
     core_messages_url = 'https://raw.githubusercontent.com/wikimedia/mediawiki/master/languages/messages/Messages{}.php'.format(
         language_code[0].upper() + language_code[1:]
@@ -156,19 +180,23 @@ def hande_task(phid):
     r = requests.get(core_messages_url)
     if r.status_code == 200:
         core_lang = True
-        add_text(' [x] [[{}|Language configuration in mediawiki core]]'.format(core_messages_url))
     else:
         core_lang = False
-        add_text(' [] [[{}|Language configuration in mediawiki core]]'.format(core_messages_url))
+    add_checklist(core_messages_url,
+                  'Language configuration in mediawiki core', core_lang)
 
     path = 'mediawiki/extensions/WikimediaMessages/+/master/i18n/wikimediaprojectnames/en.json'
     wikimedia_messages_data = get_file_from_gerrit(path)
     wikimedia_messages_data = json.loads(wikimedia_messages_data)
     if 'project-localized-name-' + db_name in wikimedia_messages_data:
         wikimedia_messages_one = True
-        add_text(' [x] [[{}|Wikimedia messages configuration]]'.format(gerrit_path + path))
     else:
         wikimedia_messages_one = False
-        add_text(' [] [[{}|Wikimedia messages configuration]]'.format(gerrit_path + path))
+        english_name = wiki_spec.get('Project name (English)')
+        create_patch_for_wikimedia_messages(
+            db_name, english_name, url, language_code, task_tid)
+    add_checklist(gerrit_path + path,
+                  'Wikimedia messages configuration', wikimedia_messages_one)
 
     url = 'https://en.wikipedia.org/wiki/MediaWiki:Project-localized-name-' + db_name
     r = requests.get(url)
     if 'Wikipedia does not have a' not in r.text:
@@ -184,11 +212,10 @@ def hande_task(phid):
     search_messages_data = json.loads(search_messages_data)
     if 'search-interwiki-results-' + db_name in search_messages_data:
         wikimedia_messages_two = True
-        add_text(
-            ' [x] [[{}|Wikimedia messages (interwiki search result) configuration]]'.format(gerrit_path + path))
     else:
         wikimedia_messages_two = False
-        add_text(' [] [[{}|Wikimedia messages (interwiki search result) configuration]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path,
+                  'Wikimedia messages (interwiki search result) configuration', wikimedia_messages_two)
     url = 'https://en.wikipedia.org/wiki/MediaWiki:Search-interwiki-results-' + db_name
     r = requests.get(url)
     if 'Wikipedia does not have a' not in r.text:
@@ -200,7 +227,8 @@ def hande_task(phid):
     else:
         wikimedia_messages_two = True
         wikimedia_messages_two_deployed = True
-        add_text(' [x] Wikimedia messages (interwiki search result) configuration (not needed)')
+        add_text(
+            ' [x] Wikimedia messages (interwiki search result) configuration (not needed)')
 
     if dns and apache and langdb and core_lang and wikimedia_messages_one and wikimedia_messages_one_deployed and wikimedia_messages_two and wikimedia_messages_two_deployed and shardDecided:
         add_text('**The Wiki is ready to be created.**')
@@ -211,37 +239,27 @@ def hande_task(phid):
     path = 'mediawiki/services/restbase/deploy/+/master/scap/vars.yaml'
     restbase = get_file_from_gerrit(path)
-    if '.'.join(parts) in restbase:
-        add_text(' [x] [[{}|RESTbase]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|RESTbase]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'RESTbase', '.'.join(parts) in restbase)
 
     path = 'mediawiki/services/cxserver/+/master/config/languages.yaml'
     cxconfig = get_file_from_gerrit(path)
-    if '\n- ' + language_code in cxconfig:
-        add_text(' [x] [[{}|CX Config]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|CX Config]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'CX Config',
+                  '\n- ' + language_code in cxconfig)
 
     path = 'analytics/refinery/+/master/static_data/pageview/whitelist/whitelist.tsv'
     refinery_whitelist = get_file_from_gerrit(path)
-    if '.'.join(parts[:2]) in refinery_whitelist:
-        add_text(' [x] [[{}|Analytics refinery]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|Analytics refinery]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'Analytics refinery',
+                  '.'.join(parts[:2]) in refinery_whitelist)
 
-    url = 'pywikibot/core/+/master/pywikibot/families/{}_family.py'.format(parts[1])
+    url = 'pywikibot/core/+/master/pywikibot/families/{}_family.py'.format(
+        parts[1])
     pywikibot = get_file_from_gerrit(url)
-    if "'{}'".format(language_code) in pywikibot:
-        add_text(' [x] [[{}|Pywikibot]]'.format(gerrit_path + url))
-    else:
-        add_text(' [] [[{}|Pywikibot]]'.format(gerrit_path + url))
+    add_checklist(gerrit_path + url, 'Pywikibot',
+                  "'{}'".format(language_code) in pywikibot)
 
     url = 'https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities'
     wikiata_help_page = requests.get(url).text
-    if db_name in wikiata_help_page:
-        add_text(' [x] [[{}|Wikidata]]'.format(url))
-    else:
-        add_text(' [] [[{}|Wikidata]]'.format(url))
+    add_checklist(url, 'Wikidata', db_name in wikiata_help_page)
 
     add_text(' [] Import from Incubator')
     add_text(' [] Clean up old interwiki links')
@@ -256,9 +274,11 @@ def hande_task(phid):
     add_text('On mwmaint1002:')
     add_text('`scap pull`')
     add_text('`mwscript extensions/WikimediaMaintenance/addWiki.php --wiki={dummy} {lang} {family} {db} {url}`'.format(
-        dummy=dummy_wiki, lang=language_code, family=parts[1], db=db_name, url='.'.join(parts)
+        dummy=dummy_wiki, lang=language_code, family=parts[1], db=db_name, url='.'.join(
+            parts)
     ))
-    summary = 'Creating {db_name} ({phab})'.format(db_name=db_name, phab='T' + task_details['id'])
+    summary = 'Creating {db_name} ({phab})'.format(
+        db_name=db_name, phab=task_tid)
     add_text('On deploy1001:')
     if shard != "s3":
         add_text('`scap sync-file wmf-config/db-eqiad.php "{}"`'.format(summary))
@@ -266,7 +286,8 @@ def hande_task(phid):
         add_text('`scap sync-file dblists "{}"`'.format(summary))
     add_text('`scap sync-wikiversions "{}"`'.format(summary))
     if parts[1] == 'wikimedia':
-        add_text('`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary))
+        add_text(
+            '`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary))
     add_text('`scap sync-file static/images/project-logos/ "{}"`'.format(summary))
     add_text('`scap sync-file wmf-config/InitialiseSettings.php "{}"`'.format(summary))
     if parts[1] != 'wikimedia':
@@ -278,7 +299,8 @@ def hande_task(phid):
                            task_details['description'], re.DOTALL)
     if not old_report:
         print('old report not found, appending')
-        client.setTaskDescription(task_details['phid'], task_details['description'] + final_text)
+        client.setTaskDescription(
+            task_details['phid'], task_details['description'] + final_text)
     else:
         if old_report[0] != final_text:
             print('Updating old report')
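
The rewritten `hande_task` replaces the per-field regexes with a single pass that parses every `- **Key:** value` line of the task description into a `wiki_spec` dict. A self-contained demo of that parsing (the description text is invented, in the wiki-request template format):

```python
import re

# Invented sample in the format of wiki-request task descriptions.
description = '''
- **Language code:** atj
- **Site URL:** atj.wikipedia.org
- **Shard:** TBD
- **Project name (English):** Atikamekw Wikipedia
'''

wiki_spec = {}
for case in re.findall(r'\n- *?\*\*(.+?):\*\* *?(.+)', description):
    wiki_spec[case[0].strip()] = case[1].strip()

assert wiki_spec['Language code'] == 'atj'
assert wiki_spec.get('Shard', 'TBD') == 'TBD'  # "TBD" means the DBAs haven't decided yet
```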

patch_makers.py (new file)

@@ -0,0 +1,82 @@
+import json
+
+from gerrit import GerritBot
+
+
+class WikimediaMessagesPatchMaker(GerritBot):
+    def __init__(self, db_name, english_name, url, lang, bug_id):
+        self.db_name = db_name
+        self.english_name = english_name
+        self.wiki_url = url
+        self.wiki_lang = lang
+        super().__init__(
+            'mediawiki/extensions/WikimediaMessages',
+            'Add messages for {} ({})\n\nBug:{}'.format(
+                english_name, db_name, bug_id)
+        )
+
+    def changes(self):
+        file_ = 'i18n/wikimediaprojectnames/en.json'
+        result = self._read_json(file_)
+        result['project-localized-name-' + self.db_name] = self.english_name
+        self._write_json(file_, result)
+
+        file_ = 'i18n/wikimediaprojectnames/qqq.json'
+        result = self._read_json(file_)
+        result['project-localized-name-' + self.db_name] = '{{ProjectNameDocumentation|url=' + \
+            self.wiki_url + '|name=' + self.english_name + \
+            '|language=' + self.wiki_lang + '}}'
+        self._write_json(file_, result)
+
+        if 'wikipedia' not in self.wiki_url:
+            return
+
+        file_ = 'i18n/wikimediainterwikisearchresults/en.json'
+        result = self._read_json(file_)
+        result['search-interwiki-results-' +
+               self.db_name] = 'Showing results from [[:{}:|{}]].'.format(self.wiki_lang, self.english_name)
+        self._write_json(file_, result)
+
+        file_ = 'i18n/wikimediainterwikisearchresults/qqq.json'
+        result = self._read_json(file_)
+        result['search-interwiki-results-' + self.db_name] = 'Search results description for ' + \
+            self.english_name + '.\n{{LanguageNameTip|' + self.wiki_lang + '}}'
+        self._write_json(file_, result)
+
+    def _read_json(self, path):
+        with open(path, 'r') as f:
+            result = json.load(f)
+        return result
+
+    def _write_json(self, path, content):
+        with open(path, 'w') as f:
+            f.write(json.dumps(content, ensure_ascii=False,
+                               indent='\t', sort_keys=True))
+
+
+class DnsPatchMaker(GerritBot):
+    def __init__(self, lang, bug_id):
+        self.wiki_lang = lang
+        super().__init__(
+            'operations/dns',
+            'Add {} to langlist helper\n\nBug:{}'.format(lang, bug_id)
+        )
+
+    def changes(self):
+        with open('templates/helpers/langlist.tmpl', 'r') as f:
+            lines = f.read().split('\n')
+        header = []
+        langs = []
+        footer = []
+        for line in lines:
+            if not line.startswith(' '):
+                if not header:
+                    header.append(line)
+                else:
+                    footer.append(line)
+            else:
+                langs.append(line)
+        langs.append("    '{}',".format(self.wiki_lang))
+        langs.sort()
+        with open('templates/helpers/langlist.tmpl', 'w') as f:
+            f.write('\n'.join(header) + '\n' + '\n'.join(langs) + '\n' + '\n'.join(footer))
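
`DnsPatchMaker.changes()` assumes langlist.tmpl keeps one indented `'xx',` entry per language between an unindented header and footer; it inserts the new code and re-sorts so the list stays alphabetical. A toy run of the same partition-and-insert logic (the template content here is a stand-in, not the real file):

```python
# Stand-in for templates/helpers/langlist.tmpl; only the "indented lines are
# language entries" convention is taken from the patch above.
lines = ['langs => [', "    'aa',", "    'de',", ']']

header, langs, footer = [], [], []
for line in lines:
    if not line.startswith(' '):
        (header if not header else footer).append(line)
    else:
        langs.append(line)

langs.append("    '{}',".format('atj'))  # new language code, hypothetical
langs.sort()
print('\n'.join(header + langs + footer))
# langs => [
#     'aa',
#     'atj',
#     'de',
# ]
```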