From 1d784027bbad9f169148beee933d102896eb50ff Mon Sep 17 00:00:00 2001
From: Amir Sarabadani
Date: Sat, 15 Aug 2020 19:11:06 +0200
Subject: [PATCH] Create subticket and patch for new wikis

Create DNS and Wikimedia Messages patches for the new wikis

Most of the gerrit code is copied from the library upgrader:
https://gerrit.wikimedia.org/r/plugins/gitiles/labs/libraryupgrader

Bug: T253439
---
 gerrit.py            |  60 +++++++++++-----
 lib.py               |  36 +++++++++-
 new_wikis_handler.py | 166 ++++++++++++++++++++++++-------------------
 patch_makers.py      |  82 +++++++++++++++++++++
 4 files changed, 254 insertions(+), 90 deletions(-)
 create mode 100644 patch_makers.py

diff --git a/gerrit.py b/gerrit.py
index 0fc0700..2942274 100644
--- a/gerrit.py
+++ b/gerrit.py
@@ -1,18 +1,38 @@
-import requests
+"""
+Copyright (C) 2019 Kunal Mehta
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""
+
 import json
+import os
 import subprocess
 import tempfile
 import urllib
-import os
 from contextlib import contextmanager
 
+import requests
 
 with open('gerrit-creds.json', 'r') as f:
     creds = json.loads(f.read())
 
+
 def load_ssh_key():
     mixin = ShellMixin()
-    mixin.check_call(['ssh-add', '/home/amsa/Phabricator-maintenance-bot/private_key'])
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    mixin.check_call(
+        ['ssh-add', os.path.join(dir_path, 'private_key')])
+
 
 @contextmanager
 def cd(dirname):
@@ -23,22 +43,23 @@ def cd(dirname):
     finally:
         os.chdir(cwd)
 
+
 def gerrit_url(repo: str, user=None, ssh=False) -> str:
     if user is not None:
-        prefix = user + '@'
+        prefix = user + '@'
     else:
         prefix = ''
     if ssh:
-        return f'ssh://{prefix}gerrit.wikimedia.org:29418/{repo}'
+        return 'ssh://{}gerrit.wikimedia.org:29418/{}'.format(prefix, repo)
     else:
-        return f'https://{prefix}gerrit.wikimedia.org/r/{repo}.git'
+        return 'https://{}gerrit.wikimedia.org/r/{}.git'.format(prefix, repo)
 
 
 class ShellMixin:
     def check_call(self, args: list, stdin='', env=None,
                    ignore_returncode=False) -> str:
         debug = self.log if hasattr(self, 'log') else print
-        debug('$ ' + ' '.join(args))
+        # debug('$ ' + ' '.join(args))
         res = subprocess.run(
             args,
             input=stdin.encode(),
@@ -46,10 +67,11 @@ class ShellMixin:
             stderr=subprocess.STDOUT,
             env=env
         )
-        debug(res.stdout.decode())
+        # debug(res.stdout.decode())
         if not ignore_returncode:
             res.check_returncode()
         return res.stdout.decode()
+
     def clone(self, repo):
         url = gerrit_url(repo, user=creds['name'])
         self.check_call(['git', 'clone', url, 'repo', '--depth=1'])
@@ -57,7 +79,8 @@ class ShellMixin:
         self.check_call(['git', 'config', 'user.name', creds['name']])
         self.check_call(['git', 'config', 'user.email', creds['email']])
         self.check_call(['git', 'submodule', 'update', '--init'])
-        self.check_call(['scp', '-p', '-P', '29418', creds['name'] + '@gerrit.wikimedia.org:hooks/commit-msg', '.git/hooks/'])
+        self.check_call(['scp', '-p', '-P', '29418', creds['name'] +
+                         '@gerrit.wikimedia.org:hooks/commit-msg', '.git/hooks/'])
 
     def build_push_command(self, options: dict) -> list:
         per = '%topic=new-wikis-patches'
@@ -72,6 +95,7 @@ class ShellMixin:
             gerrit_url(options['repo'], creds['name'], ssh=True),
             'HEAD:refs/for/master' + per]
 
+
 class GerritBot(ShellMixin):
     def __init__(self, name, commit_message):
         self.name = name
@@ -84,17 +108,17 @@ class GerritBot(ShellMixin):
         self.changes()
         self.commit()
 
-
     def changes(self):
         files = [
-            'i18n/wikimediainterwikisearchresults/en.json',
-            'i18n/wikimediainterwikisearchresults/qqq.json'
+            'i18n/wikimediaprojectnames/en.json',
+            'i18n/wikimediaprojectnames/qqq.json'
         ]
         for file_ in files:
             with open(file_, 'r') as f:
                 result = json.load(f)
             with open(file_, 'w') as f:
-                f.write(json.dumps(result, ensure_ascii=False, indent='\t', sort_keys=True))
+                f.write(json.dumps(result, ensure_ascii=False,
+                                   indent='\t', sort_keys=True))
 
     def commit(self):
         self.check_call(['git', 'add', '.'])
@@ -102,7 +126,11 @@ class GerritBot(ShellMixin):
             f.write(self.commit_message)
         self.check_call(['git', 'commit', '-F', '.git/COMMIT_EDITMSG'])
         load_ssh_key()
-        self.check_call(self.build_push_command({'hashtags': ['automated-wiki-creation'], 'repo': self.name}))
+        self.check_call(self.build_push_command(
+            {'hashtags': ['automated-wiki-creation'], 'repo': self.name}))
 
-gerritbot = GerritBot('mediawiki/extensions/WikimediaMessages', "Order entries by alphabetical order\n\nThis would make creating automated patches easier")
-gerritbot.run()
\ No newline at end of file
+
+if __name__ == "__main__":
+    gerritbot = GerritBot('mediawiki/extensions/WikimediaMessages',
+                          "Order entries by alphabetical order\n\nThis would make creating automated patches easier")
+    gerritbot.run()
diff --git a/lib.py b/lib.py
index 61076fe..770ab03 100755
--- a/lib.py
+++ b/lib.py
@@ -1,6 +1,6 @@
 import json
-import time
 import sys
+import time
 
 import requests
 
@@ -71,6 +71,27 @@ class Client(object):
             }]
         })
 
+    def createSubtask(self, desc, project_phids, parent_phid, title):
+        self.post('maniphest.edit', {
+            'objectIdentifier': '',
+            'transactions': [{
+                'type': 'parent',
+                'value': parent_phid
+            }, {
+                'type': 'title',
+                'value': title
+            }, {
+                'type': 'description',
+                'value': desc,
+            }, {
+                'type': 'projects.add',
+                'value': project_phids
+            }]
+        })
+
     def taskDetails(self, phid):
         """Lookup details of a Maniphest task."""
         r = self.post('maniphest.query', {'phids': [phid]})
@@ -97,7 +118,8 @@ class Client(object):
         })
 
     def getTasksWithProject(self, project_phid, continue_=None, statuses=None):
-        r = self._getTasksWithProjectContinue(project_phid, continue_, statuses=statuses)
+        r = self._getTasksWithProjectContinue(
+            project_phid, continue_, statuses=statuses)
         cursor = r['cursor']
         for case in r['data']:
             if case['type'] != 'TASK':
@@ -133,3 +155,13 @@ class Client(object):
         }
         return self.post('maniphest.search', params)[
             'data'][0]['attachments']['columns']
+
+    def getTaskSubtasks(self, phid):
+        params = {
+            "constraints": {
+                "phids": [phid],
+                "hasSubtasks": True
+            }
+        }
+        return self.post('maniphest.search', params)[
+            'data']
diff --git a/new_wikis_handler.py b/new_wikis_handler.py
index 803251a..9263b2f 100644
--- a/new_wikis_handler.py
+++ b/new_wikis_handler.py
@@ -6,6 +6,7 @@ import socket
 import requests
 
 from lib import Client
+from patch_makers import DnsPatchMaker, WikimediaMessagesPatchMaker
 
 final_text = ''
 gerrit_path = 'https://gerrit.wikimedia.org/g/'
@@ -17,6 +18,13 @@ def add_text(a):
     final_text += a + '\n'
 
 
+def add_checklist(url, text, checked):
+    if checked:
+        add_text(' [x] [[{}|{}]]'.format(url, text))
+    else:
+        add_text(' [] [[{}|{}]]'.format(url, text))
+
+
 def get_file_from_gerrit(path):
     gerrit_url = 'https://gerrit.wikimedia.org/g/'
     url = gerrit_url + '{0}?format=TEXT'.format(path)
@@ -34,8 +42,10 @@ def hostname_resolves(hostname):
         return False
     return True
 
+
 def handle_special_wiki_apache(parts):
-    apache_file = get_file_from_gerrit('operations/puppet/+/production/modules/mediawiki/manifests/web/prod_sites.pp')
+    apache_file = get_file_from_gerrit(
+        'operations/puppet/+/production/modules/mediawiki/manifests/web/prod_sites.pp')
     url = '.'.join(parts)
     return url in apache_file
 
@@ -46,24 +56,16 @@ def post_a_comment(comment):
     pass
 
 
-def create_subticket(text, projects, task_phid):
-    pass
+def handle_subticket_for_cloud(task_details, db_name):
+    hasSubtasks = client.getTaskSubtasks(task_details['phid'])
+    if hasSubtasks:
+        return
+    client.createSubtask('The new wiki is going to be **public**.', [
+        'PHID-PROJ-hwibeuyzizzy4xzunfsk',  # DBA
+        'PHID-PROJ-bj6y6ks7ampcwcignhce'  # Data services
+    ], task_details['phid'], 'Prepare and check storage layer for ' + db_name)
 
 
-def create_non_special_wikis_dns_subticket(parts, task_details):
-    pass
-
-
-def create_special_wikis_dns_subticket(parts, task_details):
-    pass
-
-
-def handle_subticket_for_cloud(ticket_phid, task_details, db_name):
-    pass
-
-
-def create_apache_config_subticket(parts, task_details):
-    pass
 
 def get_dummy_wiki(shard, family):
     if family == "wiktionary":
@@ -78,27 +80,52 @@
     }.get(shard, "?????")
 
 
+def create_patch_for_wikimedia_messages(db_name, english_name, url, lang, bug_id):
+    if not english_name:
+        return
+    r = requests.get(
+        'https://gerrit.wikimedia.org/r/changes/?q=bug:{}+project:mediawiki/extensions/WikimediaMessages'.format(bug_id))
+    b = json.loads('\n'.join(r.text.split('\n')[1:]))
+    if b:
+        return
+    maker = WikimediaMessagesPatchMaker(
+        db_name, english_name, url, lang, bug_id)
+    maker.run()
+
+
+def create_patch_for_dns(lang, bug_id):
+    r = requests.get(
+        'https://gerrit.wikimedia.org/r/changes/?q=bug:{}+project:operations/dns'.format(bug_id))
+    b = json.loads('\n'.join(r.text.split('\n')[1:]))
+    if b:
+        return
+    maker = DnsPatchMaker(lang, bug_id)
+    maker.run()
+
+
 def hande_task(phid):
     global final_text
     final_text = ''
     task_details = client.taskDetails(phid)
     print('Checking T%s' % task_details['id'])
+    task_tid = 'T' + task_details['id']
     add_text('\n\n------\n**Pre-install automatic checklist:**')
-    language_code = re.findall(r'\n- *?\*\*Language code:\*\* *?(\S+)', task_details['description'])
+    wiki_spec = {}
+    for case in re.findall(r'\n- *?\*\*(.+?):\*\* *?(.+)', task_details['description']):
+        wiki_spec[case[0].strip()] = case[1].strip()
+    language_code = wiki_spec.get('Language code')
     if not language_code:
         print('lang code not found, skipping')
         return
-    language_code = language_code[0]
-    url = re.findall(r'\n- *?\*\*Site URL:\*\* *?(\S+)', task_details['description'])
+    url = wiki_spec.get('Site URL')
    if not url:
         print('url not found, skipping')
         return
-    url = url[0]
     parts = url.split('.')
     if len(parts) != 3 or parts[2] != 'org':
         print('the url looks weird, skipping')
         return
-    shard = re.findall(r'\n- *?\*\*Shard:\*\* *?(\S+)', task_details['description'])[0]
+    shard = wiki_spec.get('Shard', 'TBD')
     shardDecided = shard != "TBD"
 
     if shardDecided:
@@ -107,36 +134,31 @@
         add_text(' [] #DBA decided about the shard')
 
     special = parts[1] == 'wikimedia'
-    dns_url = gerrit_path + 'operations/dns/+/master/templates/wikimedia.org' if special else gerrit_path + 'operations/dns/+/master/templates/helpers/langlist.tmpl'
+    dns_url = gerrit_path + 'operations/dns/+/master/templates/wikimedia.org' if special else gerrit_path + \
+        'operations/dns/+/master/templates/helpers/langlist.tmpl'
     dns = hostname_resolves(url)
     if not dns:
-        add_text(' [] [[{}|DNS]]'.format(dns_url))
-        if special:
-            create_special_wikis_dns_subticket(parts, task_details)
-        else:
-            create_non_special_wikis_dns_subticket(parts, task_details)
-        post_a_comment('It seems that there is not DNS entry for this wiki, '
-                       'I am creaing a subticket, Please make a patch.')
-    else:
-        add_text(' [x] [[{}|DNS]]'.format(dns_url))
+        if not special:
+            create_patch_for_dns(language_code, task_tid)
+    add_checklist(dns_url, 'DNS', dns)
 
     if parts[1] == 'wikipedia':
         db_name = parts[0].replace('-', '_') + 'wiki'
     else:
         db_name = parts[0].replace('-', '_') + parts[1]
 
-    handle_subticket_for_cloud(client.lookupPhid('T251371'), task_details, db_name)
+    if not special and wiki_spec.get('Special', '').lower() != 'yes':
+        handle_subticket_for_cloud(task_details, db_name)
 
     if special:
-        apache_url = gerrit_path + 'operations/puppet/+/master/modules/mediawiki/manifests/web/prod_sites.pp'
+        apache_url = gerrit_path + \
+            'operations/puppet/+/master/modules/mediawiki/manifests/web/prod_sites.pp'
         if not handle_special_wiki_apache(parts):
             apache = False
-            add_text(' [] [[{}|Apache config]]'.format(apache_url))
-            create_apache_config_subticket(parts, task_details)
         else:
             apache = True
-            add_text(' [x] [[{}|Apache config]]'.format(apache_url))
+        add_checklist(apache_url, 'Apache config', apache)
     else:
         apache = True
         add_text(' [x] Apache config (Not needed)')
@@ -145,10 +167,12 @@
     r = requests.get(langdb_url)
     if re.search(r'\n *?' + language_code + ':', r.text):
         langdb = True
-        add_text(' [x] [[{}|Language configuration in language data repo]]'.format(langdb_url))
+        add_text(
+            ' [x] [[{}|Language configuration in language data repo]]'.format(langdb_url))
     else:
         langdb = False
-        add_text(' [] [[{}|Language configuration in language data repo]]'.format(langdb_url))
+        add_text(
+            ' [] [[{}|Language configuration in language data repo]]'.format(langdb_url))
 
     core_messages_url = 'https://raw.githubusercontent.com/wikimedia/mediawiki/master/languages/messages/Messages{}.php'.format(
         language_code[0].upper() + language_code[1:]
     )
     r = requests.get(core_messages_url)
     if r.status_code == 200:
         core_lang = True
-        add_text(' [x] [[{}|Language configuration in mediawiki core]]'.format(core_messages_url))
     else:
         core_lang = False
-        add_text(' [] [[{}|Language configuration in mediawiki core]]'.format(core_messages_url))
+    add_checklist(core_messages_url,
+                  'Language configuration in mediawiki core', core_lang)
+
     path = 'mediawiki/extensions/WikimediaMessages/+/master/i18n/wikimediaprojectnames/en.json'
     wikimedia_messages_data = get_file_from_gerrit(path)
     wikimedia_messages_data = json.loads(wikimedia_messages_data)
     if 'project-localized-name-' + db_name in wikimedia_messages_data:
         wikimedia_messages_one = True
-        add_text(' [x] [[{}|Wikimedia messages configuration]]'.format(gerrit_path + path))
     else:
         wikimedia_messages_one = False
-        add_text(' [] [[{}|Wikimedia messages configuration]]'.format(gerrit_path + path))
+    english_name = wiki_spec.get('Project name (English)')
+    create_patch_for_wikimedia_messages(
+        db_name, english_name, url, language_code, task_tid)
+    add_checklist(gerrit_path + path,
+                  'Wikimedia messages configuration', wikimedia_messages_one)
     url = 'https://en.wikipedia.org/wiki/MediaWiki:Project-localized-name-' + db_name
     r = requests.get(url)
     if 'Wikipedia does not have a' not in r.text:
@@ -184,11 +212,10 @@
     search_messages_data = json.loads(search_messages_data)
     if 'search-interwiki-results-' + db_name in search_messages_data:
         wikimedia_messages_two = True
-        add_text(
-            ' [x] [[{}|Wikimedia messages (interwiki search result) configuration]]'.format(gerrit_path + path))
     else:
         wikimedia_messages_two = False
-        add_text(' [] [[{}|Wikimedia messages (interwiki search result) configuration]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path,
+                  'Wikimedia messages (interwiki search result) configuration', wikimedia_messages_two)
     url = 'https://en.wikipedia.org/wiki/MediaWiki:Search-interwiki-results-' + db_name
     r = requests.get(url)
     if 'Wikipedia does not have a' not in r.text:
@@ -200,7 +227,8 @@
     else:
         wikimedia_messages_two = True
         wikimedia_messages_two_deployed = True
-        add_text(' [x] Wikimedia messages (interwiki search result) configuration (not needed)')
+        add_text(
+            ' [x] Wikimedia messages (interwiki search result) configuration (not needed)')
 
     if dns and apache and langdb and core_lang and wikimedia_messages_one and wikimedia_messages_one_deployed and wikimedia_messages_two and wikimedia_messages_two_deployed and shardDecided:
         add_text('**The Wiki is ready to be created.**')
@@ -211,37 +239,27 @@
 
     path = 'mediawiki/services/restbase/deploy/+/master/scap/vars.yaml'
     restbase = get_file_from_gerrit(path)
-    if '.'.join(parts) in restbase:
-        add_text(' [x] [[{}|RESTbase]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|RESTbase]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'RESTbase', '.'.join(parts) in restbase)
+
     path = 'mediawiki/services/cxserver/+/master/config/languages.yaml'
     cxconfig = get_file_from_gerrit(path)
-    if '\n- ' + language_code in cxconfig:
-        add_text(' [x] [[{}|CX Config]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|CX Config]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'CX Config',
+                  '\n- ' + language_code in cxconfig)
 
     path = 'analytics/refinery/+/master/static_data/pageview/whitelist/whitelist.tsv'
     refinery_whitelist = get_file_from_gerrit(path)
-    if '.'.join(parts[:2]) in refinery_whitelist:
-        add_text(' [x] [[{}|Analytics refinery]]'.format(gerrit_path + path))
-    else:
-        add_text(' [] [[{}|Analytics refinery]]'.format(gerrit_path + path))
+    add_checklist(gerrit_path + path, 'Analytics refinery',
+                  '.'.join(parts[:2]) in refinery_whitelist)
 
-    url = 'pywikibot/core/+/master/pywikibot/families/{}_family.py'.format(parts[1])
+    url = 'pywikibot/core/+/master/pywikibot/families/{}_family.py'.format(
+        parts[1])
     pywikibot = get_file_from_gerrit(url)
-    if "'{}'".format(language_code) in pywikibot:
-        add_text(' [x] [[{}|Pywikibot]]'.format(gerrit_path + url))
-    else:
-        add_text(' [] [[{}|Pywikibot]]'.format(gerrit_path + url))
+    add_checklist(gerrit_path + url, 'Pywikibot',
+                  "'{}'".format(language_code) in pywikibot)
 
     url = 'https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities'
     wikiata_help_page = requests.get(url).text
-    if db_name in wikiata_help_page:
-        add_text(' [x] [[{}|Wikidata]]'.format(url))
-    else:
-        add_text(' [] [[{}|Wikidata]]'.format(url))
+    add_checklist(url, 'Wikidata', db_name in wikiata_help_page)
 
     add_text(' [] Import from Incubator')
     add_text(' [] Clean up old interwiki links')
@@ -256,9 +274,11 @@
     add_text('On mwmaint1002:')
     add_text('`scap pull`')
     add_text('`mwscript extensions/WikimediaMaintenance/addWiki.php --wiki={dummy} {lang} {family} {db} {url}`'.format(
-        dummy=dummy_wiki, lang=language_code, family=parts[1], db=db_name, url='.'.join(parts)
+        dummy=dummy_wiki, lang=language_code, family=parts[1], db=db_name, url='.'.join(
+            parts)
     ))
-    summary = 'Creating {db_name} ({phab})'.format(db_name=db_name, phab='T' + task_details['id'])
+    summary = 'Creating {db_name} ({phab})'.format(
+        db_name=db_name, phab=task_tid)
     add_text('On deploy1001:')
     if shard != "s3":
         add_text('`scap sync-file wmf-config/db-eqiad.php "{}"`'.format(summary))
@@ -266,7 +286,8 @@
     add_text('`scap sync-file dblists "{}"`'.format(summary))
     add_text('`scap sync-wikiversions "{}"`'.format(summary))
     if parts[1] == 'wikimedia':
-        add_text('`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary))
+        add_text(
+            '`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary))
     add_text('`scap sync-file static/images/project-logos/ "{}"`'.format(summary))
     add_text('`scap sync-file wmf-config/InitialiseSettings.php "{}"`'.format(summary))
     if parts[1] != 'wikimedia':
@@ -278,7 +299,8 @@
                              task_details['description'], re.DOTALL)
     if not old_report:
         print('old report not found, appending')
-        client.setTaskDescription(task_details['phid'], task_details['description'] + final_text)
+        client.setTaskDescription(
+            task_details['phid'], task_details['description'] + final_text)
     else:
         if old_report[0] != final_text:
             print('Updating old report')
diff --git a/patch_makers.py b/patch_makers.py
new file mode 100644
index 0000000..55ebe00
--- /dev/null
+++ b/patch_makers.py
@@ -0,0 +1,82 @@
+import json
+
+from gerrit import GerritBot
+
+
+class WikimediaMessagesPatchMaker(GerritBot):
+    def __init__(self, db_name, english_name, url, lang, bug_id):
+        self.db_name = db_name
+        self.english_name = english_name
+        self.wiki_url = url
+        self.wiki_lang = lang
+        super().__init__(
+            'mediawiki/extensions/WikimediaMessages',
+            'Add messages for {} ({})\n\nBug: {}'.format(
+                english_name, db_name, bug_id)
+        )
+
+    def changes(self):
+        file_ = 'i18n/wikimediaprojectnames/en.json'
+        result = self._read_json(file_)
+        result['project-localized-name-' + self.db_name] = self.english_name
+        self._write_json(file_, result)
+
+        file_ = 'i18n/wikimediaprojectnames/qqq.json'
+        result = self._read_json(file_)
+        result['project-localized-name-' + self.db_name] = '{{ProjectNameDocumentation|url=' + \
+            self.wiki_url + '|name=' + self.english_name + \
+            '|language=' + self.wiki_lang + '}}'
+        self._write_json(file_, result)
+
+        if 'wikipedia' not in self.wiki_url:
+            return
+
+        file_ = 'i18n/wikimediainterwikisearchresults/en.json'
+        result = self._read_json(file_)
+        result['search-interwiki-results-' +
+               self.db_name] = 'Showing results from [[:{}:|{}]].'.format(self.wiki_lang, self.english_name)
+        self._write_json(file_, result)
+
+        file_ = 'i18n/wikimediainterwikisearchresults/qqq.json'
+        result = self._read_json(file_)
+        result['search-interwiki-results-' + self.db_name] = 'Search results description for ' + \
+            self.english_name + '.\n{{LanguageNameTip|' + self.wiki_lang + '}}'
+        self._write_json(file_, result)
+
+    def _read_json(self, path):
+        with open(path, 'r') as f:
+            result = json.load(f)
+        return result
+
+    def _write_json(self, path, content):
+        with open(path, 'w') as f:
+            f.write(json.dumps(content, ensure_ascii=False,
+                               indent='\t', sort_keys=True))
+
+
+class DnsPatchMaker(GerritBot):
+    def __init__(self, lang, bug_id):
+        self.wiki_lang = lang
+        super().__init__(
+            'operations/dns',
+            'Add {} to langlist helper\n\nBug: {}'.format(lang, bug_id)
+        )
+
+    def changes(self):
+        with open('templates/helpers/langlist.tmpl', 'r') as f:
+            lines = f.read().split('\n')
+        header = []
+        langs = []
+        footer = []
+        for line in lines:
+            if not line.startswith(' '):
+                # Non-indented lines before the language block belong to the
+                # header, the ones after it to the footer.
+                if not langs:
+                    header.append(line)
+                else:
+                    footer.append(line)
+            else:
+                langs.append(line)
+        langs.append(" '{}',".format(self.wiki_lang))
+        langs.sort()
+        with open('templates/helpers/langlist.tmpl', 'w') as f:
+            f.write('\n'.join(header) + '\n' +
+                    '\n'.join(langs) + '\n' + '\n'.join(footer))
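-- 
A minimal sketch of the GerritBot subclass API that patch_makers.py builds on,
kept below the signature delimiter so it stays out of the commit itself. The
repository name, file edit, and commit message here are hypothetical
placeholders; the real callers are create_patch_for_dns() and
create_patch_for_wikimedia_messages() in new_wikis_handler.py, and running any
GerritBot assumes gerrit-creds.json and the bot's SSH private key sit next to
gerrit.py.

    from gerrit import GerritBot

    class ExamplePatchMaker(GerritBot):
        # GerritBot.run() clones self.name into a working checkout, calls
        # changes() inside it, then commits with the stored message and
        # pushes the result to refs/for/master on Gerrit.
        def changes(self):
            # Hypothetical edit: append a note to the repo's README.
            with open('README.md', 'a') as f:
                f.write('\nMaintained by the wiki-creation bot.\n')

    if __name__ == '__main__':
        ExamplePatchMaker('sandbox/example-repo',
                          'Add maintenance note\n\nBug: T000000').run()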