diff --git a/cron b/cron
index eea1c4a..509e61f 100755
--- a/cron
+++ b/cron
@@ -1,3 +1,4 @@
 15 * * * * jsub -once -N column_mover python3 /data/project/phabbot/phabbot/column_mover.py /data/project/phabbot/phabbot/creds.json 3600 >/dev/null 2>&1
 10 * * * * jsub -once -N patch_for_review python3 /data/project/phabbot/phabbot/patchforreview_remover.py /data/project/phabbot/phabbot/creds.json 3600 >/dev/null 2>&1
 5 * * * * jlocal bash /data/project/phabbot/phabbot/updater.sh >/dev/null 2>&1
+10 22 * * * jsub -once -N new_wikis_handler python3 /data/project/phabbot/phabbot/new_wikis_handler.py /data/project/phabbot/phabbot/creds.json 10000000 >/dev/null 2>&1
\ No newline at end of file
diff --git a/lib.py b/lib.py
index 8b83cb5..61076fe 100755
--- a/lib.py
+++ b/lib.py
@@ -62,6 +62,16 @@ class Client(object):
             }]
         })
 
+    def setTaskDescription(self, task_phid, new_desc):
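+        """Replace a task's description via a maniphest.edit transaction."""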
+        self.post('maniphest.edit', {
+            'objectIdentifier': task_phid,
+            'transactions': [{
+                'type': 'description',
+                'value': new_desc,
+            }]
+        })
+
     def taskDetails(self, phid):
         """Lookup details of a Maniphest task."""
         r = self.post('maniphest.query', {'phids': [phid]})
@@ -87,8 +96,8 @@ class Client(object):
             }]
         })
 
-    def getTasksWithProject(self, project_phid, continue_=None):
-        r = self._getTasksWithProjectContinue(project_phid, continue_)
+    def getTasksWithProject(self, project_phid, continue_=None, statuses=None):
+        r = self._getTasksWithProjectContinue(project_phid, continue_, statuses=statuses)
         cursor = r['cursor']
         for case in r['data']:
             if case['type'] != 'TASK':
@@ -99,7 +108,7 @@
-                project_phid, cursor['after']):
+                project_phid, cursor['after'], statuses=statuses):
             yield case
 
-    def _getTasksWithProjectContinue(self, project_phid, continue_=None):
+    def _getTasksWithProjectContinue(self, project_phid, continue_=None, statuses=None):
         params = {
             'limit': 100,
             'constraints': {
@@ -109,6 +118,9 @@
         }
         if continue_:
             params['after'] = continue_
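+        # Optional status filter (e.g. ['open']), forwarded as a maniphest.search constraint.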
+        if statuses:
+            params['constraints']['statuses'] = statuses
         return self.post('maniphest.search', params)
 
     def getTaskColumns(self, phid):
diff --git a/new_wikis_handler.py b/new_wikis_handler.py
index fd66758..2552f29 100644
--- a/new_wikis_handler.py
+++ b/new_wikis_handler.py
@@ -1,12 +1,21 @@
 import base64
 import json
 import re
-import sys
 
 import requests
 
 from lib import Client
 
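+# Buffer for the generated checklist; handle_task() fills it via add_text() and
+# then syncs it into the Phabricator task description.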
f"'{language_code}'" in pywikibot: - print(' [x] [[{}|Pywikibot]]'.format(gerrit_path + url)) + add_text(' [x] [[{}|Pywikibot]]'.format(gerrit_path + url)) else: - print(' [] [[{}|Pywikibot]]'.format(gerrit_path + url)) + add_text(' [] [[{}|Pywikibot]]'.format(gerrit_path + url)) url = 'https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities' wikiata_help_page = requests.get(url).text if db_name in wikiata_help_page: - print(' [x] [[{}|Wikidata]]'.format(url)) + add_text(' [x] [[{}|Wikidata]]'.format(url)) else: - print(' [] [[{}|Wikidata]]'.format(url)) + add_text(' [] [[{}|Wikidata]]'.format(url)) - print(' [] Import from Incubator') - print(' [] Clean up old interwiki links') - print('\n-------') - print('**Step by step commands**:') + add_text(' [] Import from Incubator') + add_text(' [] Clean up old interwiki links') + add_text('\n-------') + add_text('**Step by step commands**:') if parts[1] == 'wiktionary': dummy_wiki = 'aawiktionary' else: dummy_wiki = 'aawiki' - print('On mwmaint1002:') - print('`mwscript extensions/WikimediaMaintenance/addWiki.php --wiki={dummy} {lang} {family} {db} {url}`'.format( + add_text('On deploy1001:') + add_text('`cd /srv/mediawiki-staging/`') + add_text('`git fetch`') + add_text('`git log -p HEAD..@{u}`') + add_text('`git rebase`') + add_text('On mwmaint1002:') + add_text('`scap pull`') + add_text('`mwscript extensions/WikimediaMaintenance/addWiki.php --wiki={dummy} {lang} {family} {db} {url}`'.format( dummy=dummy_wiki, lang=language_code, family=parts[1], db=db_name, url='.'.join(parts) )) - summary = 'Creating {db_name} ({phab})'.format(db_name=db_name, phab=sys.argv[2]) - print('On deploy1001:') - print('`scap sync-file dblists "{}"`'.format(summary)) - print('`scap sync-wikiversions "{}"`'.format(summary)) + summary = 'Creating {db_name} ({phab})'.format(db_name=db_name, phab='T' + task_details['id']) + add_text('On deploy1001:') + add_text('`scap sync-file dblists "{}"`'.format(summary)) + add_text('`scap sync-wikiversions "{}"`'.format(summary)) if parts[1] == 'wikimedia': - print('`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary)) - print('`scap sync-file wmf-config/InitialiseSettings.php "{}"`'.format(summary)) - print('`scap sync-file static/images/project-logos/ "{}"`'.format(summary)) - print('`scap sync-file langlist "{}"`'.format(summary)) - print('`scap update-interwiki-cache`') + add_text('`scap sync-file multiversion/MWMultiVersion.php "{}"`'.format(summary)) + add_text('`scap sync-file wmf-config/InitialiseSettings.php "{}"`'.format(summary)) + add_text('`scap sync-file static/images/project-logos/ "{}"`'.format(summary)) + if parts[1] != 'wikimedia': + add_text('`scap sync-file langlist "{}"`'.format(summary)) + add_text('`scap update-interwiki-cache`') + add_text('\n**End of automatic output**') + old_report = re.findall( + r'(\n\n------\n\*\*Pre-install automatic checklist:\*\*.+?\n\*\*End of automatic output\*\*\n)', + task_details['description'], re.DOTALL) + if not old_report: + print('old report not found, appending') + client.setTaskDescription(task_details['phid'], task_details['description'] + final_text) + else: + if old_report[0] != final_text: + print('Updating old report') + client.setTaskDescription(task_details['phid'], + task_details['description'].replace(old_report[0], final_text)) + + +def main(): + open_create_wikis_phid = 'PHID-PROJ-kmpu7gznmc2edea3qn2x' + for phid in client.getTasksWithProject(open_create_wikis_phid, statuses=['open']): + print('Checking', phid) + hande_task(phid) 
+    old_report = re.findall(
+        r'(\n\n------\n\*\*Pre-install automatic checklist:\*\*.+?\n\*\*End of automatic output\*\*\n)',
+        task_details['description'], re.DOTALL)
+    if not old_report:
+        print('old report not found, appending')
+        client.setTaskDescription(task_details['phid'], task_details['description'] + final_text)
+    else:
+        if old_report[0] != final_text:
+            print('Updating old report')
+            client.setTaskDescription(task_details['phid'],
+                                      task_details['description'].replace(old_report[0], final_text))
+
+
+def main():
+    open_create_wikis_phid = 'PHID-PROJ-kmpu7gznmc2edea3qn2x'
+    for phid in client.getTasksWithProject(open_create_wikis_phid, statuses=['open']):
+        print('Checking', phid)
+        handle_task(phid)
+
+
+main()