mirror of
https://gitlab.wikimedia.org/ladsgroup/Phabricator-maintenance-bot
synced 2024-11-22 03:52:37 +01:00
Add creating parent tickets for new wiki creations
Bug: T253439
This commit is contained in:
parent
b72da2b58f
commit
6929f56b9b
1 changed file with 34 additions and 15 deletions
|
@ -54,17 +54,21 @@ def hostname_resolves(hostname):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def handle_restbase(url, phid):
|
def handle_restbase(url, phid, create_tickets, db_name):
|
||||||
path = get_gerrit_path(
|
path = get_gerrit_path(
|
||||||
'mediawiki/services/restbase/deploy',
|
'mediawiki/services/restbase/deploy',
|
||||||
'scap/vars.yaml'
|
'scap/vars.yaml'
|
||||||
)
|
)
|
||||||
restbase = get_file_from_gerrit(path)
|
restbase = get_file_from_gerrit(path)
|
||||||
add_checklist(gerrit_path + path, 'RESTbase', url in restbase)
|
add_checklist(gerrit_path + path, 'RESTbase', url in restbase)
|
||||||
if url not in restbase:
|
if url in restbase:
|
||||||
print(client.getTaskParents(phid))
|
return
|
||||||
import sys
|
if create_tickets:
|
||||||
sys.exit()
|
client.createParentTask(
|
||||||
|
'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
|
||||||
|
['PHID-PROJ-mszihytuo3ij3fcxcxgm'],
|
||||||
|
phid,
|
||||||
|
'Add {} to RESTBase'.format(db_name))
|
||||||
|
|
||||||
|
|
||||||
def handle_cx(language_code, bug_id):
|
def handle_cx(language_code, bug_id):
|
||||||
|
@ -89,7 +93,6 @@ def handle_cx(language_code, bug_id):
|
||||||
|
|
||||||
|
|
||||||
def handle_analytics(url, bug_id):
|
def handle_analytics(url, bug_id):
|
||||||
return
|
|
||||||
path = get_gerrit_path(
|
path = get_gerrit_path(
|
||||||
'analytics/refinery',
|
'analytics/refinery',
|
||||||
'static_data/pageview/whitelist/whitelist.tsv'
|
'static_data/pageview/whitelist/whitelist.tsv'
|
||||||
|
@ -110,7 +113,7 @@ def handle_analytics(url, bug_id):
|
||||||
maker.run()
|
maker.run()
|
||||||
|
|
||||||
|
|
||||||
def handle_pywikibot(family, language_code):
|
def handle_pywikibot(family, language_code, create_tickets, db_name, phid):
|
||||||
path = get_gerrit_path(
|
path = get_gerrit_path(
|
||||||
'pywikibot/core',
|
'pywikibot/core',
|
||||||
'pywikibot/families/{}_family.py'.format(family)
|
'pywikibot/families/{}_family.py'.format(family)
|
||||||
|
@ -118,15 +121,27 @@ def handle_pywikibot(family, language_code):
|
||||||
pywikibot = get_file_from_gerrit(path)
|
pywikibot = get_file_from_gerrit(path)
|
||||||
add_checklist(gerrit_path + path, 'Pywikibot',
|
add_checklist(gerrit_path + path, 'Pywikibot',
|
||||||
"'{}'".format(language_code) in pywikibot)
|
"'{}'".format(language_code) in pywikibot)
|
||||||
|
if create_tickets:
|
||||||
|
client.createParentTask(
|
||||||
|
'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
|
||||||
|
['PHID-PROJ-orw42whe2lepxc7gghdq'],
|
||||||
|
phid,
|
||||||
|
'Add support for {} to Pywikibot'.format(db_name))
|
||||||
|
|
||||||
|
|
||||||
def handle_wikidata(db_name):
|
def handle_wikidata(db_name, create_tickets, phid):
|
||||||
url = 'https://www.wikidata.org/w/api.php'
|
url = 'https://www.wikidata.org/w/api.php'
|
||||||
wikiata_help_page = requests.get(url, params={
|
wikiata_help_page = requests.get(url, params={
|
||||||
'action': 'help',
|
'action': 'help',
|
||||||
'modules': 'wbgetentities'
|
'modules': 'wbgetentities'
|
||||||
}).text
|
}).text
|
||||||
add_checklist(url, 'Wikidata', db_name in wikiata_help_page)
|
add_checklist(url, 'Wikidata', db_name in wikiata_help_page)
|
||||||
|
if create_tickets:
|
||||||
|
client.createParentTask(
|
||||||
|
'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
|
||||||
|
['PHID-PROJ-egbmgxclscgwu2rbnotm', 'PHID-PROJ-7ocjej2gottz7cikkdc6'],
|
||||||
|
phid,
|
||||||
|
'Add Wikidata support for {}'.format(db_name))
|
||||||
|
|
||||||
|
|
||||||
def handle_special_wiki_apache(parts):
|
def handle_special_wiki_apache(parts):
|
||||||
|
@ -189,8 +204,11 @@ def handle_dns(special, url, language_code, task_tid):
|
||||||
'templates/helpers/langlist.tmpl')
|
'templates/helpers/langlist.tmpl')
|
||||||
dns_url = gerrit_path + dns_path
|
dns_url = gerrit_path + dns_path
|
||||||
dns = hostname_resolves(url)
|
dns = hostname_resolves(url)
|
||||||
|
print(url)
|
||||||
if not dns:
|
if not dns:
|
||||||
|
print('dns not found')
|
||||||
if not special:
|
if not special:
|
||||||
|
print('not special')
|
||||||
create_patch_for_dns(language_code, task_tid)
|
create_patch_for_dns(language_code, task_tid)
|
||||||
add_checklist(dns_url, 'DNS', dns)
|
add_checklist(dns_url, 'DNS', dns)
|
||||||
return dns
|
return dns
|
||||||
|
@ -203,7 +221,7 @@ def handle_apache(special, parts):
|
||||||
|
|
||||||
file_path = 'modules/mediawiki/manifests/web/prod_sites.pp'
|
file_path = 'modules/mediawiki/manifests/web/prod_sites.pp'
|
||||||
apache_url = gerrit_path + \
|
apache_url = gerrit_path + \
|
||||||
'operations/puppet/+/production/' + file_path
|
'operations/puppet/+/production/' + file_path
|
||||||
if not handle_special_wiki_apache(parts):
|
if not handle_special_wiki_apache(parts):
|
||||||
apache = False
|
apache = False
|
||||||
else:
|
else:
|
||||||
|
@ -246,7 +264,7 @@ def handle_wikimedia_messages_one(
|
||||||
add_checklist(gerrit_path + path,
|
add_checklist(gerrit_path + path,
|
||||||
'Wikimedia messages configuration', wikimedia_messages_one)
|
'Wikimedia messages configuration', wikimedia_messages_one)
|
||||||
url = 'https://en.wikipedia.org/wiki/' + \
|
url = 'https://en.wikipedia.org/wiki/' + \
|
||||||
'MediaWiki:Project-localized-name-' + db_name
|
'MediaWiki:Project-localized-name-' + db_name
|
||||||
r = requests.get(url)
|
r = requests.get(url)
|
||||||
if 'Wikipedia does not have a' not in r.text:
|
if 'Wikipedia does not have a' not in r.text:
|
||||||
wikimedia_messages_one_deployed = True
|
wikimedia_messages_one_deployed = True
|
||||||
|
@ -278,7 +296,7 @@ def handle_wikimedia_messages_two(db_name, parts):
|
||||||
config,
|
config,
|
||||||
wikimedia_messages_two)
|
wikimedia_messages_two)
|
||||||
url = 'https://en.wikipedia.org/wiki/' + \
|
url = 'https://en.wikipedia.org/wiki/' + \
|
||||||
'MediaWiki:Search-interwiki-results-' + db_name
|
'MediaWiki:Search-interwiki-results-' + db_name
|
||||||
r = requests.get(url)
|
r = requests.get(url)
|
||||||
if 'Wikipedia does not have a' not in r.text:
|
if 'Wikipedia does not have a' not in r.text:
|
||||||
wikimedia_messages_two_deployed = True
|
wikimedia_messages_two_deployed = True
|
||||||
|
@ -401,7 +419,7 @@ def hande_task(task_details):
|
||||||
# Extract wiki config
|
# Extract wiki config
|
||||||
wiki_spec = {}
|
wiki_spec = {}
|
||||||
for case in re.findall(
|
for case in re.findall(
|
||||||
r'\n- *?\*\*(.+?):\*\* *?(.+)',
|
r'\n- *?\*\*(.+?):\*\* *?(.+)',
|
||||||
task_details['description']):
|
task_details['description']):
|
||||||
wiki_spec[case[0].strip()] = case[1].strip()
|
wiki_spec[case[0].strip()] = case[1].strip()
|
||||||
language_code = wiki_spec.get('Language code')
|
language_code = wiki_spec.get('Language code')
|
||||||
|
@ -446,11 +464,12 @@ def hande_task(task_details):
|
||||||
|
|
||||||
if visibility.lower() != 'private':
|
if visibility.lower() != 'private':
|
||||||
add_text('\n-------\n**Post install automatic checklist:**')
|
add_text('\n-------\n**Post install automatic checklist:**')
|
||||||
handle_restbase(url, task_details['phid'])
|
create_tickets = client.getTaskParents(task_details['phid'])
|
||||||
|
handle_restbase(url, task_details['phid'], not create_tickets, db_name)
|
||||||
handle_cx(language_code, task_tid)
|
handle_cx(language_code, task_tid)
|
||||||
handle_analytics('.'.join(parts[:2]), task_tid)
|
handle_analytics('.'.join(parts[:2]), task_tid)
|
||||||
handle_pywikibot(parts[1], language_code)
|
handle_pywikibot(parts[1], language_code, not create_tickets, db_name, task_details['phid'])
|
||||||
handle_wikidata(db_name)
|
handle_wikidata(db_name, not create_tickets, task_details['phid'])
|
||||||
add_text(' [] Import from Incubator')
|
add_text(' [] Import from Incubator')
|
||||||
add_text(' [] Clean up old interwiki links')
|
add_text(' [] Clean up old interwiki links')
|
||||||
add_create_instructions(parts, shard, language_code, db_name, task_tid)
|
add_create_instructions(parts, shard, language_code, db_name, task_tid)
|
||||||
|
|
Loading…
Reference in a new issue