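"""Automated helper for Wikimedia wiki-creation tasks in Phabricator.

For every open task in the wiki-creation project, this script checks the
pre-install prerequisites (DNS, Apache config, language data, MediaWiki
core language support, WikimediaMessages), files patches and subtasks
where needed, and writes an automatic checklist plus step-by-step
creation commands back into the task description.
"""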
import base64
import json
import re
import socket

import requests

from lib import Client
from patch_makers import (AnalyticsPatchMaker, CxPatchMaker, DnsPatchMaker,
                          WikimediaMessagesPatchMaker)


final_text = ''
gerrit_path = 'https://gerrit.wikimedia.org/g/'
client = Client.newFromCreds()


def get_checklist_text(url, text, checked):
    if checked:
        return '[x] [[{}|{}]]'.format(url, text)
    else:
        return '[] [[{}|{}]]'.format(url, text)
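# Example: get_checklist_text('https://example.org', 'DNS', True) yields
# the Remarkup checklist item '[x] [[https://example.org|DNS]]'.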


def get_file_from_gerrit(path):
    gerrit_url = 'https://gerrit.wikimedia.org/g/'
    url = gerrit_url + '{0}?format=TEXT'.format(path)
    r = requests.get(url)
    if r.status_code == 200:
        return base64.b64decode(r.text).decode('utf-8')
    else:
        return ''
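# Gerrit's ?format=TEXT endpoint serves file contents base64-encoded,
# hence the b64decode above; a missing file yields an empty string.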


def get_gerrit_path(repo, filename):
    return repo + '/+/master/' + filename


def get_github_url(repo, filename):
    return 'https://raw.githubusercontent.com/wikimedia/{}/master/{}'.format(
        repo, filename
    )
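# Example (hypothetical arguments):
#   get_gerrit_path('operations/dns', 'templates/helpers/langlist.tmpl')
# returns 'operations/dns/+/master/templates/helpers/langlist.tmpl', which
# is prefixed with gerrit_path to form a browsable URL.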


class PostCreationHandler(object):
    def __init__(self, phid, db_name, url, language_code, parts):
        self.main_pid = phid
        self.db_name = db_name
        self.url = url
        self.parts = parts
        self.language_code = language_code
        self.post_ticket_bug_id = ''
        self.post_ticket_text = ''
        self.checkers = [
            self._check_restbase,
            self._check_cx,
            self._check_analytics,
            self._check_pywikibot,
            self._check_wikidata,
        ]
        self.handlers = [
            self._handle_restbase,
            self._handle_cx,
            self._handle_analytics,
            self._handle_pywikibot,
            self._handle_wikidata,
            self._handle_wikistats,
        ]
        self.handlers_needed = {}

    def handle(self):
        for checker in self.checkers:
            checker()
        self.add_text('[] Namespaces')
        self.add_text('[] Logos and wordmarks')
        self.add_text('[] Import from Incubator')
        self.add_text('[] Clean up old interwiki links')
        self.add_text(
            '[] For content wikis: '
            '[[https://meta.wikimedia.org/wiki/Stewards%27_noticeboard|ask '
            'the stewards]] to add the wiki to the global bot policy wikiset')
        self.add_text('[] Add the wiki to a CVNBot for SWMT monitoring')
        self._create_ticket()
        for handler in self.handlers:
            handler()

    def add_text(self, a):
        self.post_ticket_text += a + '\n'

    def add_checklist(self, url, text, checked):
        self.add_text(get_checklist_text(url, text, checked))

    def _create_ticket(self):
        result = client.createParentTask(
            self.post_ticket_text,
            [
                'PHID-PROJ-2fuv7mxzjnpjfuojdnfd',  # wiki-setup
                'PHID-PROJ-2b7oz62ylk3jk4aus262',  # platform-engineering
                'PHID-PROJ-flkea3bsbxquupwv5g2s',  # countervandalism-network
            ],
            self.main_pid,
            'Post-creation work for {}'.format(self.db_name))['object']
        self.post_ticket_phid = result['phid']
        self.post_ticket_bug_id = 'T' + str(result['id'])

    def _check_restbase(self):
        path = get_gerrit_path(
            'mediawiki/services/restbase/deploy',
            'scap/vars.yaml'
        )
        restbase = get_file_from_gerrit(path)
        self.add_checklist(gerrit_path + path, 'RESTbase',
                           self.url in restbase)
        self.handlers_needed['restbase'] = self.url not in restbase

    def _handle_restbase(self):
        if not self.handlers_needed['restbase']:
            return
        client.createSubtask(
            'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
            ['PHID-PROJ-mszihytuo3ij3fcxcxgm'],
            self.post_ticket_phid,
            'Add {} to RESTBase'.format(self.db_name))

    def _check_cx(self):
        path = get_gerrit_path(
            'mediawiki/services/cxserver',
            'config/languages.yaml'
        )
        cxconfig = get_file_from_gerrit(path)
        cx = '\n- ' + self.language_code in cxconfig
        self.add_checklist(gerrit_path + path, 'CX Config', cx)
        self.handlers_needed['cx'] = not cx

    def _handle_cx(self):
        if not self.handlers_needed['cx']:
            return
        r = requests.get(
            'https://gerrit.wikimedia.org/r/changes/'
            '?q=bug:{}+project:mediawiki/services/cxserver'.format(
                self.post_ticket_bug_id))
        # Drop Gerrit's anti-XSSI prefix line before parsing the JSON body.
        b = json.loads('\n'.join(r.text.split('\n')[1:]))
        if b:
            return
        maker = CxPatchMaker(self.language_code, self.post_ticket_bug_id)
        maker.run()

    def _check_analytics(self):
        path = get_gerrit_path(
            'analytics/refinery',
            'static_data/pageview/allowlist/allowlist.tsv'
        )
        url = '.'.join(self.parts[:2])
        refinery_allowlist = get_file_from_gerrit(path)
        self.add_checklist(gerrit_path + path, 'Analytics refinery',
                           url in refinery_allowlist)
        self.handlers_needed['analytics'] = url not in refinery_allowlist

    def _handle_analytics(self):
        if not self.handlers_needed['analytics']:
            return
        url = '.'.join(self.parts[:2])
        r = requests.get(
            'https://gerrit.wikimedia.org/r/changes/'
            '?q=bug:{}+project:analytics/refinery'.format(
                self.post_ticket_bug_id))
        # Drop Gerrit's anti-XSSI prefix line before parsing the JSON body.
        b = json.loads('\n'.join(r.text.split('\n')[1:]))
        if b:
            return
        maker = AnalyticsPatchMaker(url, self.post_ticket_bug_id)
        maker.run()

    def _check_pywikibot(self):
        path = get_gerrit_path(
            'pywikibot/core',
            'pywikibot/families/{}_family.py'.format(self.parts[1])
        )
        pywikibot = get_file_from_gerrit(path)
        self.add_checklist(gerrit_path + path, 'Pywikibot',
                           "'{}'".format(self.language_code) in pywikibot)
        self.handlers_needed['pywikibot'] = \
            "'{}'".format(self.language_code) not in pywikibot

    def _handle_pywikibot(self):
        if not self.handlers_needed['pywikibot']:
            return
        client.createSubtask(
            'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
            ['PHID-PROJ-orw42whe2lepxc7gghdq'],
            self.post_ticket_phid,
            'Add support for {} to Pywikibot'.format(self.db_name))

    def _check_wikidata(self):
        url = 'https://www.wikidata.org/w/api.php'
        wikidata_help_page = requests.get(url, params={
            'action': 'help',
            'modules': 'wbgetentities'
        }).text
        self.add_checklist(url, 'Wikidata', self.db_name in wikidata_help_page)

    def _handle_wikidata(self):
        client.createSubtask(
            'Per https://wikitech.wikimedia.org/wiki/Add_a_wiki once the wiki has been created',
            ['PHID-PROJ-egbmgxclscgwu2rbnotm', 'PHID-PROJ-7ocjej2gottz7cikkdc6'],
            self.post_ticket_phid,
            'Add Wikidata support for {}'.format(self.db_name))

    def _handle_wikistats(self):
        client.createSubtask(
            'Please add new wiki `%s` to Wikistats, once it is created. Thanks!' % self.db_name,
            ['PHID-PROJ-6sht6g4xpdii4c4bga2i'],  # VPS-project-Wikistats
            self.post_ticket_phid,
            'Add %s to wikistats' % self.db_name)


def add_text(a):
    global final_text
    final_text += a + '\n'


def add_checklist(url, text, checked):
    add_text(get_checklist_text(url, text, checked))


def hostname_resolves(hostname):
    try:
        socket.gethostbyname(hostname)
    except socket.error:
        return False
    return True
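# Example (hypothetical hostname): hostname_resolves('xx.wikipedia.org')
# returns True only once the DNS entry for the new wiki actually resolves.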


def handle_special_wiki_apache(parts):
    file_path = 'hieradata/common/mediawiki.yaml'
    apache_file = get_file_from_gerrit(
        'operations/puppet/+/production/' + file_path)
    url = '.'.join(parts)
    return url in apache_file


def post_a_comment(comment):
    # TODO: stub, the comment is composed here but never actually posted.
    comment = 'Hello, I am helping on creating this wiki. ' + comment + \
        ' ^_^ Sincerely, your Fully Automated Resource Tackler'


def handle_subticket_for_cloud(task_details, db_name, wiki_status):
    has_subtasks = client.getTaskSubtasks(task_details['phid'])
    if has_subtasks:
        return
    client.createSubtask(
        "The new wiki's visibility will be: **%s**." % wiki_status, [
            'PHID-PROJ-hwibeuyzizzy4xzunfsk',  # DBA
            'PHID-PROJ-bj6y6ks7ampcwcignhce'  # Data services
        ], task_details['phid'], 'Prepare and check storage layer for ' + db_name)


def handle_ticket_for_wikistats(task_details, db_name):
    client.createParentTask(
        'Please add new wiki `%s` to Wikistats, once it is created. Thanks!' % db_name, [
            'PHID-PROJ-6sht6g4xpdii4c4bga2i'  # VPS-project-Wikistats
        ], task_details['phid'], 'Add %s to wikistats' % db_name)


def get_dummy_wiki(shard, family):
    if family == "wiktionary":
        return {
            "s3": "aawiki",
            "s5": "mhwiktionary",
        }.get(shard, "?????")
    else:
        return {
            "s3": "aawiki",
            "s5": "muswiki"
        }.get(shard, "?????")
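# Example: get_dummy_wiki('s5', 'wiktionary') returns 'mhwiktionary', an
# existing wiki on the same shard that addWiki.php is run against below.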


def create_patch_for_wikimedia_messages(
        db_name, english_name, url, lang, bug_id):
    if not english_name:
        return
    r = requests.get(
        'https://gerrit.wikimedia.org/r/changes/?q='
        'bug:{}+project:mediawiki/extensions/WikimediaMessages'.format(bug_id))
    # Drop Gerrit's anti-XSSI prefix line before parsing the JSON body.
    b = json.loads('\n'.join(r.text.split('\n')[1:]))
    if b:
        return
    maker = WikimediaMessagesPatchMaker(
        db_name, english_name, url, lang, bug_id)
    maker.run()


def handle_dns(special, url, language_code, task_tid):
    dns_path = get_gerrit_path(
        'operations/dns',
        'templates/wikimedia.org' if special else
        'templates/helpers/langlist.tmpl')
    dns_url = gerrit_path + dns_path
    dns = hostname_resolves(url)
    print(url)
    if not dns:
        print('dns not found')
        if not special:
            print('not special')
            create_patch_for_dns(language_code, task_tid)
    add_checklist(dns_url, 'DNS', dns)
    return dns


def handle_apache(special, parts):
    if not special:
        add_text('[x] Apache config (Not needed)')
        return True

    file_path = 'hieradata/common/mediawiki.yaml'
    apache_url = gerrit_path + \
        'operations/puppet/+/production/' + file_path
    apache = handle_special_wiki_apache(parts)
    add_checklist(apache_url, 'Apache config', apache)
    return apache


def handle_langdb(language_code):
    langdb_url = get_github_url('language-data', 'data/langdb.yaml')
    r = requests.get(langdb_url)
    config = 'Language configuration in language data repo'
    langdb = bool(re.search(r'\n *?' + language_code + ':', r.text))
    add_checklist(langdb_url, config, langdb)
    return langdb


def handle_wikimedia_messages_one(
        db_name,
        wiki_spec,
        url,
        language_code,
        task_tid):
    path = get_gerrit_path(
        'mediawiki/extensions/WikimediaMessages',
        'i18n/wikimediaprojectnames/en.json'
    )
    wikimedia_messages_data = get_file_from_gerrit(path)
    wikimedia_messages_data = json.loads(wikimedia_messages_data)
    if 'project-localized-name-' + db_name not in wikimedia_messages_data:
        english_name = wiki_spec.get('Project name (English)')
        create_patch_for_wikimedia_messages(
            db_name, english_name, url, language_code, task_tid)
    # This message is optional, so the checklist items are always marked
    # as done and this check never blocks creation.
    add_checklist(gerrit_path + path,
                  'Wikimedia messages configuration (optional)', True)
    url = 'https://en.wikipedia.org/wiki/' + \
        'MediaWiki:Project-localized-name-' + db_name
    add_text('[x] [[{}|deployed]]'.format(url))
    return True


def handle_wikimedia_messages_two(db_name, parts):
    config = 'Wikimedia messages (interwiki search result) configuration'
    if parts[1] != 'wikipedia':
        add_text('[x] {} (not needed)'.format(config))
        return True
    path = get_gerrit_path(
        'mediawiki/extensions/WikimediaMessages',
        'i18n/wikimediainterwikisearchresults/en.json'
    )
    search_messages_data = json.loads(get_file_from_gerrit(path))
    wikimedia_messages_two = \
        'search-interwiki-results-' + db_name in search_messages_data
    add_checklist(
        gerrit_path + path,
        config,
        wikimedia_messages_two)
    url = 'https://en.wikipedia.org/wiki/' + \
        'MediaWiki:Search-interwiki-results-' + db_name
    r = requests.get(url)
    if 'Wikipedia does not have a' not in r.text:
        wikimedia_messages_two_deployed = True
        add_text('[x] [[{}|deployed]]'.format(url))
    else:
        wikimedia_messages_two_deployed = False
        add_text('[] [[{}|deployed]]'.format(url))
    return wikimedia_messages_two and wikimedia_messages_two_deployed


def create_patch_for_dns(lang, bug_id):
    r = requests.get(
        'https://gerrit.wikimedia.org/r/changes/'
        '?q=bug:{}+project:operations/dns'.format(bug_id))
    # Drop Gerrit's anti-XSSI prefix line before parsing the JSON body.
    b = json.loads('\n'.join(r.text.split('\n')[1:]))
    if b:
        return
    maker = DnsPatchMaker(lang, bug_id)
    maker.run()


def handle_core_lang(language_code):
    core_messages_url = get_github_url(
        'mediawiki',
        'languages/messages/Messages{}.php'.format(
            language_code[0].upper() + language_code[1:]))
    r = requests.get(core_messages_url)
    core_lang = r.status_code == 200
    add_checklist(core_messages_url,
                  'Language configuration in mediawiki core', core_lang)
    return core_lang


def get_db_name(wiki_spec, parts):
    db_name = wiki_spec.get('Database name')
    if not db_name:
        if parts[1] == 'wikipedia':
            db_name = parts[0].replace('-', '_') + 'wiki'
        else:
            db_name = parts[0].replace('-', '_') + parts[1]
    return db_name
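# Example (hypothetical task): for https://kcg.wikipedia.org the URL parts
# are ['kcg', 'wikipedia', 'org'], so the fallback database name becomes
# 'kcgwiki'; for kcg.wiktionary.org it would be 'kcgwiktionary'.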


def add_create_instructions(parts, shard, language_code, db_name, task_tid):
    add_text('\n-------')
    add_text('**Step by step commands**:')
    dummy_wiki = get_dummy_wiki(shard, parts[1])
    add_text('On deployment host:')
    add_text('`cd /srv/mediawiki-staging/`')
    add_text('`git fetch`')
    add_text('`git log -p HEAD..@{u}`')
    add_text('`git rebase`')
    add_text('On mwmaint1002:')
    add_text('`scap pull`')
    addwiki_path = 'mwscript extensions/WikimediaMaintenance/addWiki.php'
    add_text(
        '`{addwiki_path} --wiki={dummy} {lang} {family} {db} {url}`'.format(
            addwiki_path=addwiki_path,
            dummy=dummy_wiki,
            lang=language_code,
            family=parts[1],
            db=db_name,
            url='.'.join(parts)))
    add_text('On deployment host:')
    add_text('`scap sync-world "Creating {db_name} ({phab})"`'.format(
        db_name=db_name, phab=task_tid))
    add_text('On mwmaint1002:')
    add_text('`{search_path} --wiki={dbname} --cluster=all 2>&1 | tee {log}`'.format(
        search_path='mwscript extensions/CirrusSearch/maintenance/UpdateSearchIndexConfig.php',
        dbname=db_name,
        log='/tmp/{dbname}.UpdateSearchIndexConfig.log'.format(dbname=db_name),
    ))
    add_text('On deployment host:')
    add_text('`scap update-interwiki-cache`')


def update_task_report(task_details):
    global final_text
    if not final_text:
        return
    old_report = re.findall(
        r'(\n\n------\n\*\*Pre-install automatic checklist:'
        r'\*\*.+?\n\*\*End of automatic output\*\*\n)',
        task_details['description'], re.DOTALL)
    if not old_report:
        print('old report not found, appending')
        client.setTaskDescription(
            task_details['phid'], task_details['description'] + final_text)
    else:
        if old_report[0] != final_text:
            print('Updating old report')
            client.setTaskDescription(
                task_details['phid'],
                task_details['description'].replace(
                    old_report[0],
                    final_text))


def handle_task(task_details):
    global final_text
    final_text = ''
    print('Checking T%s' % task_details['id'])
    task_tid = 'T' + task_details['id']
    # Extract the wiki config from the task description
    wiki_spec = {}
    for case in re.findall(
            r'\n- *?\*\*(.+?):\*\* *?(.+)',
            task_details['description']):
        wiki_spec[case[0].strip()] = case[1].strip()
    language_code = wiki_spec.get('Language code')
    if not language_code:
        print('lang code not found, skipping')
        return
    url = wiki_spec.get('Site URL')
    if not url:
        print('url not found, skipping')
        return
    parts = url.split('.')
    if len(parts) != 3 or parts[2] != 'org':
        print('the url looks weird, skipping')
        return
    db_name = get_db_name(wiki_spec, parts)
    shard = wiki_spec.get('Shard', 'TBD')
    visibility = wiki_spec.get('Visibility', 'unknown')
    shard_decided = shard != 'TBD'
    special = parts[1] == 'wikimedia'
    add_text('\n\n------\n**Pre-install automatic checklist:**')
    if shard_decided:
        add_text('[X] #DBA decided about the shard')
    else:
        add_text('[] #DBA decided about the shard')
    dns = handle_dns(special, url, language_code, task_tid)
    if not special and wiki_spec.get('Special', '').lower() != 'yes':
        handle_subticket_for_cloud(task_details, db_name, visibility)
    apache = handle_apache(special, parts)
    langdb = handle_langdb(language_code)
    core_lang = handle_core_lang(language_code)
    wm_message_one = handle_wikimedia_messages_one(
        db_name, wiki_spec, url, language_code, task_tid
    )
    wm_message_two = handle_wikimedia_messages_two(db_name, parts)
    if dns and apache and langdb and core_lang and wm_message_one and \
            wm_message_two and shard_decided:
        add_text('**The Wiki is ready to be created.**')
    else:
        add_text('**The creation is blocked until these parts are all done.**')

    if visibility.lower() != 'private' and \
            not client.getTaskParents(task_details['phid']):
        handler = PostCreationHandler(
            task_details['phid'], db_name, url, language_code, parts)
        handler.handle()
    add_create_instructions(parts, shard, language_code, db_name, task_tid)
    add_text('\n**End of automatic output**')
2020-05-30 16:51:55 +02:00
def main ( ) :
open_create_wikis_phid = ' PHID-PROJ-kmpu7gznmc2edea3qn2x '
2020-08-29 21:53:23 +02:00
for phid in client . getTasksWithProject (
open_create_wikis_phid , statuses = [ ' open ' ] ) :
task_details = client . taskDetails ( phid )
hande_task ( task_details )
update_task_report ( task_details )
2020-05-19 11:16:25 +02:00
2020-08-29 21:53:23 +02:00
if __name__ == " __main__ " :
main ( )