
rename master to main branch following LLVM

Mikhail Goncharov 2020-12-09 17:23:01 +01:00
parent 12c4c43bb0
commit 99e43a8d9a
20 changed files with 116 additions and 63 deletions


@@ -13,14 +13,14 @@ Phabricator](https://reviews.llvm.org) instance.
 *Phabricator* (https://reviews.llvm.org) is the code review tool in the LLVM
 project.
-The workflow checks the patches before a user merges them to the master branch -
+The workflow checks the patches before a user merges them to the main branch -
 thus the term *pre-merge testing**. When a user uploads a patch to the LLVM
 Phabricator, Phabricator triggers the checks and then displays the results.
-The CI system checks the patches **before** a user merges them to the master
+The CI system checks the patches **before** a user merges them to the main
 branch. This way bugs in a patch are contained during the code review stage and
-do not pollute the master branch. The more bugs the CI system can catch during
-the code review phase, the more stable and bug-free the master branch will
+do not pollute the main branch. The more bugs the CI system can catch during
+the code review phase, the more stable and bug-free the main branch will
 become. <sup>[citation needed]()</sup>
 This repository contains the configurations and script to run pre-merge checks
@@ -58,7 +58,7 @@ please make sure that either:
 * You set a git hash as `sourceControlBaseRevision` in Phabricator which is
 * available on the Github repository, **or** you define the dependencies of your
-* patch in Phabricator, **or** your patch can be applied to the master branch.
+* patch in Phabricator, **or** your patch can be applied to the main branch.
 Only then can the build server apply the patch locally and run the builds and
 tests.
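As an illustration of that last requirement, a minimal sketch of how a build server could test whether a patch still applies on top of `main` (a plain git checkout and a hypothetical patch-file path are assumed; the repository's actual patching logic is more involved):

```python
import subprocess

def patch_applies_to_main(repo_dir: str, patch_file: str) -> bool:
    """Return True if patch_file would apply cleanly on top of origin/main."""
    subprocess.check_call(['git', 'fetch', 'origin', 'main'], cwd=repo_dir)
    subprocess.check_call(['git', 'checkout', 'origin/main'], cwd=repo_dir)
    # --check only verifies applicability; the working tree is left untouched.
    result = subprocess.run(['git', 'apply', '--check', patch_file], cwd=repo_dir)
    return result.returncode == 0
```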


@@ -46,7 +46,7 @@ RUN choco install -y sccache
 RUN pip install psutil
 # install python dependencies for the scripts
-RUN pip install -r https://raw.githubusercontent.com/google/llvm-premerge-checks/master/scripts/requirements.txt
+RUN pip install -r https://raw.githubusercontent.com/google/llvm-premerge-checks/main/scripts/requirements.txt
 # configure Python encoding
 ENV PYTHONIOENCODING=UTF-8


@@ -35,7 +35,7 @@ ENV LC_ALL en_US.UTF-8
 # Install python dependencies for the scripts. ADD will check contentents of a file for changes changed.
 # TODO: that should be done during the build as it will pull this repo anyway and will have latest version.
-ADD "https://raw.githubusercontent.com/google/llvm-premerge-checks/master/scripts/requirements.txt" requirements.txt
+ADD "https://raw.githubusercontent.com/google/llvm-premerge-checks/main/scripts/requirements.txt" requirements.txt
 RUN pip3 install -r requirements.txt
 RUN ln -s /usr/bin/clang-10 /usr/bin/clang;\


@@ -25,7 +25,7 @@ build request. Buildkite job sends build results directly to Phabricator.
 - every review creates a new branch in [fork of
   llvm-project](https://github.com/llvm-premerge-tests/llvm-project).
-![deployment diagram](http://www.plantuml.com/plantuml/proxy?src=https://raw.githubusercontent.com/google/llvm-premerge-checks/master/docs/deployment.plantuml)
+![deployment diagram](http://www.plantuml.com/plantuml/proxy?src=https://raw.githubusercontent.com/google/llvm-premerge-checks/main/docs/deployment.plantuml)
 # Phabricator integration
@@ -57,7 +57,7 @@ export SCRIPT_DIR="${SRC}"/scripts
 rm -rf "${SRC}"
 git clone --depth 1 https://github.com/google/llvm-premerge-checks.git "${SRC}"
 cd "${SRC}"
-git fetch origin "${ph_scripts_refspec:-master}":x
+git fetch origin "${ph_scripts_refspec:-main}":x
 git checkout x
 cd "$BUILDKITE_BUILD_CHECKOUT_PATH"
 ${SCRIPT_DIR}/buildkite/build_branch_pipeline.py | tee /dev/tty | buildkite-agent pipeline upload
@@ -75,7 +75,7 @@ That in sends an HTTP POST request to [**phab-proxy**](../phabricator-proxy)
 that submits a new buildkite job **diff-checks**. All parameters from the
 original request are put in the build's environment with `ph_` prefix (to avoid
 shadowing any Buildkite environment variable). "ph_scripts_refspec" parameter
-defines refspec of llvm-premerge-checks to use ("master" by default).
+defines refspec of llvm-premerge-checks to use ("main" by default).
 **diff-checks** pipeline
 ([create_branch_pipeline.py](../scripts/create_branch_pipeline.py))


@@ -32,7 +32,7 @@ Set `CONDUIT_TOKEN` with your personal one from `https://reviews.llvm.org/settin
 ## Testing changes before merging
-It's recommended to test even smallest changes before committing them to the `master` branch.
+It's recommended to test even smallest changes before committing them to the `main` branch.
 1. Create a pull request here.
 1. Manually create a buildkite build in the pipeline you are updating and specify
@@ -144,7 +144,7 @@ To spawn a new windows agent:
 1. Login to the new machine via RDP (you will need a RDP client, e.g. Chrome app).
 1. In the RDP session: run these commands in the CMD window under Administrator to bootstrap the Windows machine:
 ```powershell
-Invoke-WebRequest -uri 'https://raw.githubusercontent.com/google/llvm-premerge-checks/master/scripts/windows_agent_bootstrap.ps1' -OutFile c:\windows_agent_bootstrap.ps1
+Invoke-WebRequest -uri 'https://raw.githubusercontent.com/google/llvm-premerge-checks/main/scripts/windows_agent_bootstrap.ps1' -OutFile c:\windows_agent_bootstrap.ps1
 c:/windows_agent_bootstrap.ps1 -ssd
 ```
 Ignore the pop-up to format the new disk and wait for the machine to reboot.
@@ -173,12 +173,12 @@ schtasks.exe /create /tn "Start Buildkite agent" /ru SYSTEM /SC ONSTART /DELAY 0
 ## Custom environment variables
 Buildkite pipelines have a number of custom environment variables one can set to change their behavior. That is useful to debug issues
-or test changes. They are mostly used by pipleine generators, e.g. [build_master_pipeline](../scripts/build_master_pipeline.py),
+or test changes. They are mostly used by pipleine generators, e.g. [pipeline_main](../scripts/pipeline_main.py),
 please refer to the source code for the details. These variables have `ph_` prefix and can be set with URL parameters in Harbormaster build.
 Most commonly used are:
-- `ph_scripts_refspec` ("master" by default): refspec branch of llvm-premerge-checks to use. This variable is also used in pipeline "bootstrap" in Buildkite interface.
+- `ph_scripts_refspec` ("main" by default): refspec branch of llvm-premerge-checks to use. This variable is also used in pipeline "bootstrap" in Buildkite interface.
 - `ph_dry_run_report`: do not report any results back to Phabricator.
 - `ph_no_cache`: (if set to any value) clear compilation cache before the build.
 - `ph_projects`: which projects to use, "detect" will look on diff to infer the projects, "default" selects all projects.
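For reference, this is roughly how the pipeline generators later in this commit consume those `ph_*` variables (names and defaults are taken from the scripts in the hunks below; treat this as a summary, not the exact code):

```python
import os

# Summary of the ph_* variables documented above, as read by the pipeline generators.
scripts_refspec = os.getenv('ph_scripts_refspec', 'main')  # llvm-premerge-checks ref to check out
dry_run = os.getenv('ph_dry_run_report') is not None       # if set, skip reporting back to Phabricator
no_cache = os.getenv('ph_no_cache') is not None            # if set, clear the compilation cache first
projects = os.getenv('ph_projects', 'detect')              # 'detect', 'default', or an explicit list
log_level = os.getenv('ph_log_level', 'WARNING')
```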


@@ -27,7 +27,7 @@ def build():
     for k, v in params.items():
         if len(v) == 1:
             build_env['ph_' + k] = v[0]
-    refspec = 'master'
+    refspec = 'main'
     if 'ph_scripts_refspec' in build_env:
         refspec = build_env['ph_scripts_refspec']
     build_request = {
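The `build_request` dictionary is truncated in this hunk. A hedged sketch of what the proxy plausibly sends to the Buildkite REST API's create-build endpoint is shown below; the organization and pipeline slugs are illustrative and the exact payload in the proxy may differ:

```python
import requests

def trigger_diff_checks(build_env: dict, refspec: str, api_token: str) -> dict:
    """Sketch: start a 'diff-checks' build with all ph_* parameters in its environment."""
    url = ('https://api.buildkite.com/v2/organizations/llvm-project'
           '/pipelines/diff-checks/builds')  # illustrative organization/pipeline slugs
    build_request = {
        'commit': 'HEAD',
        'branch': refspec,   # 'main' unless ph_scripts_refspec overrides it
        'message': 'pre-merge checks',
        'env': build_env,    # original request parameters, prefixed with 'ph_'
    }
    response = requests.post(url, json=build_request,
                             headers={'Authorization': f'Bearer {api_token}'})
    response.raise_for_status()
    return response.json()
```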


@@ -140,7 +140,7 @@ if __name__ == '__main__':
     pmt_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
     parser = argparse.ArgumentParser(
         description='Benchmark for LLVM pre-merge tests.')
-    parser.add_argument('--commit', type=str, default='master', help="LLVM commit to run this benchmark on.")
+    parser.add_argument('--commit', type=str, default='main', help="LLVM commit to run this benchmark on.")
     parser.add_argument('--result-file', type=str, default='pmt-benchmark.csv',
                         help="path to CSV file where to store the benchmark results")
     parser.add_argument('--workdir', type=str, default=os.path.join(os.getcwd(), 'benchmark'),


@@ -72,7 +72,7 @@ def run(base_commit, ignore_config, step: Optional[Step], report: Optional[Repor
             severity = match.group(4)
             text = match.group(5)
             text += '\n[[{} | not useful]] '.format(
-                'https://github.com/google/llvm-premerge-checks/blob/master/docs/clang_tidy.md#warning-is-not-useful')
+                'https://github.com/google/llvm-premerge-checks/blob/main/docs/clang_tidy.md#warning-is-not-useful')
             if severity in ['warning', 'error']:
                 if severity == 'warning':
                     warn_count += 1
@@ -100,7 +100,7 @@ def run(base_commit, ignore_config, step: Optional[Step], report: Optional[Repor
             report.add_artifact(os.getcwd(), p, 'clang-tidy')
     if errors_count + warn_count != 0:
         step.success = False
-        url = "https://github.com/google/llvm-premerge-checks/blob/master/docs/clang_tidy.md#review-comments."
+        url = "https://github.com/google/llvm-premerge-checks/blob/main/docs/clang_tidy.md#review-comments."
         annotate(f'clang-tidy found {errors_count} errors and {warn_count} warnings. {inline_comments} of them were '
                  f'added as review comments [why?]({url})', style='error')
     logging.debug(f'report: {report}')


@@ -5,22 +5,22 @@ a set of metrics. This doc will summarize the metrics and tools. All of the data
 shall be collected as time series, so that we can see changes over time.
 * Impact - The metrics we ultimately want to improve
-    * Percentage of [build-bot build](http://lab.llvm.org:8011/) on master
+    * Percentage of [build-bot build](http://lab.llvm.org:8011/) on main
       failing. (Buildbot_percentage_failing)
-    * Time to fix a broken master build: Time between start of failing builds
+    * Time to fix a broken main build: Time between start of failing builds
      until the build is fixed. (BuildBot_time_to_fix)
    * Percentage of Revisions on Phabricator where a broken build was fixed
      afterwards. This would indicate that a bug was found and fixed during
      the code review phase. (Premerge_fixes)
-    * Number of reverts on master. This indicates that something was broken on
-      master that slipped through the pre-merge tests or was submitted without
+    * Number of reverts on main. This indicates that something was broken on
+      main that slipped through the pre-merge tests or was submitted without
      any review. (Upstream_reverts)
 * Users and behavior - Interesting to see and useful to adapt our approach.
-    * Percentage of commits to master that went through Phabricator.
+    * Percentage of commits to main that went through Phabricator.
    * Number of participants in pre-merge tests.
    * Percentage of Revisions with pre-merge tests executed
-    * Number of 30-day active committers on master and Phabricator.
+    * Number of 30-day active committers on main and Phabricator.
 * Builds - See how the infrastructure is doing.
    * Time between upload of diff until build results available.
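One of these metrics, reverts on `main` within a time window, can be collected with GitPython in much the same way as the stats scripts further down in this commit; a simplified, hypothetical sketch:

```python
import datetime
import git  # GitPython

def count_reverts_on_main(repo_path: str, days: int = 30) -> int:
    """Count commits on 'main' whose subject marks them as reverts."""
    repo = git.Repo(repo_path)
    cutoff = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=days)
    reverts = 0
    for commit in repo.iter_commits('main'):
        if commit.committed_datetime < cutoff:
            break
        # git revert prefixes the subject with "Revert "; this misses hand-written reverts.
        if commit.summary.startswith('Revert '):
            reverts += 1
    return reverts
```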


@@ -201,8 +201,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# ds = jobs[jobs['pipeline'] == 'llvm-master-build'][jobs['step_key'] == 'windows'][jobs['state']=='passed'][~jobs['agent_name'].str.startswith('buildkite-')][jobs['started_at'] > np.datetime64('2020-01-22')]\n",
-    "ds = jobs[jobs['pipeline'] == 'llvm-master-build'][jobs['step_key'] == 'windows'][jobs['state']=='passed'][~jobs['agent_name'].str.startswith('buildkite-')][jobs['started_at'] > np.datetime64('2020-01-22')]\n",
+    "# ds = jobs[jobs['pipeline'] == 'llvm-main-build'][jobs['step_key'] == 'windows'][jobs['state']=='passed'][~jobs['agent_name'].str.startswith('buildkite-')][jobs['started_at'] > np.datetime64('2020-01-22')]\n",
+    "ds = jobs[jobs['pipeline'] == 'llvm-main-build'][jobs['step_key'] == 'windows'][jobs['state']=='passed'][~jobs['agent_name'].str.startswith('buildkite-')][jobs['started_at'] > np.datetime64('2020-01-22')]\n",
    "ds = ds.drop_duplicates()\n",
    "# remove one slowest run (repo checkout)\n",
    "# t = ds.loc[ds.groupby([\"agent_name\"])[\"run_duration\"].idxmax()]\n",


@@ -14,7 +14,7 @@
 # limitations under the License.
 # -----------------------------------------------------------------------------
-# This script will collect all breakages of the master branch builds from
+# This script will collect all breakages of the main branch builds from
 # buildkite and format the results nicely.
 # Arguments:
 #   llvm-path : folder where the LLVM checkout is kept
@@ -107,7 +107,7 @@ if __name__ == '__main__':
     bk = BuildKiteMasterStats(args.llvm_path, args.token)
     if not os.path.exists(CACHE_FILE):
-        results = bk.get_stats('llvm-project','llvm-master-build')
+        results = bk.get_stats('llvm-project','llvm-main-build')
         bk.save_results(CACHE_FILE, results)
     builds = bk.get_builds(CACHE_FILE)


@@ -70,7 +70,7 @@ def get_reverts_per_day(repo_path: str, max_age: datetime.datetime) -> RepoStats
     repo.git.fetch()
     diff_regex = re.compile(r'^Differential Revision: https:\/\/reviews\.llvm\.org\/(.*)$', re.MULTILINE)
-    for commit in repo.iter_commits('master'):
+    for commit in repo.iter_commits('main'):
         if commit.committed_datetime < max_age:
             break
         stats.commits += 1


@@ -121,7 +121,7 @@ class RepoStats:
         self.commit_by_author_domain = dict() # type: Dict[str, List[MyCommit]]
     def parse_repo(self, maxage: datetime.datetime):
-        for commit in self.repo.iter_commits('master'):
+        for commit in self.repo.iter_commits('main'):
             if commit.committed_datetime < maxage:
                 break
             mycommit = MyCommit(commit)
@@ -262,7 +262,7 @@ class RepoStats:
             True: {},
             False: {},
         } # type: Dict[bool, Dict[str, int]]
-        for commit in self.repo.iter_commits('master'):
+        for commit in self.repo.iter_commits('main'):
             if commit.committed_datetime < maxage:
                 break
             mycommit = MyCommit(commit)
@@ -295,7 +295,7 @@ class RepoStats:
             True: {b: 0 for b in buckets},
             False: {b: 0 for b in buckets},
         } # type: Dict[bool, Dict[int, int]]
-        for commit in self.repo.iter_commits('master'):
+        for commit in self.repo.iter_commits('main'):
             if commit.committed_datetime < maxage:
                 break
             mycommit = self.commit_by_hash[commit.hexsha]


@@ -127,7 +127,7 @@ class ApplyPatch:
                      f'instead of resolved "{base_commit}"')
             base_commit = self.find_commit(self.base_revision)
         if base_commit is None:
-            base_commit = self.repo.heads['master'].commit
+            base_commit = self.repo.heads['main'].commit
             annotate(f"Cannot find a base git revision. Will use current HEAD.",
                      style='warning', context='patch_diff')
         self.create_branch(base_commit)
@@ -161,23 +161,27 @@ class ApplyPatch:
         As origin is disjoint from upstream, it needs to be updated by this script.
         """
         logging.info('Syncing local, origin and upstream...')
-        self.repo.git.clean('-ffxdq')
-        self.repo.git.reset('--hard')
-        self.repo.git.fetch('--all')
-        self.repo.git.checkout('master')
         if 'upstream' not in self.repo.remotes:
             self.repo.create_remote('upstream', url=LLVM_GITHUB_URL)
         self.repo.remotes.upstream.fetch()
-        self.repo.git.pull('origin', 'master')
-        self.repo.git.pull('upstream', 'master')
+        self.repo.git.clean('-ffxdq')
+        self.repo.git.reset('--hard')
+        self.repo.git.fetch('--all')
+        if self.find_commit('main') is None:
+            origin = self.repo.remotes.origin
+            self.repo.create_head('main', origin.refs.main)
+            self.repo.heads.main.set_tracking_branch(origin.refs.main)
+        self.repo.heads.main.checkout()
+        self.repo.git.pull('origin', 'main')
+        self.repo.git.pull('upstream', 'main')
         if self.push_branch:
-            self.repo.git.push('origin', 'master')
+            self.repo.git.push('origin', 'main')
     @backoff.on_exception(backoff.expo, Exception, max_tries=5, logger='', factor=3)
     def find_commit(self, rev):
         try:
             return self.repo.commit(rev)
-        except ValueError as e:
+        except:
             return None
     @backoff.on_exception(backoff.expo, Exception, max_tries=5, logger='', factor=3)
@@ -297,7 +301,7 @@ class ApplyPatch:
         rev = self.base_revision
         age_limit = datetime.datetime.now() - APPLIED_SCAN_LIMIT
         if rev == 'auto': # FIXME: use revison that created the branch
-            rev = 'master'
+            rev = 'main'
         for commit in self.repo.iter_commits(rev):
             if datetime.datetime.fromtimestamp(commit.committed_date) < age_limit:
                 break


@@ -50,7 +50,7 @@ class Phab2Github:
     def sync(self):
         """Sync Phabricator to Github."""
         _LOGGER.info('Starting sync...')
-        self._refresh_master()
+        self._refresh_main()
         self._delete_phab_branches()
         revisions = self.phab_wrapper.get_revisions()
         pull_requests = {p.title: p for p in self.github_repo.get_pulls(state='open')}
@@ -70,11 +70,11 @@ class Phab2Github:
                 pr = self.github_repo.create_pull(title=revision.pr_title,
                                                   body=revision.pr_summary,
                                                   head=revision.branch_name,
-                                                  base='master')
+                                                  base='main')
                 _LOGGER.info(pr.html_url)
         _LOGGER.info('Sync completed.')
-    def _refresh_master(self):
+    def _refresh_main(self):
         """Clone/update local git repo."""
         if not os.path.exists(self.workdir):
             os.mkdir(self.workdir)
@@ -83,16 +83,16 @@ class Phab2Github:
             _LOGGER.info('pulling origin and upstream...')
             self.repo = git.Repo(self.llvm_dir)
             self.repo.git.fetch('--all')
-            self.repo.git.checkout('master')
-            self.repo.git.pull('upstream', 'master')
-            self.repo.git.push('origin', 'master')
+            self.repo.git.checkout('main')
+            self.repo.git.pull('upstream', 'main')
+            self.repo.git.push('origin', 'main')
         else:
             _LOGGER.info('cloning repository...')
             git.Repo.clone_from(MY_GITHUB_URL, self.llvm_dir)
             self.repo = git.Repo(self.llvm_dir)
             self.repo.create_remote('upstream', url=LLVM_GITHUB_URL)
             self.repo.remotes.upstream.fetch()
-        _LOGGER.info('refresh of master branch completed')
+        _LOGGER.info('refresh of main branch completed')
     def create_branches_for_revision(self, revision: Revision):
         """Create branches for a Revision and it's Diffs.
@@ -110,10 +110,10 @@ class Phab2Github:
             try:
                 self.apply_patch(diff, patch)
             except ApplyPatchException as e:
-                # TODO: retry on master if this fails
+                # TODO: retry on main if this fails
                 _LOGGER.error('Could not apply patch for Diff {}. Deleting branch'.format(diff.id))
                 _LOGGER.exception(e)
-                self.repo.heads['master'].checkout()
+                self.repo.heads['main'].checkout()
                 self.repo.delete_head(diff.branch_name)
         diffs = [d for d in revision.sorted_diffs if self._has_branch(d)]
@@ -143,15 +143,15 @@ class Phab2Github:
         """Create a branch for diff."""
         base_hash = diff.base_hash
         if base_hash is None:
-            base_hash = 'upstream/master'
+            base_hash = 'upstream/main'
         _LOGGER.info('creating branch {} based on {}...'.format(diff.branch_name, base_hash))
         try:
             new_branch = self.repo.create_head(diff.branch_name, base_hash)
         except ValueError:
-            # commit hash not found, try again with master
+            # commit hash not found, try again with main
             _LOGGER.warning('commit hash {} not found in upstream repository. '
-                            'Trying master instead...'.format(diff.branch_name, base_hash))
-            base_hash = 'upstream/master'
+                            'Trying main instead...'.format(diff.branch_name, base_hash))
+            base_hash = 'upstream/main'
             new_branch = self.repo.create_head(diff.branch_name, base_hash)
         self.repo.head.reference = new_branch
         self.repo.head.reset(index=True, working_tree=True)
@@ -184,7 +184,7 @@ class Phab2Github:
     def _delete_phab_branches(self):
         """Delete all branches sarting with 'phab-'."""
         _LOGGER.info('Deleting local Phabricator-relates branches...')
-        self.repo.git.checkout('master')
+        self.repo.git.checkout('main')
         for branch in [b for b in self.repo.heads if b.name.startswith('phab-')]:
             _LOGGER.info('Deleding branch {}'.format(branch))
             self.repo.git.branch('-D', branch.name)


@@ -88,13 +88,13 @@ class ApplyPatch:
             subprocess.check_call('git reset --hard {}'.format(self.git_hash),
                                   stdout=sys.stdout, stderr=sys.stderr, shell=True)
         except subprocess.CalledProcessError:
-            print('WARNING: checkout of hash failed, using master branch instead.')
+            print('WARNING: checkout of hash failed, using main branch instead.')
             self.msg += [
                 'Could not check out parent git hash "{}". It was not found in '
                 'the repository. Did you configure the "Parent Revision" in '
                 'Phabricator properly? Trying to apply the patch to the '
-                'master branch instead...'.format(self.git_hash)]
-            subprocess.check_call('git checkout master', stdout=sys.stdout,
+                'main branch instead...'.format(self.git_hash)]
+            subprocess.check_call('git checkout main', stdout=sys.stdout,
                                   stderr=sys.stderr, shell=True)
         subprocess.check_call('git show -s', stdout=sys.stdout,
                               stderr=sys.stderr, shell=True)

scripts/pipeline_main.py (new executable file, 49 lines)

@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+# Copyright 2020 Google LLC
+#
+# Licensed under the the Apache License v2.0 with LLVM Exceptions (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://llvm.org/LICENSE.txt
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+from steps import generic_linux, generic_windows, from_shell_output
+import yaml
+steps_generators = [
+    # TODO: enable when we have enough mac agents
+    # '${BUILDKITE_BUILD_CHECKOUT_PATH}/libcxx/utils/ci/buildkite-pipeline-snapshot.sh',
+]
+if __name__ == '__main__':
+    scripts_refspec = os.getenv("ph_scripts_refspec", "main")
+    no_cache = os.getenv('ph_no_cache') is not None
+    projects = os.getenv('ph_projects', 'clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;openmp;polly')
+    log_level = os.getenv('ph_log_level', 'WARNING')
+    notify_emails = list(filter(None, os.getenv('ph_notify_emails', '').split(',')))
+    steps = []
+    steps.extend(generic_linux(
+        os.getenv('ph_projects', 'clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;openmp;polly'),
+        False))
+    # FIXME: openmp is removed as it constantly fails.
+    # TODO: Make this project list be evaluated through "choose_projects"(? as now we define "all" and exclusions in
+    # two placess).
+    steps.extend(generic_windows(
+        os.getenv('ph_projects', 'clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;polly')))
+    for gen in steps_generators:
+        steps.extend(from_shell_output(gen))
+    notify = []
+    for e in notify_emails:
+        notify.append({'email': e})
+    print(yaml.dump({'steps': steps, 'notify': notify}))
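As a usage note, the bootstrap step shown earlier pipes the output of generators like this into `buildkite-agent pipeline upload`. The shape of what the script prints can be illustrated with stand-in values (the real `steps` entries come from `generic_linux()`/`generic_windows()` and carry commands, agents, artifacts, and so on):

```python
import yaml

# Hypothetical value of ph_notify_emails, parsed the same way as in the script above.
notify_emails = list(filter(None, 'someone@example.com'.split(',')))
notify = [{'email': e} for e in notify_emails]

# Stand-in for the generated steps list.
steps = [{'label': ':linux: build and test', 'agents': {'queue': 'linux'}}]

print(yaml.dump({'steps': steps, 'notify': notify}))
```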


@@ -24,7 +24,7 @@ steps_generators = [
 ]
 if __name__ == '__main__':
-    scripts_refspec = os.getenv("ph_scripts_refspec", "master")
+    scripts_refspec = os.getenv("ph_scripts_refspec", "main")
     no_cache = os.getenv('ph_no_cache') is not None
     projects = os.getenv('ph_projects', 'clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;openmp;polly')
     log_level = os.getenv('ph_log_level', 'WARNING')


@@ -27,7 +27,7 @@ steps_generators = [
 ]
 if __name__ == '__main__':
-    scripts_refspec = os.getenv("ph_scripts_refspec", "master")
+    scripts_refspec = os.getenv("ph_scripts_refspec", "main")
     diff_id = os.getenv("ph_buildable_diff", "")
     no_cache = os.getenv('ph_no_cache') is not None
     projects = os.getenv('ph_projects', 'detect')


@@ -26,7 +26,7 @@ import yaml
 def generic_linux(projects: str, check_diff: bool) -> List:
     if os.getenv('ph_skip_linux') is not None:
         return []
-    scripts_refspec = os.getenv("ph_scripts_refspec", "master")
+    scripts_refspec = os.getenv("ph_scripts_refspec", "main")
     no_cache = os.getenv('ph_no_cache') is not None
     log_level = os.getenv('ph_log_level', 'WARNING')
     linux_agents = {'queue': 'linux'}
@@ -79,7 +79,7 @@ def generic_linux(projects: str, check_diff: bool) -> List:
 def generic_windows(projects: str) -> List:
     if os.getenv('ph_skip_windows') is not None:
         return []
-    scripts_refspec = os.getenv("ph_scripts_refspec", "master")
+    scripts_refspec = os.getenv("ph_scripts_refspec", "main")
     no_cache = os.getenv('ph_no_cache') is not None
     log_level = os.getenv('ph_log_level', 'WARNING')
     clear_sccache = 'powershell -command "sccache --stop-server; echo \\$env:SCCACHE_DIR; ' \