1
0
Fork 0

Merge branch 'master' of github.com:google/llvm-premerge-checks

This commit is contained in:
Christian Kühnel 2019-12-11 16:48:28 +00:00
commit 78d6dd6d2a
13 changed files with 356 additions and 187 deletions

23
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View file

@ -0,0 +1,23 @@
---
name: Bug report
about: Create a report to help us improve llvm-premerge-checks
title: ""
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
Relevant links:
**To Reproduce**
Steps to reproduce the behavior:
1. Submit
2.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.

4
.gitignore vendored
View file

@ -1 +1,3 @@
.vscode/ .vscode/
.idea/
*.iml

View file

@ -23,16 +23,18 @@ pipeline {
PHABRICATOR_HOST = 'https://reviews.llvm.org' PHABRICATOR_HOST = 'https://reviews.llvm.org'
PHAB_LOG = "${WORKSPACE}/build/.phabricator-comment" PHAB_LOG = "${WORKSPACE}/build/.phabricator-comment"
SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts" SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts"
MY_BUILD_ID = "${JOB_BASE_NAME}-${BUILD_NUMBER}" BUILD_ID = "${JOB_BASE_NAME}-${BUILD_NUMBER}"
TARGET_DIR = "/mnt/nfs/results/${MY_BUILD_ID}" TARGET_DIR = "/mnt/nfs/results/${BUILD_ID}"
RESULT_URL = "http://results.llvm-merge-guard.org/${MY_BUILD_ID}" RESULT_URL = "http://results.llvm-merge-guard.org/${BUILD_ID}"
TEST_REPORT = "${WORKSPACE}/build/test-results.xml"
DIFF_JSON = "${WORKSPACE}/build/diff.json" DIFF_JSON = "${WORKSPACE}/build/diff.json"
} }
stages { stages {
stage("build info"){ stage("build info"){
steps { steps {
echo "Building diff ${DIFF_ID} with PHID ${PHID}" echo "Building diff ${DIFF_ID} with PHID ${PHID}"
script {
currentBuild.displayName = "diff ${DIFF_ID}"
}
} }
} }
stage("git checkout"){ stage("git checkout"){
@ -44,8 +46,10 @@ pipeline {
{ {
git url: 'https://github.com/google/llvm-premerge-checks.git' git url: 'https://github.com/google/llvm-premerge-checks.git'
} }
sh 'rm -rf build || true'
sh 'mkdir -p build' sh 'mkdir -p build'
} sh 'mkdir -p "${TARGET_DIR}"'
}
} }
stage('arc patch'){ stage('arc patch'){
steps { steps {
@ -67,9 +71,15 @@ pipeline {
sh "${SCRIPT_DIR}/run_ninja.sh check-all" sh "${SCRIPT_DIR}/run_ninja.sh check-all"
} }
} }
stage('linters') {
steps {
sh "${SCRIPT_DIR}/lint.sh"
}
}
} }
post { post {
always { always {
echo "Console log is available at ${RESULT_URL}" echo "Console log is available at ${RESULT_URL}"
dir("${TARGET_DIR}") { dir("${TARGET_DIR}") {
// copy console log to result folder // copy console log to result folder
@ -84,20 +94,15 @@ pipeline {
""" """
} }
/// send results to Phabricator /// send results to Phabricator
sh """
set +x
cat <<-EOF>> ${PHAB_LOG}
Log files: [[${RESULT_URL}/console-log.txt | console-log.txt]], [[${RESULT_URL}/CMakeCache.txt | CMakeCache.txt]]
EOF
"""
sh """${SCRIPT_DIR}/phabtalk/phabtalk.py "${PHID}" "${DIFF_ID}" \ sh """${SCRIPT_DIR}/phabtalk/phabtalk.py "${PHID}" "${DIFF_ID}" \
--conduit-token "${CONDUIT_TOKEN}" \ --conduit-token "${CONDUIT_TOKEN}" \
--test-result-file "${TEST_REPORT}" \ --test-result-file "test-results.xml" \
--comment-file "${PHAB_LOG}" \
--host "${PHABRICATOR_HOST}/api/" \ --host "${PHABRICATOR_HOST}/api/" \
--buildresult ${currentBuild.result} --buildresult ${currentBuild.result} \
--clang-format-patch "clang-format.patch" \
--results-dir "${TARGET_DIR}" \
--results-url "${RESULT_URL}"
""" """
} }
} }
} }

View file

@ -21,7 +21,6 @@ pipeline {
BUILD_ID="${JOB_BASE_NAME}-${BUILD_NUMBER}" BUILD_ID="${JOB_BASE_NAME}-${BUILD_NUMBER}"
TARGET_DIR="/mnt/nfs/results/${BUILD_ID}" TARGET_DIR="/mnt/nfs/results/${BUILD_ID}"
SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts" SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts"
} }
stages { stages {
stage("git checkout"){ stage("git checkout"){
@ -33,7 +32,10 @@ pipeline {
{ {
git url: 'https://github.com/google/llvm-premerge-checks.git' git url: 'https://github.com/google/llvm-premerge-checks.git'
} }
} sh 'rm -rf build || true'
sh 'mkdir -p build'
sh 'mkdir -p "${TARGET_DIR}"'
}
} }
stage('CMake') { stage('CMake') {
steps { steps {

View file

@ -21,7 +21,6 @@ pipeline {
BUILD_ID="${JOB_BASE_NAME}-${BUILD_NUMBER}" BUILD_ID="${JOB_BASE_NAME}-${BUILD_NUMBER}"
TARGET_DIR="/mnt/nfs/results/${BUILD_ID}" TARGET_DIR="/mnt/nfs/results/${BUILD_ID}"
SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts" SCRIPT_DIR = "${WORKSPACE}/llvm-premerge-checks/scripts"
} }
stages { stages {
stage("git checkout"){ stage("git checkout"){
@ -29,6 +28,7 @@ pipeline {
git url: 'https://github.com/llvm/llvm-project.git' git url: 'https://github.com/llvm/llvm-project.git'
powershell 'git clean -fdx' powershell 'git clean -fdx'
powershell 'New-Item -ItemType Directory -Force -Path llvm-premerge-checks' powershell 'New-Item -ItemType Directory -Force -Path llvm-premerge-checks'
powershell 'New-Item -ItemType Directory -Force -Path ${TARGET_DIR}'
dir("llvm-premerge-checks") dir("llvm-premerge-checks")
{ {
git url: 'https://github.com/google/llvm-premerge-checks.git' git url: 'https://github.com/google/llvm-premerge-checks.git'

View file

@ -3,7 +3,7 @@
This repository contains the configuration files for the pre-merge checks for the LLVM project. This github project contains the documentation and the server configuration cluster of build machines that are used to check all incoming commits to the LLVM project. This repository contains the configuration files for the pre-merge checks for the LLVM project. This github project contains the documentation and the server configuration cluster of build machines that are used to check all incoming commits to the LLVM project.
# User documentation # User documentation
see [docs/user_doc.md](docs/user_doc.md) See [docs/user_doc.md](docs/user_doc.md)
# Pre-merge check vision for end of 2019 # Pre-merge check vision for end of 2019
Maria is a developer working on a new idea for the LLVM project. When she submits a new diff to Phabricator (or updates an existing diff), the pre-merge checks are triggered automatically in the cloud. The pre-merge checks run in one configuration (amd64, Debian Testing, clang8) and comprise these steps: Maria is a developer working on a new idea for the LLVM project. When she submits a new diff to Phabricator (or updates an existing diff), the pre-merge checks are triggered automatically in the cloud. The pre-merge checks run in one configuration (amd64, Debian Testing, clang8) and comprise these steps:
@ -59,7 +59,7 @@ On the Jenkins side:
There is no backup of the credentials. If you need to change it, generate a new one and update it in Jenkins and Phabricator. There is no backup of the credentials. If you need to change it, generate a new one and update it in Jenkins and Phabricator.
# Additional Information # Additional Information
* [Playbooks](docs/playbook.yaml) for installing/upgrading * [Playbooks](docs/playbooks.md) for installing/upgrading
* [User documentation](docs/user_doc.md) * [User documentation](docs/user_doc.md)
# License # License

View file

@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# Starts a new instances of a docker image. Example:
# sudo build_run.sh agent-debian-testing-clang8-ssd /bin/bash
set -eux set -eux
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View file

@ -105,3 +105,11 @@ powershell
Invoke-WebRequest -uri 'https://raw.githubusercontent.com/google/llvm-premerge-checks/master/kubernetes/windows_agent_bootstrap.ps1' -OutFile windows_agent_bootstrap.ps1 Invoke-WebRequest -uri 'https://raw.githubusercontent.com/google/llvm-premerge-checks/master/kubernetes/windows_agent_bootstrap.ps1' -OutFile windows_agent_bootstrap.ps1
.\windows_agent_bootstrap.ps1 .\windows_agent_bootstrap.ps1
``` ```
## Testing scripts locally
Build and run agent docker image `sudo build_run.sh agent-debian-testing-clang8-ssd /bin/bash`.
Within a container, set environment variables similar to those in the [pipeline](https://github.com/google/llvm-premerge-checks/blob/master/Jenkins/Phabricator-pipeline/Jenkinsfile).
Additionally, set the `WORKSPACE`, `PHID` and `DIFF_ID` parameters. Set `CONDUIT_TOKEN` to your personal token from `https://reviews.llvm.org/settings/user/<USERNAME>/page/apitokens/`.

37
scripts/lint.sh Executable file
View file

@ -0,0 +1,37 @@
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs clang-format on the changes in the working tree.
# Inputs:  TARGET_DIR, WORKSPACE (environment variables; must be set).
# Outputs: ${TARGET_DIR}/clang-format.patch (only if clang-format has findings).
set -eux

echo "Running linters... ====================================="
cd "${WORKSPACE}"

# Let clang-format apply patches in place: --diff doesn't produce results in
# the format we want, so we apply the fixes and capture them via `git diff`.
python3 /usr/bin/git-clang-format-8 --style=llvm --binary=/usr/bin/clang-format-8

set +e
git diff -U0 --exit-code > "${TARGET_DIR}/clang-format.patch"
# No pipeline here, so plain $? is the `git diff` exit status
# (non-zero means there were formatting differences).
STATUS=$?
set -e

# Drop the patch file if there are no findings.
if [[ "${STATUS}" -eq 0 ]]; then
  rm "${TARGET_DIR}/clang-format.patch"
fi

# Revert the changes made by git-clang-format so the tree is clean again.
git checkout -- .
echo "linters completed ======================================"

View file

@ -1,2 +1,4 @@
This folder contains Python scripts that to Phabricator. This folder contains Python scripts that talk to Phabricator.
They require a few libraries listed in `requirements.txt`.
They require a few libraries listed in `requirements.txt`.
To install the requirements locally run `pip3 install -r requirements.txt`.

View file

@ -19,33 +19,41 @@ build status, a summary and the test reults to Phabricator."""
import argparse import argparse
import os import os
import re
import socket
import time import time
from typing import Optional from typing import Optional
from phabricator import Phabricator
import socket
from lxml import etree
class TestResults: from lxml import etree
from phabricator import Phabricator
class BuildReport:
def __init__(self): def __init__(self):
self.result_type = None # type: str self.comments = []
self.unit = [] #type: List self.success = True
self.working = False
self.unit = [] # type: List
self.lint = []
self.test_stats = { self.test_stats = {
'pass':0, 'pass': 0,
'fail':0, 'fail': 0,
'skip':0 'skip': 0
} # type: Dict[str, int] } # type: Dict[str, int]
class PhabTalk: class PhabTalk:
"""Talk to Phabricator to upload build results.""" """Talk to Phabricator to upload build results.
See https://secure.phabricator.com/conduit/method/harbormaster.sendmessage/
"""
def __init__(self, token: Optional[str], host: Optional[str], dryrun: bool): def __init__(self, token: Optional[str], host: Optional[str], dryrun: bool):
self._phab = None # type: Optional[Phabricator] self._phab = None # type: Optional[Phabricator]
if not dryrun: if not dryrun:
self._phab = Phabricator(token=token, host=host) self._phab = Phabricator(token=token, host=host)
self._phab.update_interfaces() self._phab.update_interfaces()
@property @property
def dryrun(self): def dryrun(self):
return self._phab is None return self._phab is None
@ -58,178 +66,260 @@ class PhabTalk:
result = self._phab.differential.querydiffs(ids=[diff]) result = self._phab.differential.querydiffs(ids=[diff])
return 'D' + result[diff]['revisionID'] return 'D' + result[diff]['revisionID']
def _comment_on_diff(self, diff: str, text: str): def _comment_on_diff(self, diff_id: str, text: str):
"""Add a comment to a differential based on the diff_id""" """Add a comment to a differential based on the diff_id"""
print('Sending comment to diff {}:'.format(diff)) print('Sending comment to diff {}:'.format(diff_id))
print(text) print(text)
self._comment_on_revision(self._get_revision_id(diff), text) self._comment_on_revision(self._get_revision_id(diff_id), text)
def _comment_on_revision(self, revision: str, text: str): def _comment_on_revision(self, revision: str, text: str):
"""Add comment on a differential based on the revision id.""" """Add comment on a differential based on the revision id."""
transactions = [{ transactions = [{
'type' : 'comment', 'type': 'comment',
'value' : text 'value': text
}] }]
if self.dryrun: if self.dryrun:
print('differential.revision.edit =================') print('differential.revision.edit =================')
print('Transactions: {}'.format(transactions)) print('Transactions: {}'.format(transactions))
return return
# API details at # API details at
# https://secure.phabricator.com/conduit/method/differential.revision.edit/ # https://secure.phabricator.com/conduit/method/differential.revision.edit/
self._phab.differential.revision.edit(objectIdentifier=revision, transactions=transactions) self._phab.differential.revision.edit(objectIdentifier=revision,
transactions=transactions)
def _comment_on_diff_from_file(self, diff: str, text_file_path: str, test_results: TestResults, buildresult:str): def submit_report(self, diff_id: str, phid: str, report: BuildReport, build_result: str):
"""Comment on a diff, read text from file.""" """Submit collected report to Phabricator.
header = ''
if test_results.result_type is None:
# do this if there are no test results
header = 'Build result: {} - '.format(buildresult)
else:
header = 'Build result: {} - '.format(test_results.result_type)
header += '{} tests passed, {} failed and {} were skipped.\n'.format(
test_results.test_stats['pass'],
test_results.test_stats['fail'],
test_results.test_stats['skip'],
)
for test_case in test_results.unit:
if test_case['result'] == 'fail':
header += ' failed: {}/{}\n'.format(test_case['namespace'], test_case['name'])
text = ''
if text_file_path is not None and os.path.exists(text_file_path):
with open(text_file_path) as input_file:
text = input_file.read()
if len(header+text) == 0:
print('Comment for Phabricator would be empty. Not posting it.')
return
self._comment_on_diff(diff, header + text)
def _report_test_results(self, phid: str, test_results: TestResults, build_result: str):
"""Report failed tests to phabricator.
Only reporting failed tests as the full test suite is too large to upload.
""" """
# use jenkins build status if possible result_type = 'pass'
result = self._translate_jenkins_status(build_result) if report.working:
# fall back to test results if Jenkins status is not availble result_type = 'working'
if result is None: elif not report.success:
result = test_results.result_type result_type = 'fail'
# If we do not have a proper status: fail the build.
if result is None:
result = 'fail'
if self.dryrun: if self.dryrun:
print('harbormaster.sendmessage =================') print('harbormaster.sendmessage =================')
print('type: {}'.format(result)) print('type: {}'.format(result_type))
print('unit: {}'.format(test_results.unit)) print('unit: {}'.format(report.unit))
return print('lint: {}'.format(report.lint))
else:
_try_call(lambda: self._phab.harbormaster.sendmessage(
buildTargetPHID=phid,
type=result_type,
unit=report.unit,
lint=report.lint))
# API details at if len(report.comments) > 0:
# https://secure.phabricator.com/conduit/method/harbormaster.sendmessage/ _try_call(lambda: self._comment_on_diff(diff_id, '\n\n'.join(report.comments)))
self._phab.harbormaster.sendmessage(buildTargetPHID=phid,
type=result,
unit=test_results.unit)
def _compute_test_results(self, build_result_file: str) -> TestResults:
result = TestResults()
if build_result_file is None: def _parse_patch(patch) -> []:
# If no result file is specified: assume this is intentional """Extract the changed lines from `patch` file.
result.result_type = None The return value is a list of dictionaries {filename, line, diff}.
return result Diff must be generated with -U0 (no context lines).
if not os.path.exists(build_result_file): """
print('Warning: Could not find test results file: {}'.format(build_result_file)) entries = []
result.result_type = None lines = []
return result filename = None
line_number = 0
root_node = etree.parse(build_result_file) for line in patch:
result.result_type = 'pass' match = re.search(r'^(\+\+\+|---) [^/]+/(.*)', line)
if match:
for test_case in root_node.xpath('//testcase'): if len(lines) > 0:
test_result = self._test_case_status(test_case) entries.append({
result.test_stats[test_result] += 1 'filename': filename,
'diff': ''.join(lines),
'line': line_number,
})
lines = []
filename = match.group(2).rstrip('\r\n')
continue
match = re.search(r'^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?', line)
if match:
if len(lines) > 0:
entries.append({
'filename': filename,
'diff': ''.join(lines),
'line': line_number,
})
lines = []
line_number = int(match.group(1))
continue
if line.startswith('+') or line.startswith('-'):
lines.append(line)
if len(lines) > 0:
entries.append({
'filename': filename,
'diff': ''.join(lines),
'line': line_number,
})
return entries
if test_result == 'fail':
failure = test_case.find('failure')
test_result = {
'name' : test_case.attrib['name'],
'namespace' : test_case.attrib['classname'],
'result' : test_result,
'duration' : float(test_case.attrib['time']),
'details' : failure.text
}
result.result_type = 'fail'
result.unit.append(test_result)
return result
@staticmethod def _add_clang_format(report: BuildReport, clang_format_patch: str, results_dir: str,
def _test_case_status(test_case) -> str: results_url: str):
"""Get the status of a test case based on an etree node.""" """Populates results from diff produced by clang format."""
if test_case.find('failure') is not None: if clang_format_patch is None:
return 'fail' return
if test_case.find('skipped') is not None: p = os.path.join(results_dir, clang_format_patch)
return 'skip' ok = True
return 'pass' if os.path.exists(p):
ok = False
diffs = _parse_patch(open(p, 'rt'))
for d in diffs:
lint_message = {
'name': 'Please fix the formatting',
'severity': 'autofix',
'code': 'clang-format',
'path': d['filename'],
'line': d['line'],
'char': 1,
'description': '```\n' + d['diff'] + '\n```',
}
report.lint.append(lint_message)
comment = section_title('clang-format', ok)
if not ok:
comment += 'Please format your changes with clang-format by running `git-clang-format HEAD^` or apply ' \
'this [[ {}/{} | patch ]].'.format(results_url, clang_format_patch)
report.comments.append(comment)
def report_all(self, diff_id: str, ph_id: str, test_result_file: str, comment_file: str, build_result:str):
test_results = self._compute_test_results(test_result_file)
self._report_test_results(ph_id, test_results, build_result)
self._comment_on_diff_from_file(diff_id, comment_file, test_results, build_result)
print('reporting completed.')
@staticmethod def _try_call(call):
def _translate_jenkins_status(jenkins_status: str) -> str: """Tries to call function several times retrying on socked.timeout."""
""" c = 0
Translate the build status form Jenkins to Phabricator.
Jenkins semantics: https://jenkins.llvm-merge-guard.org/pipeline-syntax/globals#currentBuild
Phabricator semantics: https://reviews.llvm.org/conduit/method/harbormaster.sendmessage/
"""
if jenkins_status.lower() == 'success':
return 'pass'
if jenkins_status.lower() == 'null':
return 'working'
return 'fail'
def main():
args = _parse_args()
errorcount = 0
while True: while True:
# retry on connenction problems
try: try:
p = PhabTalk(args.conduit_token, args.host, args.dryrun) call()
p.report_all(args.diff_id, args.ph_id, args.test_result_file, args.comment_file, args.buildresult)
except socket.timeout as e: except socket.timeout as e:
errorcount += 1 c += 1
if errorcount > 5: if c > 5:
print('Connection to Pharicator failed, giving up: {}'.format(e)) print('Connection to Pharicator failed, giving up: {}'.format(e))
raise raise
print('Connection to Pharicator failed, retrying: {}'.format(e)) print('Connection to Pharicator failed, retrying: {}'.format(e))
time.sleep(errorcount*10) time.sleep(c * 10)
break break
def _add_test_results(report: BuildReport, build_result_file: str):
"""Populates results from build test results XML.
Only reporting failed tests as the full test suite is too large to upload.
"""
if build_result_file is None:
return
if not os.path.exists(build_result_file):
print('Warning: Could not find test results file: {}'.format(
build_result_file))
return
root_node = etree.parse(build_result_file)
ok = True
for test_case in root_node.xpath('//testcase'):
test_result = _test_case_status(test_case)
report.test_stats[test_result] += 1
if test_result == 'fail':
ok = False
failure = test_case.find('failure')
test_result = {
'name': test_case.attrib['name'],
'namespace': test_case.attrib['classname'],
'result': test_result,
'duration': float(test_case.attrib['time']),
'details': failure.text
}
report.unit.append(test_result)
report.success = ok and report.success
comment = section_title('Unit tests', ok)
comment += '{} tests passed, {} failed and {} were skipped.\n'.format(
report.test_stats['pass'],
report.test_stats['fail'],
report.test_stats['skip'],
)
for test_case in report.unit:
if test_case['result'] == 'fail':
comment += ' failed: {}/{}\n'.format(test_case['namespace'], test_case['name'])
report.comments.append(comment)
def _add_links_to_artifacts(report: BuildReport, results_dir: str, results_url: str):
"""Comment on a diff, read text from file."""
file_links = []
for f in os.listdir(results_dir):
if not os.path.isfile(os.path.join(results_dir, f)):
continue
file_links.append('[[{0}/{1} | {1}]]'.format(results_url, f))
if len(file_links) > 0:
report.comments.append('[[ {} | Build artifacts ]]: '.format(results_url) + ', '.join(file_links))
def _test_case_status(test_case) -> str:
"""Get the status of a test case based on an etree node."""
if test_case.find('failure') is not None:
return 'fail'
if test_case.find('skipped') is not None:
return 'skip'
return 'pass'
def section_title(title: str, ok: bool) -> str:
return '{} {}: {}. '.format(
'{icon check-circle color=green}' if ok else '{icon times-circle color=red}',
title,
'pass' if ok else 'fail')
def main():
args = _parse_args()
report = BuildReport()
if args.buildresult is not None:
print('Jenkins result: {}'.format(args.buildresult))
if args.buildresult.lower() == 'success':
pass
elif args.buildresult.lower() == 'null':
report.working = True
else:
report.success = False
_add_test_results(report, os.path.join(args.results_dir, args.test_result_file))
_add_clang_format(report, args.clang_format_patch, args.results_dir, args.results_url)
_add_links_to_artifacts(report, args.results_dir, args.results_url)
p = PhabTalk(args.conduit_token, args.host, args.dryrun)
p.submit_report(args.diff_id, args.ph_id, report, args.buildresult)
def _parse_args(): def _parse_args():
parser = argparse.ArgumentParser(description='Write build status back to Phabricator.') parser = argparse.ArgumentParser(
description='Write build status back to Phabricator.')
parser.add_argument('ph_id', type=str) parser.add_argument('ph_id', type=str)
parser.add_argument('diff_id', type=str) parser.add_argument('diff_id', type=str)
parser.add_argument('--comment-file', type=str, dest='comment_file', default=None)
parser.add_argument('--test-result-file', type=str, dest='test_result_file', parser.add_argument('--test-result-file', type=str, dest='test_result_file',
default=os.path.join(os.path.curdir,'test-results.xml')) default='test-results.xml')
parser.add_argument('--conduit-token', type=str, dest='conduit_token', default=None) parser.add_argument('--conduit-token', type=str, dest='conduit_token',
parser.add_argument('--host', type=str, dest='host', default="None", default=None)
help="full URL to API with trailing slash, e.g. https://reviews.llvm.org/api/") parser.add_argument('--host', type=str, dest='host', default="None",
parser.add_argument('--dryrun', action='store_true',help="output results to the console, do not report back to the server") help="full URL to API with trailing slash, e.g. https://reviews.llvm.org/api/")
parser.add_argument('--dryrun', action='store_true',
help="output results to the console, do not report back to the server")
parser.add_argument('--buildresult', type=str, default=None, parser.add_argument('--buildresult', type=str, default=None,
choices=['SUCCESS', 'UNSTABLE', 'FAILURE', 'null']) choices=['SUCCESS', 'UNSTABLE', 'FAILURE', 'null'])
return parser.parse_args() parser.add_argument('--clang-format-patch', type=str, default=None,
dest='clang_format_patch',
help="path to diff produced by git-clang-format, relative to results-dir")
parser.add_argument('--results-dir', type=str, default=None,
dest='results_dir',
help="directory of all build artifacts")
parser.add_argument('--results-url', type=str, default=None,
dest='results_url',
help="public URL to access results directory")
return parser.parse_args()
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View file

@ -14,29 +14,27 @@
# limitations under the License. # limitations under the License.
set -eux set -eux
# Runs Cmake.
# Inputs: CCACHE_PATH, WORKSPACE, TARGET_DIR; $WORKSPACE/build must exist.
# Outputs: $TARGET_DIR/CMakeCache.txt, $WORKSPACE/compile_commands.json (symlink).
echo "Running CMake... ======================================" echo "Running CMake... ======================================"
cd ${WORKSPACE}
rm -rf build || true
mkdir build
cd build
export CC=clang-8 export CC=clang-8
export CXX=clang++-8 export CXX=clang++-8
export LD=LLD export LD=LLD
#TODO: move this to the pipeline cd "$WORKSPACE"/build
TARGET_DIR="/mnt/nfs/results/${JOB_BASE_NAME}-${BUILD_NUMBER}"
mkdir -p ${TARGET_DIR}
set +e set +e
cmake -GNinja ../llvm -DCMAKE_BUILD_TYPE=Release -D LLVM_ENABLE_LLD=ON \ cmake -GNinja ../llvm -DCMAKE_BUILD_TYPE=Release -D LLVM_ENABLE_LLD=ON \
-D LLVM_ENABLE_PROJECTS="clang;clang-tools-extra;libcxx;libcxxabi;lld;libunwind" \ -D LLVM_ENABLE_PROJECTS="clang;clang-tools-extra;libcxx;libcxxabi;lld;libunwind" \
-D LLVM_CCACHE_BUILD=ON -D LLVM_CCACHE_DIR="${CCACHE_PATH}" -D LLVM_CCACHE_MAXSIZE=20G \ -D LLVM_CCACHE_BUILD=ON -D LLVM_CCACHE_DIR="${CCACHE_PATH}" -D LLVM_CCACHE_MAXSIZE=20G \
-D LLVM_ENABLE_ASSERTIONS=ON -DCMAKE_CXX_FLAGS=-gmlt \ -D LLVM_ENABLE_ASSERTIONS=ON -DCMAKE_CXX_FLAGS=-gmlt \
-DLLVM_LIT_ARGS="-v --xunit-xml-output ${WORKSPACE}/build/test-results.xml" -DLLVM_LIT_ARGS="-v --xunit-xml-output ${WORKSPACE}/build/test-results.xml"
RETURN_CODE="${PIPESTATUS[0]}" RETURN_CODE="${PIPESTATUS[0]}"
set -e set -e
#TODO: move this to the Pipeline ln -s "$WORKSPACE"/build/compile_commands.json "$WORKSPACE"
cp CMakeCache.txt ${TARGET_DIR} cp CMakeCache.txt ${TARGET_DIR}
echo "CMake completed ======================================" echo "CMake completed ======================================"
exit ${RETURN_CODE} exit "${RETURN_CODE}"

View file

@ -14,25 +14,24 @@
# limitations under the License. # limitations under the License.
set -eu set -eu
# Runs ninja
# Inputs: TARGET_DIR, WORKSPACE.
# Outputs: $TARGET_DIR/test_results.xml
CMD=$1 CMD=$1
echo "Running ${CMD}... =====================================" echo "Running ninja ${CMD}... ====================================="
cd ${WORKSPACE}
# TODO: move copy operation to pipeline
BUILD_ID="${JOB_BASE_NAME}-${BUILD_NUMBER}"
TARGET_DIR="/mnt/nfs/results/${BUILD_ID}"
ulimit -n 8192 ulimit -n 8192
cd build cd "${WORKSPACE}/build"
set +e set +e
ninja ${CMD} ninja ${CMD}
RETURN_CODE="$?" RETURN_CODE="$?"
set -e set -e
echo "check-all completed ======================================" echo "ninja ${CMD} completed ======================================"
# TODO: move copy operation to pipeline
if test -f "test-results.xml" ; then if test -f "test-results.xml" ; then
cp test-results.xml ${TARGET_DIR} cp test-results.xml "${TARGET_DIR}"
fi fi
exit ${RETURN_CODE} exit ${RETURN_CODE}