#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs all checks on a buildkite agent.
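# Example invocation (the script filename is illustrative; the flags correspond to
# the argparse options defined below):
#   ./premerge_checks.py --projects detect --check-clang-format --check-clang-tidy --filter-output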
import argparse
import json
import logging
import os
import pathlib
import re
import shutil
import sys
import time
from functools import partial
from typing import Callable

import clang_format_report
import clang_tidy_report
import run_cmake
import test_results_report
from buildkite_utils import upload_file
from exec_utils import watch_shell, if_not_matches, tee
from phabtalk.phabtalk import Report, PhabTalk, Step


def ninja_all_report(step: Step, report: Report, filter_output: bool):
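    """Build all targets with `ninja all`.

    stdout/stderr are teed to ninja-all.log in the artifacts directory; when
    filter_output is set, routine progress lines (Building/Linking/...) are
    suppressed on the console. On failure the log is attached to the report.
    """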
    print('Full log will be available in Artifacts "ninja-all.log"', flush=True)
    step.reproduce_commands.append('ninja all')
    with open(f'{artifacts_dir}/ninja-all.log', 'wb') as f:
        w = sys.stdout.buffer.write
        if filter_output:
            r = re.compile(r'^\[.*] (Building|Linking|Linting|Copying|Generating|Creating)')
            w = partial(if_not_matches, write=sys.stdout.buffer.write, regexp=r)
        rc = watch_shell(
            partial(tee, write1=w, write2=f.write),
            partial(tee, write1=sys.stderr.buffer.write, write2=f.write),
            'ninja all', cwd=build_dir)
        logging.debug(f'ninja all: returned {rc}')
        step.set_status_from_exit_code(rc)
        if not step.success:
            report.add_artifact(artifacts_dir, 'ninja-all.log', 'build failed')


def ninja_check_all_report(step: Step, report: Report, filter_output: bool):
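    """Run `ninja check-all` and report test results.

    Output is teed to ninja-check-all.log in the artifacts directory, optionally
    filtering progress and PASS/XFAIL/UNSUPPORTED lines from the console. Test
    results are read from test-results.xml; on failure the log is attached to
    the report with a summary of how many tests failed.
    """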
    print('Full log will be available in Artifacts "ninja-check-all.log"', flush=True)
    step.reproduce_commands.append('ninja check-all')
    with open(f'{artifacts_dir}/ninja-check-all.log', 'wb') as f:
        w = sys.stdout.buffer.write
        if filter_output:
            r = re.compile(r'^(\[.*] (Building|Linking|Generating)|(PASS|XFAIL|UNSUPPORTED):)')
            w = partial(if_not_matches, write=sys.stdout.buffer.write, regexp=r)
        rc = watch_shell(
            partial(tee, write1=w, write2=f.write),
            partial(tee, write1=sys.stderr.buffer.write, write2=f.write),
            'ninja check-all', cwd=build_dir)
        logging.debug(f'ninja check-all: returned {rc}')
        step.set_status_from_exit_code(rc)
    test_results_report.run(build_dir, 'test-results.xml', step, report)
    if not step.success:
        message = 'tests failed'
        f = report.test_stats['fail']
        if f == 1:
            message = '1 test failed'
        if f > 1:
            message = f'{f} tests failed'
        report.add_artifact(artifacts_dir, 'ninja-check-all.log', message)


def run_step(name: str, report: Report, thunk: Callable[[Step, Report], None]) -> Step:
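    """Run a single check as a separate section of the Buildkite log.

    Records the step name, duration and status in the report; a failed section
    is expanded in the log ('^^^ +++').
    """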
    start = time.time()
    print(f'--- {name}', flush=True)  # New section in Buildkite log.
    step = Step()
    step.name = name
    thunk(step, report)
    step.duration = time.time() - start
    # Expand section if it failed.
    if not step.success:
        print('^^^ +++', flush=True)
    report.steps.append(step)
    return step


def cmake_report(projects: str, step: Step, _: Report):
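    """Configure the build with CMake for the selected projects.

    Sets the global build_dir, copies the artifacts produced by run_cmake into
    the artifacts directory and records the reproduction commands on the step.
    """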
    global build_dir
    cmake_result, build_dir, cmake_artifacts, commands = run_cmake.run(projects, os.getcwd())
    for file in cmake_artifacts:
        if os.path.exists(file):
            shutil.copy2(file, artifacts_dir)
    step.set_status_from_exit_code(cmake_result)
    step.reproduce_commands = commands


def as_dict(obj):
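    """json.dump helper: serialize via obj.toJSON() if available, else obj.__dict__."""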
    try:
        return obj.toJSON()
    except AttributeError:
        return obj.__dict__


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs premerge checks')
    parser.add_argument('--log-level', type=str, default='WARNING')
    parser.add_argument('--check-clang-format', action='store_true')
    parser.add_argument('--check-clang-tidy', action='store_true')
    parser.add_argument('--filter-output', action='store_true')
    parser.add_argument('--projects', type=str, default='detect',
                        help="Projects to select, either a list of projects like 'clang;libc', or "
                             "'detect' to automatically infer projects from the diff, or "
                             "'default' to add all enabled projects")
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
    build_dir = ''
    step_key = os.getenv("BUILDKITE_STEP_KEY")
    scripts_dir = pathlib.Path(__file__).parent.absolute()
    artifacts_dir = os.path.join(os.getcwd(), 'artifacts')
    os.makedirs(artifacts_dir, exist_ok=True)
    report_path = f'{step_key}_result.json'
    report = Report()
    report.os = f'{os.getenv("BUILDKITE_AGENT_META_DATA_OS")}'
    report.name = step_key
    report.success = False
    # Create report with failure in case something below fails.
    with open(report_path, 'w') as f:
        json.dump(report.__dict__, f, default=as_dict)
    report.success = True
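    # Run the checks: 'ninja all' only runs if cmake succeeded, and 'ninja check-all'
    # only if 'ninja all' succeeded; clang-tidy and clang-format run when requested.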
    cmake = run_step('cmake', report, lambda s, r: cmake_report(args.projects, s, r))
    if cmake.success:
        ninja_all = run_step('ninja all', report, partial(ninja_all_report, filter_output=args.filter_output))
        if ninja_all.success:
            run_step('ninja check-all', report, partial(ninja_check_all_report, filter_output=args.filter_output))
    if args.check_clang_tidy:
        run_step('clang-tidy', report,
                 lambda s, r: clang_tidy_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-tidy.ignore'), s, r))
    if args.check_clang_format:
        run_step('clang-format', report,
                 lambda s, r: clang_format_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-format.ignore'), s, r))
    logging.debug(report)
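    # Print a human-readable summary and the commands needed to reproduce the build locally.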
    print('+++ Summary', flush=True)
    for s in report.steps:
        mark = 'OK '
        if not s.success:
            report.success = False
            mark = 'FAIL '
        msg = ''
        if len(s.messages):
            msg = ': ' + '\n '.join(s.messages)
        print(f'{mark} {s.name}{msg}', flush=True)
    print('--- Reproduce build locally', flush=True)
    print(f'git clone {os.getenv("BUILDKITE_REPO")} llvm-project')
    print('cd llvm-project')
    print(f'git checkout {os.getenv("BUILDKITE_COMMIT")}')
    for s in report.steps:
        if len(s.reproduce_commands) == 0:
            continue
        print('\n'.join(s.reproduce_commands), flush=True)
    print('', flush=True)
    if not report.success:
        print('^^^ +++', flush=True)
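    # Report results back to Phabricator when a build target PHID is provided in the environment.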
    ph_target_phid = os.getenv('ph_target_phid')
    if ph_target_phid is not None:
        phabtalk = PhabTalk(os.getenv('CONDUIT_TOKEN'))
        for u in report.unit:
            u['engine'] = step_key
        phabtalk.update_build_status(ph_target_phid, True, report.success, report.lint, report.unit)
        for a in report.artifacts:
            url = upload_file(a['dir'], a['file'])
            if url is not None:
                phabtalk.maybe_add_url_artifact(ph_target_phid, url, f'{a["name"]} ({step_key})')
    else:
        logging.warning('ph_target_phid is not specified. Will not update the build status in Phabricator')
    with open(report_path, 'w') as f:
        json.dump(report.__dict__, f, default=as_dict)

    if not report.success:
        print('Build completed with failures', flush=True)
        exit(1)