return exit status of ninja command
- Fix for #207. Previously grep exited with 1 and failed the build when all lines from ninja were filtered out; piping could also hide the exit code of the initial command. Now "tee" and "grep" are implemented at the Python level and we get the exit code of the original command.
- Fixed the default value of "scripts_branch"; print the scripts commit.
- Added an option to filter ninja output (on by default, as before).
- Copied cache usage reporting and error handling from the "master" branch build to the diff checks (useful for debugging).
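For context, a minimal sketch of the exit-status problem being fixed (illustrative only, not taken from the commit; "echo" stands in for ninja):

    import subprocess

    # A shell pipeline reports the status of its last command, so grep both
    # masks a failing ninja and exits 1 itself when every line is filtered out:
    r = subprocess.run('echo "[1/2] Linking foo" | grep -v Linking', shell=True)
    print(r.returncode)  # 1, even though the first command succeeded

    # The new exec_utils.watch_shell below reads stdout/stderr line by line,
    # doing the "tee" and the filtering in Python, and returns the exit status
    # of the original command itself.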
parent 01ccfca7b2
commit 89d294ef42
4 changed files with 144 additions and 32 deletions
@@ -17,40 +17,62 @@ import os
import yaml

if __name__ == '__main__':
script_branch = os.getenv("scripts_branch", "master")
scripts_branch = os.getenv("scripts_branch", "master")
queue_prefix = os.getenv("ph_queue_prefix", "")
diff_id = os.getenv("ph_buildable_diff", "")

no_cache = os.getenv('ph_no_cache') is not None
filter_output = '--filter-output' if os.getenv('ph_no_filter_output') is None else ''
projects = os.getenv('ph_projects', 'detect')
log_level = os.getenv('ph_log_level', 'WARNING')
steps = []
linux_buld_step = {
'label': ':linux: build and test linux',
'key': 'linux',
'commands': [
'set -euo pipefail',
'ccache --clear' if no_cache else '',
'ccache --zero-stats',
'mkdir -p artifacts',
'dpkg -l >> artifacts/packages.txt',
'export SRC=${BUILDKITE_BUILD_PATH}/llvm-premerge-checks',
'rm -rf ${SRC}',
'git clone --depth 1 --branch ${scripts_branch} https://github.com/google/llvm-premerge-checks.git ${SRC}',
f'git clone --depth 1 --branch {scripts_branch} https://github.com/google/llvm-premerge-checks.git '
'${SRC}',
'echo "llvm-premerge-checks commit"',
'git rev-parse HEAD',
'set +e',
# Add link in review to the build.
'${SRC}/scripts/phabtalk/add_url_artifact.py '
'--phid="$ph_target_phid" '
'--url="$BUILDKITE_BUILD_URL" '
'--name="Buildkite build"',
'${SRC}/scripts/premerge_checks.py --check-clang-format --check-clang-tidy',
'${SRC}/scripts/premerge_checks.py --check-clang-format --check-clang-tidy '
f'--projects="{projects}" --log-level={log_level} {filter_output}',
'EXIT_STATUS=\\$?',
'echo "--- ccache stats"',
'ccache --show-stats',
'exit \\$EXIT_STATUS',
],
'artifact_paths': ['artifacts/**/*', '*_result.json'],
'agents': {'queue': f'{queue_prefix}linux'},
'timeout_in_minutes': 120,
}
clear_sccache = 'powershell -command "sccache --stop-server; ' \
'Remove-Item -Recurse -Force -ErrorAction Ignore $env:SCCACHE_DIR; ' \
'sccache --start-server"'
windows_buld_step = {
'label': ':windows: build and test windows',
'key': 'windows',
'commands': [
clear_sccache if no_cache else '',
'sccache --zero-stats',
'set SRC=%BUILDKITE_BUILD_PATH%/llvm-premerge-checks',
'rm -rf %SRC%',
'git clone --depth 1 --branch %scripts_branch% https://github.com/google/llvm-premerge-checks.git %SRC%',
'powershell -command "%SRC%/scripts/premerge_checks.py; '
f'git clone --depth 1 --branch {scripts_branch} https://github.com/google/llvm-premerge-checks.git %SRC%',
'echo "llvm-premerge-checks commit"',
'git rev-parse HEAD',
'powershell -command "'
f'%SRC%/scripts/premerge_checks.py --projects=\'{projects}\' --log-level={log_level} {filter_output}; '
'\\$exit=\\$?;'
'sccache --show-stats;'
'if (\\$exit) {'
@@ -75,7 +97,8 @@ if __name__ == '__main__':
'buildkite-agent artifact download "*_result.json" .',
'export SRC=${BUILDKITE_BUILD_PATH}/llvm-premerge-checks',
'rm -rf ${SRC}',
'git clone --depth 1 --branch ${scripts_branch} https://github.com/google/llvm-premerge-checks.git ${SRC}',
f'git clone --depth 1 --branch {scripts_branch} https://github.com/google/llvm-premerge-checks.git '
'${SRC}',
'${SRC}/scripts/buildkite/summary.py',
],
'allow_dependency_failure': True,
@@ -17,9 +17,10 @@ import os
import yaml

if __name__ == '__main__':
script_branch = os.getenv("scripts_branch", "master")
scripts_branch = os.getenv("scripts_branch", "master")
queue_prefix = os.getenv("ph_queue_prefix", "")
no_cache = os.getenv('ph_no_cache', '') != ''
no_cache = os.getenv('ph_no_cache') is not None
filter_output = '--filter-output' if os.getenv('ph_no_filter_output') is None else ''
projects = os.getenv('ph_projects', 'clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;openmp;polly')
log_level = os.getenv('ph_log_level', 'WARNING')
steps = []
@@ -34,9 +35,12 @@ if __name__ == '__main__':
'dpkg -l >> artifacts/packages.txt',
'export SRC=${BUILDKITE_BUILD_PATH}/llvm-premerge-checks',
'rm -rf ${SRC}',
'git clone --depth 1 --branch ${scripts_branch} https://github.com/google/llvm-premerge-checks.git ${SRC}',
'set -eo pipefail',
f'${{SRC}}/scripts/premerge_checks.py --projects="{projects}" --log-level={log_level}',
f'git clone --depth 1 --branch {scripts_branch} https://github.com/google/llvm-premerge-checks.git '
'${SRC}',
'echo "llvm-premerge-checks commit"',
'git rev-parse HEAD',
'set +e',
f'${{SRC}}/scripts/premerge_checks.py --projects="{projects}" --log-level={log_level} {filter_output}',
'EXIT_STATUS=\\$?',
'echo "--- ccache stats"',
'ccache --show-stats',
@@ -59,8 +63,11 @@ if __name__ == '__main__':
'sccache --zero-stats',
'set SRC=%BUILDKITE_BUILD_PATH%/llvm-premerge-checks',
'rm -rf %SRC%',
'git clone --depth 1 --branch %scripts_branch% https://github.com/google/llvm-premerge-checks.git %SRC%',
f'powershell -command "%SRC%/scripts/premerge_checks.py --projects=\'{projects}\' --log-level={log_level}; '
f'git clone --depth 1 --branch {scripts_branch} https://github.com/google/llvm-premerge-checks.git %SRC%',
'echo "llvm-premerge-checks commit"',
'git rev-parse HEAD',
'powershell -command "'
f'%SRC%/scripts/premerge_checks.py --projects=\'{projects}\' --log-level={log_level} {filter_output}; '
'\\$exit=\\$?;'
'echo \'--- sccache stats\';'
'sccache --show-stats;'
@@ -76,6 +83,8 @@ if __name__ == '__main__':
'agents': {'queue': f'{queue_prefix}windows'},
'timeout_in_minutes': 120,
}
steps.append(linux_buld_step)
steps.append(windows_buld_step)
if os.getenv('ph_skip_linux') is None:
steps.append(linux_buld_step)
if os.getenv('ph_skip_windows') is None:
steps.append(windows_buld_step)
print(yaml.dump({'steps': steps}))
scripts/exec_utils.py (new file, 64 lines)
@@ -0,0 +1,64 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import os
from asyncio.subprocess import PIPE
from typing import Callable, AnyStr


async def read_stream_and_display(stream, display):
    while True:
        line = await stream.readline()
        if not line:
            break
        display(line) # assume it doesn't block


async def read_and_display(write_stdout, write_stderr, *cmd, **kwargs):
    logging.debug(f'subprocess called with {cmd}; {kwargs}')
    process = await asyncio.create_subprocess_shell(*cmd, stdout=PIPE, stderr=PIPE, **kwargs)
    try:
        await asyncio.gather(
            read_stream_and_display(process.stdout, write_stdout),
            read_stream_and_display(process.stderr, write_stderr))
    except Exception:
        process.kill()
        raise
    finally:
        return await process.wait()


def tee(s: AnyStr, write1: Callable[[AnyStr], None], write2: Callable[[AnyStr], None]):
    write1(s)
    write2(s)


def if_not_matches(s: AnyStr, regexp, write: Callable[[AnyStr], None]):
    x = s
    if isinstance(s, (bytes, bytearray)):
        x = s.decode()
    if regexp.match(x) is None:
        write(s)


def watch_shell(write_stdout, write_stderr, *cmd, **kwargs):
    if os.name == 'nt':
        loop = asyncio.ProactorEventLoop() # Windows
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    rc = loop.run_until_complete(read_and_display(write_stdout, write_stderr, *cmd, **kwargs))
    return rc
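For reference, a minimal sketch of how these helpers compose (assuming exec_utils.py is importable and ninja is on PATH; the real wiring used by the checks is in the hunks below):

    import re
    import sys
    from functools import partial
    from exec_utils import watch_shell, if_not_matches, tee

    # Stream a command, hide "PASS:" lines on the console, keep the full log
    # on disk, and report the command's own exit status.
    drop = re.compile(r'^PASS:')
    with open('ninja-check-all.log', 'wb') as log:
        rc = watch_shell(
            partial(tee, write2=log.write,
                    write1=partial(if_not_matches, regexp=drop, write=sys.stdout.buffer.write)),
            partial(tee, write1=sys.stderr.buffer.write, write2=log.write),
            'ninja check-all')
    print(f'ninja check-all exited with {rc}')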
@@ -19,9 +19,11 @@ import json
import logging
import os
import pathlib
import re
import shutil
import subprocess
import sys
import time
from functools import partial
from typing import Callable

import clang_format_report
@@ -29,29 +31,41 @@ import clang_tidy_report
import run_cmake
import test_results_report
from buildkite.utils import upload_file
from exec_utils import watch_shell, if_not_matches, tee
from phabtalk.add_url_artifact import maybe_add_url_artifact
from phabtalk.phabtalk import Report, PhabTalk, Step


def ninja_all_report(step: Step, _: Report):
def ninja_all_report(step: Step, _: Report, filter_output: bool):
print('Full log will be available in Artifacts "ninja-all.log"', flush=True)
step.reproduce_commands.append('ninja all')
r = subprocess.run(f'ninja all | '
f'tee {artifacts_dir}/ninja-all.log | '
f'grep -vE "\\[.*] (Linking|Linting|Copying|Generating|Creating)"',
shell=True, cwd=build_dir)
logging.debug(f'ninja all: returned {r.returncode}, stderr: "{r.stderr}"')
step.set_status_from_exit_code(r.returncode)
with open(f'{artifacts_dir}/ninja-all.log', 'wb') as f:
w = sys.stdout.buffer.write
if filter_output:
r = re.compile(r'^\[.*] (Building|Linking|Linting|Copying|Generating|Creating)')
w = partial(if_not_matches, write=sys.stdout.buffer.write, regexp=r)
rc = watch_shell(
partial(tee, write1=w, write2=f.write),
partial(tee, write1=sys.stderr.buffer.write, write2=f.write),
'ninja all', cwd=build_dir)
logging.debug(f'ninja all: returned {rc}')
step.set_status_from_exit_code(rc)


def ninja_check_all_report(step: Step, _: Report):
def ninja_check_all_report(step: Step, _: Report, filter_output: bool):
print('Full log will be available in Artifacts "ninja-check-all.log"', flush=True)
step.reproduce_commands.append('ninja check-all')
r = subprocess.run(f'ninja check-all | tee {artifacts_dir}/ninja-check-all.log | '
f'grep -vE "^\\[.*] (Building|Linking|Generating)" | '
f'grep -vE "^(PASS|XFAIL|UNSUPPORTED):"', shell=True, cwd=build_dir)
logging.debug(f'ninja check-all: returned {r.returncode}, stderr: "{r.stderr}"')
step.set_status_from_exit_code(r.returncode)
with open(f'{artifacts_dir}/ninja-check-all.log', 'wb') as f:
w = sys.stdout.buffer.write
if filter_output:
r = re.compile(r'^(\[.*] (Building|Linking|Generating)|(PASS|XFAIL|UNSUPPORTED):)')
w = partial(if_not_matches, write=sys.stdout.buffer.write, regexp=r)
rc = watch_shell(
partial(tee, write1=w, write2=f.write),
partial(tee, write1=sys.stderr.buffer.write, write2=f.write),
'ninja check-all', cwd=build_dir)
logging.debug(f'ninja check-all: returned {rc}')
step.set_status_from_exit_code(rc)
test_results_report.run(build_dir, 'test-results.xml', step, report)
@@ -91,6 +105,7 @@ if __name__ == '__main__':
parser.add_argument('--log-level', type=str, default='WARNING')
parser.add_argument('--check-clang-format', action='store_true')
parser.add_argument('--check-clang-tidy', action='store_true')
parser.add_argument('--filter-output', action='store_true')
parser.add_argument('--projects', type=str, default='detect',
help="Projects to select, either a list or projects like 'clang;libc', or "
"'detect' to automatically infer proejcts from the diff, or "
@@ -113,9 +128,9 @@ if __name__ == '__main__':
report.success = True
cmake = run_step('cmake', report, lambda s, r: cmake_report(args.projects, s, r))
if cmake.success:
ninja_all = run_step('ninja all', report, ninja_all_report)
ninja_all = run_step('ninja all', report, partial(ninja_all_report, filter_output=args.filter_output))
if ninja_all.success:
run_step('ninja check-all', report, ninja_check_all_report)
run_step('ninja check-all', report, partial(ninja_check_all_report, filter_output=args.filter_output))
if args.check_clang_tidy:
run_step('clang-tidy', report,
lambda s, r: clang_tidy_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-tidy.ignore'), s, r))
@@ -159,6 +174,7 @@ if __name__ == '__main__':
logging.warning('No phabricator phid is specified. Will not update the build status in Phabricator')
with open(report_path, 'w') as f:
json.dump(report.__dict__, f, default=as_dict)

if not report.success:
print('Build completed with failures', flush=True)
exit(1)