commit 72156ea180
12 changed files with 734 additions and 56 deletions
.editorconfig (new file, 148 lines)

@@ -0,0 +1,148 @@
[*]
charset = utf-8
end_of_line = lf
ij_formatter_off_tag = @formatter:off
ij_formatter_on_tag = @formatter:on
ij_formatter_tags_enabled = false
ij_smart_tabs = false

[{*.pyw,*.py,*.pi}]
indent_style = space
insert_final_newline = false
indent_size = 4
max_line_length = 120
tab_width = 4
ij_continuation_indent_size = 8
ij_wrap_on_typing = false
ij_python_align_collections_and_comprehensions = true
ij_python_align_multiline_imports = true
ij_python_align_multiline_parameters = true
ij_python_align_multiline_parameters_in_calls = true
ij_python_blank_line_at_file_end = true
ij_python_blank_lines_after_imports = 1
ij_python_blank_lines_after_local_imports = 0
ij_python_blank_lines_around_class = 1
ij_python_blank_lines_around_method = 1
ij_python_blank_lines_around_top_level_classes_functions = 2
ij_python_blank_lines_before_first_method = 0
ij_python_dict_alignment = 0
ij_python_dict_new_line_after_left_brace = false
ij_python_dict_new_line_before_right_brace = false
ij_python_dict_wrapping = 1
ij_python_from_import_new_line_after_left_parenthesis = false
ij_python_from_import_new_line_before_right_parenthesis = false
ij_python_from_import_parentheses_force_if_multiline = false
ij_python_from_import_trailing_comma_if_multiline = false
ij_python_from_import_wrapping = 1
ij_python_hang_closing_brackets = false
ij_python_keep_blank_lines_in_code = 1
ij_python_keep_blank_lines_in_declarations = 1
ij_python_keep_indents_on_empty_lines = false
ij_python_keep_line_breaks = true
ij_python_new_line_after_colon = false
ij_python_new_line_after_colon_multi_clause = true
ij_python_optimize_imports_always_split_from_imports = false
ij_python_optimize_imports_case_insensitive_order = false
ij_python_optimize_imports_join_from_imports_with_same_source = false
ij_python_optimize_imports_sort_by_type_first = true
ij_python_optimize_imports_sort_imports = true
ij_python_optimize_imports_sort_names_in_from_imports = false
ij_python_space_after_comma = true
ij_python_space_after_number_sign = true
ij_python_space_after_py_colon = true
ij_python_space_before_backslash = true
ij_python_space_before_comma = false
ij_python_space_before_for_semicolon = false
ij_python_space_before_lbracket = false
ij_python_space_before_method_call_parentheses = false
ij_python_space_before_method_parentheses = false
ij_python_space_before_number_sign = true
ij_python_space_before_py_colon = false
ij_python_space_within_empty_method_call_parentheses = false
ij_python_space_within_empty_method_parentheses = false
ij_python_spaces_around_additive_operators = true
ij_python_spaces_around_assignment_operators = true
ij_python_spaces_around_bitwise_operators = true
ij_python_spaces_around_eq_in_keyword_argument = false
ij_python_spaces_around_eq_in_named_parameter = false
ij_python_spaces_around_equality_operators = true
ij_python_spaces_around_multiplicative_operators = true
ij_python_spaces_around_power_operator = true
ij_python_spaces_around_relational_operators = true
ij_python_spaces_around_shift_operators = true
ij_python_spaces_within_braces = false
ij_python_spaces_within_brackets = false
ij_python_spaces_within_method_call_parentheses = false
ij_python_spaces_within_method_parentheses = false
ij_python_use_continuation_indent_for_arguments = false
ij_python_use_continuation_indent_for_collection_and_comprehensions = false
ij_python_wrap_long_lines = false

[{*.sht,*.htm,*.html,*.shtm,*.ng,*.shtml}]
ij_html_add_new_line_before_tags = body,div,p,form,h1,h2,h3
ij_html_align_attributes = true
ij_html_align_text = false
ij_html_attribute_wrap = normal
ij_html_block_comment_at_first_column = true
ij_html_do_not_align_children_of_min_lines = 0
ij_html_do_not_break_if_inline_tags = title,h1,h2,h3,h4,h5,h6,p
ij_html_do_not_indent_children_of_tags = html,body,thead,tbody,tfoot
ij_html_enforce_quotes = false
ij_html_inline_tags = a,abbr,acronym,b,basefont,bdo,big,br,cite,cite,code,dfn,em,font,i,img,input,kbd,label,q,s,samp,select,small,span,strike,strong,sub,sup,textarea,tt,u,var
ij_html_keep_blank_lines = 2
ij_html_keep_indents_on_empty_lines = false
ij_html_keep_line_breaks = true
ij_html_keep_line_breaks_in_text = true
ij_html_keep_whitespaces = false
ij_html_keep_whitespaces_inside = span,pre,textarea
ij_html_line_comment_at_first_column = true
ij_html_new_line_after_last_attribute = never
ij_html_new_line_before_first_attribute = never
ij_html_quote_style = double
ij_html_remove_new_line_before_tags = br
ij_html_space_after_tag_name = false
ij_html_space_around_equality_in_attribute = false
ij_html_space_inside_empty_tag = false
ij_html_text_wrap = normal

[{*.vsl,*.vm,*.ft}]
ij_vtl_keep_indents_on_empty_lines = false

[{*.xjsp,*.tag,*.jsp,*.jsf,*.jspf,*.tagf}]
ij_jsp_jsp_prefer_comma_separated_import_list = false
ij_jsp_keep_indents_on_empty_lines = false

[{*.yml,*.yaml}]
indent_size = 2
ij_yaml_keep_indents_on_empty_lines = false
ij_yaml_keep_line_breaks = true

[{*.zsh,*.bash,*.sh}]
indent_size = 2
tab_width = 2
ij_shell_binary_ops_start_line = false
ij_shell_keep_column_alignment_padding = false
ij_shell_minify_program = false
ij_shell_redirect_followed_by_space = false
ij_shell_switch_cases_indented = false

[{.eslintrc,.babelrc,.stylelintrc,jest.config,bowerrc,*.jsb3,*.jsb2,*.json}]
indent_size = 2
ij_json_keep_blank_lines_in_code = 0
ij_json_keep_indents_on_empty_lines = false
ij_json_keep_line_breaks = true
ij_json_space_after_colon = true
ij_json_space_after_comma = true
ij_json_space_before_colon = true
ij_json_space_before_comma = false
ij_json_spaces_within_braces = false
ij_json_spaces_within_brackets = false
ij_json_wrap_long_lines = false

[{BUILD,WORKSPACE,*.bzl}]
ij_continuation_indent_size = 4
ij_build_keep_indents_on_empty_lines = false

[{spring.schemas,spring.handlers,*.properties}]
ij_properties_align_group_field_declarations = false
@@ -49,7 +49,7 @@ def build():
                             headers=headers)
         app.logger.info('buildkite response: %s %s', response.status_code, response.text)
         rjs = json.loads(response.text)
-        return rjs['id']
+        return rjs['web_url']
     else:
         return "expected POST request"
@@ -23,14 +23,13 @@ if __name__ == '__main__':
     steps = []
     # SCRIPT_DIR is defined in buildkite pipeline step.
     linux_buld_step = {
         'label': 'build linux',
         'key': 'build-linux',
         'commands': [
-            '${SCRIPT_DIR}/run_cmake.py detect',
-            '${SCRIPT_DIR}/run_ninja.py all',
-            '${SCRIPT_DIR}/run_ninja.py check-all',
-            '${SCRIPT_DIR}/lint.sh HEAD~1 ./'],
+            '${SCRIPT_DIR}/premerge_checks.py',
+        ],
+        'artifact_paths': ['artifacts/**/*'],
         'agents': {'queue': queue, 'os': 'linux'}
     }
     steps.append(linux_buld_step)
     print(yaml.dump({'steps': steps}))
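For reference, a minimal sketch of the step this updated generator now emits; the queue value here is an assumption, the real script takes it from its own configuration:

import yaml

queue = 'default'  # assumed value; the generator resolves the real queue name elsewhere
linux_buld_step = {
    'label': 'build linux',
    'key': 'build-linux',
    'commands': [
        '${SCRIPT_DIR}/premerge_checks.py',
    ],
    'artifact_paths': ['artifacts/**/*'],
    'agents': {'queue': queue, 'os': 'linux'},
}
print(yaml.dump({'steps': [linux_buld_step]}))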
scripts/clang_format_report.py (new executable file, 109 lines)

@@ -0,0 +1,109 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import subprocess
import logging

import pathspec
import unidiff

from typing import Tuple, Optional
from phabtalk.phabtalk import Report, CheckResult


def get_diff(base_commit) -> Tuple[bool, str]:
    r = subprocess.run(f'git-clang-format {base_commit}', shell=True)
    logging.debug(f'git-clang-format {r}')
    if r.returncode != 0:
        logging.error(f'git-clang-format returned a non-zero exit code {r.returncode}')
        r = subprocess.run(f'git checkout -- .', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        logging.debug(f'git reset {r}')
        return False, ''
    diff_run = subprocess.run(f'git diff -U0 --no-prefix --exit-code', capture_output=True, shell=True)
    logging.debug(f'git diff {diff_run}')
    r = subprocess.run(f'git checkout -- .', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    logging.debug(f'git reset {r}')
    return True, diff_run.stdout.decode()


def run(base_commit, ignore_config, report: Optional[Report]):
    """Apply clang-format and return if no issues were found."""
    if report is None:
        report = Report()  # For debugging.
    r, patch = get_diff(base_commit)
    if not r:
        report.add_step('clang-format', CheckResult.FAILURE, '')
        return
    add_artifact = False
    patches = unidiff.PatchSet(patch)
    ignore_lines = []
    if ignore_config is not None and os.path.exists(ignore_config):
        ignore_lines = open(ignore_config, 'r').readlines()
    ignore = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, ignore_lines)
    patched_file: unidiff.PatchedFile
    success = True
    for patched_file in patches:
        add_artifact = True
        if ignore.match_file(patched_file.source_file) or ignore.match_file(patched_file.target_file):
            logging.info(f'patch of {patched_file.patch_info} is ignored')
            continue
        hunk: unidiff.Hunk
        for hunk in patched_file:
            lines = [str(x) for x in hunk]
            success = False
            m = 10  # max number of lines to report.
            description = 'please reformat the code\n```\n'
            n = len(lines)
            cut = n > m + 1
            if cut:
                lines = lines[:m]
            description += ''.join(lines) + '\n```'
            if cut:
                description += f'\n{n - m} diff lines are omitted. See full patch.'
            report.add_lint({
                'name': 'clang-format',
                'severity': 'autofix',
                'code': 'clang-format',
                'path': patched_file.source_file,
                'line': hunk.source_start,
                'char': 1,
                'description': description,
            })
    if add_artifact:
        patch_file = 'clang-format.patch'
        with open(patch_file, 'w') as f:
            f.write(patch)
        report.add_artifact(os.getcwd(), patch_file, 'clang-format')
    if success:
        report.add_step('clang-format', CheckResult.SUCCESS, message='')
    else:
        report.add_step(
            'clang-format',
            CheckResult.FAILURE,
            'Please format your changes with clang-format by running `git-clang-format HEAD^` or applying patch.')
    logging.debug(f'report: {report}')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs clang-format against given diff with given commit. '
                                                 'Produces patch and attaches linter comments to a review.')
    parser.add_argument('--base', default='HEAD~1')
    parser.add_argument('--ignore-config', default=None, help='path to file with patterns of files to ignore')
    parser.add_argument('--log-level', type=str, default='INFO')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level)
    run(args.base, args.ignore_config, None)
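A minimal sketch of driving this checker from another script, mirroring how scripts/premerge_checks.py (added later in this commit) calls it; the ignore-file path below is an assumption:

import os
import clang_format_report
from phabtalk.phabtalk import Report

report = Report()
# 'scripts/clang-format.ignore' is an assumed location; premerge_checks.py resolves it
# relative to its own directory.
clang_format_report.run('HEAD~1', os.path.join('scripts', 'clang-format.ignore'), report)
for step in report.steps:
    print(step['title'], step['result'])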
scripts/clang_tidy_report.py (new executable file, 117 lines)

@@ -0,0 +1,117 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import os
import re
import subprocess
from typing import Optional
import pathspec

import ignore_diff
from phabtalk.phabtalk import Report, CheckResult


def run(base_commit, ignore_config, report: Optional[Report]):
    """Run clang-tidy on the diff and report any issues found."""
    r = subprocess.run(f'git diff -U0 --no-prefix {base_commit}', shell=True, capture_output=True)
    logging.debug(f'git diff {r}')
    diff = r.stdout.decode()
    if ignore_config is not None and os.path.exists(ignore_config):
        ignore = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern,
                                              open(ignore_config, 'r').readlines())
        diff = ignore_diff.remove_ignored(diff.splitlines(keepends=True), open(ignore_config, 'r'))
        logging.debug(f'filtered diff: {diff}')
    else:
        ignore = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, [])
    p = subprocess.Popen(['clang-tidy-diff', '-p0', '-quiet'], stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    a = ''.join(diff)
    logging.info(f'clang-tidy input: {a}')
    out = p.communicate(input=a.encode())[0].decode()
    logging.debug(f'clang-tidy-diff {p}: {out}')
    if report is None:
        report = Report()  # For debugging.
    # Typical finding looks like:
    # [cwd/]clang/include/clang/AST/DeclCXX.h:3058:20: error: ... [clang-diagnostic-error]
    pattern = '^([^:]*):(\\d+):(\\d+): (.*): (.*)'
    add_artifact = False
    logging.debug('cwd %s', os.getcwd())
    errors_count = 0
    warn_count = 0
    inline_comments = 0
    for line in out.splitlines(keepends=False):
        line = line.strip()
        line = line.replace(os.getcwd() + os.sep, '')
        logging.debug(line)
        if len(line) == 0 or line == 'No relevant changes found.':
            continue
        add_artifact = True
        match = re.search(pattern, line)
        if match:
            file_name = match.group(1)
            line_pos = match.group(2)
            char_pos = match.group(3)
            severity = match.group(4)
            text = match.group(5)
            text += '\n[[{} | not useful]] '.format(
                'https://github.com/google/llvm-premerge-checks/blob/master/docs/clang_tidy.md#warning-is-not-useful')
            if severity in ['warning', 'error']:
                if severity == 'warning':
                    warn_count += 1
                if severity == 'error':
                    errors_count += 1
                if ignore.match_file(file_name):
                    print('{} is ignored by pattern and no comment will be added'.format(file_name))
                else:
                    inline_comments += 1
                    report.add_lint({
                        'name': 'clang-tidy',
                        'severity': 'warning',
                        'code': 'clang-tidy',
                        'path': file_name,
                        'line': int(line_pos),
                        'char': int(char_pos),
                        'description': '{}: {}'.format(severity, text),
                    })
        else:
            logging.debug('does not match pattern')
    if add_artifact:
        p = 'clang-tidy.txt'
        with open(p, 'w') as f:
            f.write(out)
        report.add_artifact(os.getcwd(), p, 'clang-tidy')
    if errors_count + warn_count == 0:
        report.add_step('clang-tidy', CheckResult.SUCCESS, message='')
    else:
        report.add_step(
            'clang-tidy',
            CheckResult.FAILURE,
            f'clang-tidy found {errors_count} errors and {warn_count} warnings. {inline_comments} of them are added '
            f'as review comments. See '
            f'https://github.com/google/llvm-premerge-checks/blob/master/docs/clang_tidy.md#review-comments.')
    logging.debug(f'report: {report}')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs clang-tidy against a given diff with a given commit. '
                                                 'Produces a report and attaches linter comments to a review.')
    parser.add_argument('--base', default='HEAD~1')
    parser.add_argument('--ignore-config', default=None, help='path to file with patterns of files to ignore')
    parser.add_argument('--log-level', type=str, default='INFO')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
    run(args.base, args.ignore_config, None)
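To illustrate the parsing above, a small sketch of how that regular expression splits a typical clang-tidy finding; the sample line follows the format shown in the script's comment, and the message text is made up:

import re

pattern = '^([^:]*):(\\d+):(\\d+): (.*): (.*)'
line = "clang/include/clang/AST/DeclCXX.h:3058:20: error: no member named 'Foo' [clang-diagnostic-error]"
match = re.search(pattern, line)
if match:
    file_name, line_pos, char_pos, severity, text = match.groups()
    # file_name='clang/include/clang/AST/DeclCXX.h', line_pos='3058', char_pos='20', severity='error'
    print(file_name, line_pos, char_pos, severity, text)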
scripts/ignore_diff.py (modified)

@@ -12,31 +12,38 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import argparse
+import logging
 import re
 import sys
 import pathspec


-def main():
-    # FIXME: use argparse for parsing commandline parameters
-    # Maybe FIXME: Replace path to file with flags for tidy/format, use paths relative to `__file__`
-    argv = sys.argv[1:]
-    if not argv:
-        print("Please provide a path to .ignore file.")
-        sys.exit(1)
-    ignore = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern,
-                                          open(argv[0], 'r').readlines())
+# Takes an output of git diff and removes files ignored by a pattern specified by the ignore file.
+def remove_ignored(diff_lines, ignore_patterns_lines):
+    logging.debug(f'ignore pattern {ignore_patterns_lines}')
+    ignore = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, ignore_patterns_lines)
     good = True
-    for line in sys.stdin:
-        match = re.search(r'^diff --git a/(.*) b/(.*)$', line)
+    result = []
+    for line in diff_lines:
+        match = re.search(r'^diff --git (.*) (.*)$', line)
         if match:
             good = not (ignore.match_file(match.group(1)) and ignore.match_file(match.group(2)))
         if not good:
+            logging.debug(f'skip {line.rstrip()}')
             continue
-        sys.stdout.write(line)
+        result.append(line)
+    return result


 if __name__ == "__main__":
-    main()
+    # Maybe FIXME: Replace this tool usage with flags for tidy/format, use paths relative to `__file__`
+    parser = argparse.ArgumentParser(description='Takes an output of git diff and removes files ignored by a pattern '
+                                                 'specified by the ignore file')
+    parser.add_argument('ignore_config', default=None,
+                        help='path to file with patterns of files to ignore')
+    parser.add_argument('--log-level', type=str, default='WARNING')
+    args = parser.parse_args()
+    logging.basicConfig(level=args.log_level)
+    filtered = remove_ignored([x for x in sys.stdin], open(args.ignore_config, 'r').readlines())
+    for x in filtered:
+        sys.stdout.write(x)
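A short sketch of calling the new remove_ignored() directly, the same way clang_tidy_report.py above uses it; the diff text and the ignore pattern are made up for illustration:

import ignore_diff

# A --no-prefix style diff, as produced by `git diff --no-prefix`.
diff_text = (
    'diff --git docs/notes.md docs/notes.md\n'
    '+added a line in docs\n'
    'diff --git clang/lib/Foo.cpp clang/lib/Foo.cpp\n'
    '+int x;\n'
)
filtered = ignore_diff.remove_ignored(diff_text.splitlines(keepends=True), ['docs/'])
print(''.join(filtered))  # the docs/ hunk is dropped, the clang/ hunk is kept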
@@ -39,12 +39,12 @@ fi
 # Let clang-format apply patches; --diff doesn't produce results in the format we want.
 git-clang-format "${COMMIT}"
 set +e
-git diff -U0 --exit-code | "${DIR}/ignore_diff.py" "${DIR}/clang-format.ignore" > "${OUTPUT_DIR}"/clang-format.patch
+git diff -U0 --exit-code --no-prefix | "${DIR}/ignore_diff.py" "${DIR}/clang-format.ignore" > "${OUTPUT_DIR}"/clang-format.patch
 set -e
 # Revert changes of git-clang-format.
 git checkout -- .

 # clang-tidy
-git diff -U0 "${COMMIT}" | "${DIR}/ignore_diff.py" "${DIR}/clang-tidy.ignore" | clang-tidy-diff -p1 -quiet | sed "/^[[:space:]]*$/d" > "${OUTPUT_DIR}"/clang-tidy.txt
+git diff -U0 --no-prefix "${COMMIT}" | "${DIR}/ignore_diff.py" "${DIR}/clang-tidy.ignore" | clang-tidy-diff -p0 -quiet | sed "/^[[:space:]]*$/d" > "${OUTPUT_DIR}"/clang-tidy.txt

 echo "linters completed ======================================"
scripts/phabtalk/phabtalk.py (modified)

@@ -25,10 +25,10 @@ import time
 import urllib
 import uuid
 from typing import Optional, List, Dict

 import pathspec
 from lxml import etree
 from phabricator import Phabricator
+from enum import Enum


 class PhabTalk:
@@ -126,24 +126,28 @@ class PhabTalk:
         print('Uploaded build status {}, {} test results and {} lint results'.format(
             result_type, len(unit), len(lint_messages)))

+    # TODO: deprecate
     def add_artifact(self, phid: str, file: str, name: str, results_url: str):
         artifactKey = str(uuid.uuid4())
         artifactType = 'uri'
         artifactData = {'uri': '{}/{}'.format(results_url, file),
                         'ui.external': True,
                         'name': name}
+        self.create_artifact(phid, artifactKey, artifactType, artifactData)
+        print('Created artifact "{}"'.format(name))
+
+    def create_artifact(self, phid, artifact_key, artifact_type, artifact_data):
         if self.dryrun:
             print('harbormaster.createartifact =================')
-            print('artifactKey: {}'.format(artifactKey))
-            print('artifactType: {}'.format(artifactType))
-            print('artifactData: {}'.format(artifactData))
+            print('artifactKey: {}'.format(artifact_key))
+            print('artifactType: {}'.format(artifact_type))
+            print('artifactData: {}'.format(artifact_data))
             return
         _try_call(lambda: self._phab.harbormaster.createartifact(
             buildTargetPHID=phid,
-            artifactKey=artifactKey,
-            artifactType=artifactType,
-            artifactData=artifactData))
-        print('Created artifact "{}"'.format(name))
+            artifactKey=artifact_key,
+            artifactType=artifact_type,
+            artifactData=artifact_data))


 def _parse_patch(patch) -> List[Dict[str, str]]:
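A small sketch of using the new create_artifact() method directly, the way scripts/premerge_checks.py (later in this commit) links a Buildkite build to a review; the token, PHID and URL values are placeholders:

import uuid
from phabtalk.phabtalk import PhabTalk

# Placeholder credentials and identifiers for illustration only.
phab = PhabTalk('<conduit token>', 'https://reviews.llvm.org/api/', False)
phab.create_artifact(
    '<build target PHID>',
    str(uuid.uuid4()),  # artifact key
    'uri',              # artifact type
    {'uri': 'https://buildkite.com/llvm-project/some-build', 'ui.external': True, 'name': 'Buildkite build'})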
@@ -189,6 +193,47 @@ def _parse_patch(patch) -> List[Dict[str, str]]:
     return entries


+class CheckResult(Enum):
+    UNKNOWN = 0
+    SUCCESS = 1
+    FAILURE = 2
+
+
+class Report:
+    def __init__(self):
+        self.comments = []
+        self.success = True
+        self.working = False
+        self.unit = []  # type: List
+        self.lint = {}
+        self.test_stats = {
+            'pass': 0,
+            'fail': 0,
+            'skip': 0
+        }  # type: Dict[str, int]
+        self.steps = []  # type: List
+        self.artifacts = []  # type: List
+
+    def __str__(self):
+        return str(self.__dict__)
+
+    def add_lint(self, m):
+        key = '{}:{}'.format(m['path'], m['line'])
+        if key not in self.lint:
+            self.lint[key] = []
+        self.lint[key].append(m)
+
+    def add_step(self, title: str, result: CheckResult, message: str):
+        self.steps.append({
+            'title': title,
+            'result': result,
+            'message': message,
+        })
+
+    def add_artifact(self, dir: str, file: str, name: str):
+        self.artifacts.append({'dir': dir, 'file': file, 'name': name})
+
+
 class BuildReport:

     def __init__(self, args):
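The new Report object is just an accumulator; a minimal sketch of how the checkers above fill it, with illustrative values:

from phabtalk.phabtalk import Report, CheckResult

report = Report()
report.add_step('clang-format', CheckResult.SUCCESS, '')
report.add_lint({'name': 'clang-tidy', 'severity': 'warning', 'code': 'clang-tidy',
                 'path': 'clang/lib/Foo.cpp', 'line': 42, 'char': 1, 'description': 'warning: ...'})
report.add_artifact('/tmp/build', 'test-results.xml', 'test results')
# Lint messages are grouped under "path:line" keys.
print(report.lint)   # {'clang/lib/Foo.cpp:42': [{...}]}
print(report.steps)  # [{'title': 'clang-format', 'result': <CheckResult.SUCCESS: 1>, 'message': ''}]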
@@ -295,7 +340,7 @@ class BuildReport:
         diffs = _parse_patch(open(p, 'r'))
         success = len(diffs) == 0
         for d in diffs:
-            lines = d['diff'].splitlines(True)
+            lines = d['diff'].splitlines(keepends=True)
             m = 10  # max number of lines to report.
             description = 'please reformat the code\n```\n'
             n = len(lines)
@@ -325,10 +370,10 @@ class BuildReport:
         self.success = success and self.success

     def add_clang_tidy(self):
-        # Typical message looks like
-        # [..]/clang/include/clang/AST/DeclCXX.h:3058:20: error: no member named 'LifetimeExtendedTemporary' in 'clang::Decl' [clang-diagnostic-error]
         if self.clang_tidy_result is None:
             return
+        # Typical message looks like
+        # [..]/clang/include/clang/AST/DeclCXX.h:3058:20: error: no member named 'LifetimeExtendedTemporary' in 'clang::Decl' [clang-diagnostic-error]
         pattern = '^{}/([^:]*):(\\d+):(\\d+): (.*): (.*)'.format(self.workspace)
         errors_count = 0
         warn_count = 0
scripts/premerge_checks.py (new executable file, 175 lines)

@@ -0,0 +1,175 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs all checks on a buildkite agent.
import json
import logging
import os
import pathlib
import re
import shutil
import subprocess
import time
import uuid
from typing import Callable, Optional

import clang_format_report
import clang_tidy_report
import run_cmake
import test_results_report
from phabtalk.phabtalk import Report, CheckResult, PhabTalk


def upload_file(base_dir: str, file: str):
    """
    Uploads artifact to buildkite and returns URL to it
    """
    r = subprocess.run(f'buildkite-agent artifact upload "{file}"', shell=True, capture_output=True, cwd=base_dir)
    logging.debug(f'upload-artifact {r}')
    match = re.search('Uploading artifact ([^ ]*) ', r.stderr.decode())
    logging.debug(f'match {match}')
    if match:
        url = f'https://buildkite.com/organizations/llvm-project/pipelines/premerge-checks/builds/{os.getenv("BUILDKITE_BUILD_NUMBER")}/jobs/{os.getenv("BUILDKITE_JOB_ID")}/artifacts/{match.group(1)}'
        logging.info(f'uploaded {file} to {url}')
        return url
    else:
        logging.warning(f'could not find artifact {base_dir}/{file}')
        return None


def maybe_add_url_artifact(phab: PhabTalk, url: str, name: str):
    phid = os.getenv('ph_target_phid')
    if phid is None:
        return
    phab.create_artifact(phid, str(uuid.uuid4()), 'uri', {'uri': url, 'ui.external': True, 'name': name})


def add_shell_result(report: Report, name: str, exit_code: int) -> CheckResult:
    logging.info(f'"{name}" exited with {exit_code}')
    z = CheckResult.SUCCESS
    if exit_code != 0:
        z = CheckResult.FAILURE
    report.add_step(name, z, '')
    return z


def ninja_all_report(report: Report) -> CheckResult:
    print('Full log will be available in Artifacts "ninja-all.log"')
    r = subprocess.run(f'ninja all | '
                       f'tee {artifacts_dir}/ninja-all.log | '
                       f'grep -vE "\\[.*] (Building|Linking|Copying|Generating|Creating)"',
                       shell=True, cwd=build_dir)
    return add_shell_result(report, 'ninja all', r.returncode)


def ninja_check_all_report(report: Report) -> CheckResult:
    # TODO: merge running ninja check all and analysing results in one step?
    print('Full log will be available in Artifacts "ninja-check-all.log"')
    r = subprocess.run(f'ninja check-all | tee {artifacts_dir}/ninja-check-all.log | '
                       f'grep -vE "^\\[.*] (Building|Linking)" | '
                       f'grep -vE "^(PASS|XFAIL|UNSUPPORTED):"', shell=True, cwd=build_dir)
    z = add_shell_result(report, 'ninja check all', r.returncode)
    # TODO: check if test-results are present.
    report.add_artifact(build_dir, 'test-results.xml', 'test results')
    test_results_report.run(os.path.join(build_dir, 'test-results.xml'), report)
    return z


def run_step(name: str, report: Report, thunk: Callable[[Report], CheckResult]) -> CheckResult:
    global timings
    start = time.time()
    print(f'--- {name}')  # New section in Buildkite log.
    result = thunk(report)
    timings[name] = time.time() - start
    # Expand section if it failed.
    if result == CheckResult.FAILURE:
        print('^^^ +++')
    return result


def cmake_report(report: Report) -> CheckResult:
    global build_dir
    cmake_result, build_dir, cmake_artifacts = run_cmake.run('detect', os.getcwd())
    for file in cmake_artifacts:
        if os.path.exists(file):
            shutil.copy2(file, artifacts_dir)
    return add_shell_result(report, 'cmake', cmake_result)


def furl(url: str, name: Optional[str] = None):
    if name is None:
        name = url
    return f"\033]1339;url='{url}';content='{name}'\a\n"


if __name__ == '__main__':
    build_dir = ''
    logging.basicConfig(level=logging.WARNING, format='%(levelname)-7s %(message)s')
    scripts_dir = pathlib.Path(__file__).parent.absolute()
    phab = PhabTalk(os.getenv('CONDUIT_TOKEN'), 'https://reviews.llvm.org/api/', False)
    maybe_add_url_artifact(phab, os.getenv('BUILDKITE_BUILD_URL'), 'Buildkite build')
    artifacts_dir = os.path.join(os.getcwd(), 'artifacts')
    os.makedirs(artifacts_dir, exist_ok=True)
    report = Report()
    timings = {}
    cmake_result = run_step('cmake', report, cmake_report)
    if cmake_result == CheckResult.SUCCESS:
        compile_result = run_step('ninja all', report, ninja_all_report)
        if compile_result == CheckResult.SUCCESS:
            run_step('ninja check all', report, ninja_check_all_report)
    run_step('clang-tidy', report,
             lambda x: clang_tidy_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-tidy.ignore'), x))
    run_step('clang-format', report,
             lambda x: clang_format_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-format.ignore'), x))
    print('+++ summary')
    print(f'Branch {os.getenv("BUILDKITE_BRANCH")} at {os.getenv("BUILDKITE_REPO")}')
    ph_buildable_diff = os.getenv('ph_buildable_diff')
    if ph_buildable_diff is not None:
        url = f'https://reviews.llvm.org/D{os.getenv("ph_buildable_revision")}?id={ph_buildable_diff}'
        print(f'Review: {furl(url)}')
    if os.getenv('BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER') is not None:
        url = f'https://buildkite.com/llvm-project/' \
              f'{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG")}/' \
              f'builds/{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER")}'
        print(f'Triggered from build {furl(url)}')
    logging.debug(report)
    success = True
    for s in report.steps:
        mark = 'V'
        if s['result'] == CheckResult.UNKNOWN:
            mark = '?'
        if s['result'] == CheckResult.FAILURE:
            success = False
            mark = 'X'
        msg = s['message']
        if len(msg):
            msg = ': ' + msg
        print(f'{mark} {s["title"]}{msg}')

    # TODO: dump the report and deduplicate tests and other reports later (for multiple OS) in a separate step.
    ph_target_phid = os.getenv('ph_target_phid')
    if ph_target_phid is not None:
        build_url = f'https://reviews.llvm.org/harbormaster/build/{os.getenv("ph_build_id")}'
        print(f'Reporting results to Phabricator build {furl(build_url)}')
        phab.update_build_status(ph_buildable_diff, ph_target_phid, False, success, report.lint, report.unit)
        for a in report.artifacts:
            url = upload_file(a['dir'], a['file'])
            if url is not None:
                maybe_add_url_artifact(phab, url, a['name'])
    else:
        logging.warning('No phabricator phid is specified. Will not update the build status in Phabricator')
    # TODO: add link to report issue on github
    with open(os.path.join(artifacts_dir, 'step-timings.json'), 'w') as f:
        f.write(json.dumps(timings))
scripts/run_cmake.py (modified)

@@ -21,6 +21,7 @@ import platform
 import shutil
 import subprocess
 import stat
+import sys
 from typing import List, Dict
 import yaml

@@ -44,7 +45,8 @@ class Configuration:
         config = yaml.load(config_file, Loader=yaml.SafeLoader)
         self._environment = config['environment']  # type: Dict[OperatingSystem, Dict[str, str]]
         self.general_cmake_arguments = config['arguments']['general']  # type: List[str]
-        self._specific_cmake_arguments = config['arguments']  # type: Dict[OperatingSystem, List[str]]
+        self._specific_cmake_arguments = config[
+            'arguments']  # type: Dict[OperatingSystem, List[str]]
         self.operating_system = self._detect_os()  # type: OperatingSystem

     @property

@@ -126,8 +128,10 @@ def _create_args(config: Configuration, llvm_enable_projects: str) -> List[str]:
     return arguments


-def run_cmake(projects: str, repo_path: str, config_file_path: str = None, *, dryrun: bool = False):
-    """Use cmake to configure the project.
+def run(projects: str, repo_path: str, config_file_path: str = None, *, dry_run: bool = False):
+    """Use cmake to configure the project and create build directory.
+
+    Returns build directory and path to created artifacts.

     This version works on all operating systems.
     """

@@ -137,7 +141,7 @@ def run_cmake(projects: str, repo_path: str, config_file_path: str = None, *, dr
     config = Configuration(config_file_path)

     build_dir = os.path.abspath(os.path.join(repo_path, 'build'))
-    if not dryrun:
+    if not dry_run:
         secure_delete(build_dir)
         os.makedirs(build_dir)

@@ -146,13 +150,15 @@ def run_cmake(projects: str, repo_path: str, config_file_path: str = None, *, dr
     print('Enabled projects: {}'.format(llvm_enable_projects))
     arguments = _create_args(config, llvm_enable_projects)
     cmd = 'cmake ' + ' '.join(arguments)

     print('Running cmake with these arguments:\n{}'.format(cmd), flush=True)
-    if dryrun:
-        print('Dryrun, not invoking CMake!')
-    else:
-        subprocess.check_call(cmd, env=env, shell=True, cwd=build_dir)
-    _link_compile_commands(config, repo_path, build_dir)
+    if dry_run:
+        print('Dry run, not invoking CMake!')
+        return 0, build_dir, []
+
+    result = subprocess.call(cmd, env=env, shell=True, cwd=build_dir)
+    _link_compile_commands(config, repo_path, build_dir)
+    return result, build_dir, [os.path.join(build_dir, 'CMakeCache.txt')]


 def secure_delete(path: str):

@@ -187,4 +193,5 @@ if __name__ == '__main__':
     parser.add_argument('repo_path', type=str, nargs='?', default=os.getcwd())
     parser.add_argument('--dryrun', action='store_true')
     args = parser.parse_args()
-    run_cmake(args.projects, args.repo_path, dryrun=args.dryrun)
+    result, _, _ = run(args.projects, args.repo_path, dry_run=args.dryrun)
+    sys.exit(result)
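With this change run() reports its outcome instead of raising, so callers can decide what to do with a failure; a minimal sketch of the new calling convention, mirroring cmake_report() in premerge_checks.py:

import os
import run_cmake

result, build_dir, artifacts = run_cmake.run('detect', os.getcwd())
if result != 0:
    print(f'cmake failed with exit code {result}, see artifacts: {artifacts}')
else:
    print(f'configured build directory: {build_dir}')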
scripts/run_ninja.py (modified)

@@ -18,9 +18,10 @@ import os
 import platform
 import shutil
 import subprocess
+import sys


-def check_sccache(dryrun:bool):
+def check_sccache(dryrun: bool):
     """check if sccache can be started

     Wipe local cache folder if it fails with a timeout.

@@ -40,18 +41,18 @@ def check_sccache(dryrun:bool):
         print('sccache failed with timeout. Wiping local cache dir {}'.format(sccache_dir))
         if dryrun:
             print('Dryrun. Not deleting anything.')
         else:
             shutil.rmtree(sccache_dir)


-def run_ninja(target: str, repo_path: str, *, dryrun:bool = False):
+def run_ninja(target: str, work_dir: str, *, dryrun: bool = False):
     check_sccache(dryrun)
-    build_dir = os.path.join(repo_path, 'build')
     cmd = 'ninja {}'.format(target)
     if dryrun:
         print('Dryrun. Command would have been:\n{}'.format(cmd))
+        return 0
     else:
-        subprocess.check_call(cmd, shell=True, cwd=build_dir)
+        return subprocess.call(cmd, shell=True, cwd=work_dir)


 if __name__ == '__main__':

@@ -60,4 +61,4 @@ if __name__ == '__main__':
     parser.add_argument('repo_path', type=str, nargs='?', default=os.getcwd())
     parser.add_argument('--dryrun', action='store_true')
     args = parser.parse_args()
-    run_ninja(args.target, args.repo_path, dryrun=args.dryrun)
+    sys.exit(run_ninja(args.target, os.path.join(args.repo_path, 'build'), dryrun=args.dryrun))
scripts/test_results_report.py (new executable file, 70 lines)

@@ -0,0 +1,70 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import logging
from typing import Optional
from lxml import etree
from phabtalk.phabtalk import Report, CheckResult


def run(test_results, report: Optional[Report]):
    """Parse the test results XML and add them to the report."""
    if report is None:
        report = Report()  # For debugging.
    if not os.path.exists(test_results):
        logging.warning(f'{test_results} not found')
        report.add_step('test results', CheckResult.UNKNOWN, 'test report is not found')
        return
    success = True
    root_node = etree.parse(test_results)
    for test_case in root_node.xpath('//testcase'):
        test_result = 'pass'
        if test_case.find('failure') is not None:
            test_result = 'fail'
        if test_case.find('skipped') is not None:
            test_result = 'skip'
        report.test_stats[test_result] += 1
        if test_result == 'fail':
            success = False
            failure = test_case.find('failure')
            test_result = {
                'name': test_case.attrib['name'],
                'namespace': test_case.attrib['classname'],
                'result': test_result,
                'duration': float(test_case.attrib['time']),
                'details': failure.text
            }
            report.unit.append(test_result)

    msg = f'{report.test_stats["pass"]} tests passed, {report.test_stats["fail"]} failed and ' \
          f'{report.test_stats["skip"]} were skipped.\n'
    if success:
        report.add_step('test results', CheckResult.SUCCESS, msg)
    else:
        for test_case in report.unit:
            if test_case['result'] == 'fail':
                msg += f'{test_case["namespace"]}/{test_case["name"]}\n'
        report.add_step('unit tests', CheckResult.FAILURE, msg)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Processes results from xml report')
    parser.add_argument('test_report', default='build/test-results.xml')
    parser.add_argument('--log-level', type=str, default='INFO')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level)
    run(args.test_report, None)