Add windows build
- Configured sccache on Buildkite Windows machines.
- The final result is sent to Phabricator from "summary.py", which waits for both builds to complete.
- Extracted "add_url_artifact" into a runnable script.
- Reorganized code and fixed some of the TODOs.
parent 654b6e5da8
commit 017ca44a75
20 changed files with 364 additions and 205 deletions
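For orientation, the fan-in described above (two independent build steps feeding one report step) can be sketched as a minimal Buildkite pipeline generator. This is an illustrative sketch, not part of the commit: the step keys and the `allow_dependency_failure` flag mirror the pipeline script below, while the command strings and queue name here are placeholders.

```python
# Minimal sketch of the fan-in wiring used by this commit (placeholder commands):
# the two build steps run in parallel, and the report step waits for both keys,
# even if one build fails, so summary.py can still post a result to Phabricator.
import yaml

linux = {'label': ':linux: build and test linux', 'key': 'linux',
         'commands': ['echo build linux'], 'agents': {'queue': 'premerge', 'os': 'linux'}}
windows = {'label': ':windows: build and test windows', 'key': 'windows',
           'commands': ['echo build windows'], 'agents': {'queue': 'premerge', 'os': 'windows'}}
report = {'label': ':spiral_note_pad: report',
          'depends_on': [linux['key'], windows['key']],
          'allow_dependency_failure': True,
          'commands': ['echo run summary.py'],
          'agents': {'queue': 'premerge', 'os': 'linux'}}

print(yaml.dump({'steps': [linux, windows, report]}))
```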
Binary file not shown.
@@ -1,4 +1,9 @@
c:\credentials\buildkite-env.ps1
# Install Buildkite agent.
iex ((New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/buildkite/agent/master/install.ps1'))

$env:SCCACHE_DIR="C:\ws\sccache"
Remove-Item -Recurse -Force -ErrorAction Ignore $env:SCCACHE_DIR
sccache --start-server

C:\buildkite-agent\bin\buildkite-agent.exe start
@@ -113,21 +113,34 @@ To spawn a new windows agent:
1. Go to the [GCP page](https://pantheon.corp.google.com/compute/instances?project=llvm-premerge-checks&instancessize=50) and pick a new number for the agent.
1. Run `kubernetes/windows_agent_create.sh agent-windows-<number>`
1. Go to the [GCP page](https://pantheon.corp.google.com/compute/instances?project=llvm-premerge-checks&instancessize=50) again
1. login to the new machine via RDP (you will need an RDP client, e.g. the Chrome app).
1. Login to the new machine via RDP (you will need an RDP client, e.g. the Chrome app).
1. In the RDP session: run these commands in the CMD window under Administrator to bootstrap the Windows machine:
```powershell
Invoke-WebRequest -uri 'https://raw.githubusercontent.com/google/llvm-premerge-checks/master/scripts/windows_agent_bootstrap.ps1' -OutFile windows_agent_bootstrap.ps1
./windows_agent_bootstrap.ps1
```
Ignore the pop-up to format the new disk and wait for the machine to reboot.
1. Create `c:\credentials` folder with the agent credentials:
For *Buildkite* add file `buildkite-env.ps1`:

### Buildkite

1. Create `c:\credentials` folder with file `buildkite-env.ps1`:
```powershell
$Env:buildkiteAgentToken = "secret-token"
$Env:BUILDKITE_AGENT_TAGS = "queue=premerge;os=windows"
$Env:BUILDKITE_AGENT_TAGS = "queue=premerge,os=windows"
$Env:CONDUIT_TOKEN = "conduit-api-token"
```
1. Run
```powershell
C:\llvm-premerge-checks\scripts\windows_agent_start_buildkite.ps1 [-workdir D:/] [-testing] [-version latest]
```

### Jenkins

1. Create `c:\credentials` folder with `build-agent-results_key.json` to access cloud storage (copy it from one of the existing machines).
1. Run
```powershell
git clone https://github.com/google/llvm-premerge-checks.git "c:\llvm-premerge-checks"
C:\llvm-premerge-checks\scripts\windows_agent_start_buildkite.ps1 [-testing] [-version latest]
```
For *Jenkins*: `build-agent-results_key.json` to access cloud storage copy from one of the existing machines.
1. Start the container `C:\llvm-premerge-checks\scripts\windows_agent_start_[buildkite|jenkins].ps1`

## Testing scripts locally
@@ -41,7 +41,7 @@ spec:
        - name: BUILDKITE_AGENT_TOKEN
          valueFrom:
            secretKeyRef:
              name: agent-token
              name: buildkite-agent-token
              key: token
        - name: BUILDKITE_AGENT_TAGS
          value: "queue=release,os=linux"
@@ -18,7 +18,7 @@ metadata:
  name: premerge-debian
  namespace: buildkite
spec:
  replicas: 1
  replicas: 2
  template:
    metadata:
      labels:
@@ -43,7 +43,7 @@ spec:
        - name: BUILDKITE_AGENT_TOKEN
          valueFrom:
            secretKeyRef:
              name: agent-token
              name: buildkite-agent-token
              key: token
        - name: BUILDKITE_AGENT_TAGS
          value: "queue=premerge,os=linux"
@@ -1,24 +0,0 @@
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------

# store the buildkite token as kubernetes secret
#
# Get the token from the website [1] and store it in this file locally in
# ~/.llvm-premerge-checks/buildkite-token
# Do not share this token with anyone!
# [1] https://buildkite.com/organizations/llvm-project/agents

kubectl create secret generic buildkite-token --namespace jenkins --from-file ~/.llvm-premerge-checks/buildkite-token
scripts/__init__.py (new file, 0 lines)
@@ -21,15 +21,61 @@ if __name__ == '__main__':
    queue = os.getenv("BUILDKITE_AGENT_META_DATA_QUEUE", "default")
    diff_id = os.getenv("ph_buildable_diff", "")
    steps = []
    # SCRIPT_DIR is defined in buildkite pipeline step.
    linux_buld_step = {
        'label': 'build linux',
        'key': 'build-linux',
        'label': ':linux: build and test linux',
        'key': 'linux',
        'commands': [
            '${SCRIPT_DIR}/premerge_checks.py',
            'export SRC=${BUILDKITE_BUILD_PATH}/llvm-premerge-checks',
            'rm -rf ${SRC}',
            'git clone --depth 1 --branch ${scripts_branch} https://github.com/google/llvm-premerge-checks.git ${SRC}',
            # Add link in review to the build.
            '${SRC}/scripts/phabtalk/add_url_artifact.py '
            '--phid="$ph_target_phid" '
            '--url="$BUILDKITE_BUILD_URL" '
            '--name="Buildkite build"',
            '${SRC}/scripts/premerge_checks.py --check-clang-format --check-clang-tidy',
        ],
        'artifact_paths': ['artifacts/**/*', '*_result.json'],
        'agents': {'queue': queue, 'os': 'linux'}
    }
    windows_buld_step = {
        'label': ':windows: build and test windows',
        'key': 'windows',
        'commands': [
            'sccache --show-stats',
            'set SRC=%BUILDKITE_BUILD_PATH%/llvm-premerge-checks',
            'rm -rf %SRC%',
            'git clone --depth 1 --branch %scripts_branch% https://github.com/google/llvm-premerge-checks.git %SRC%',
            'powershell -command "%SRC%/scripts/premerge_checks.py; '
            '\\$exit=\\$?;'
            'sccache --show-stats;'
            'if (\\$exit) {'
            ' echo "success";'
            ' exit 0; } '
            'else {'
            ' echo "failure";'
            ' exit 1;'
            '}',
        ],
        'artifact_paths': ['artifacts/**/*', '*_result.json'],
        'agents': {'queue': queue, 'os': 'windows'},
    }
    steps.append(linux_buld_step)
    steps.append(windows_buld_step)
    report_step = {
        'label': ':spiral_note_pad: report',
        'depends_on': [linux_buld_step['key'], windows_buld_step['key']],
        'commands': [
            'mkdir -p artifacts',
            'buildkite-agent artifact download "*_result.json" .',
            'export SRC=${BUILDKITE_BUILD_PATH}/llvm-premerge-checks',
            'rm -rf ${SRC}',
            'git clone --depth 1 --branch ${scripts_branch} https://github.com/google/llvm-premerge-checks.git ${SRC}',
            '${SRC}/scripts/buildkite/summary.py',
        ],
        'allow_dependency_failure': True,
        'artifact_paths': ['artifacts/**/*'],
        'agents': {'queue': queue, 'os': 'linux'}
    }
    steps.append(linux_buld_step)
    steps.append(report_step)
    print(yaml.dump({'steps': steps}))
@@ -26,9 +26,9 @@ if __name__ == '__main__':
        'commands': ['scripts/buildkite/apply_patch.sh'],
        'agents': {'queue': queue, 'os': 'linux'}
    }
    run_build_step = {
    build_linux_step = {
        'trigger': 'premerge-checks',
        'label': ':rocket: build',
        'label': ':rocket: build and test',
        'async': False,
        'depends_on': 'create-branch',
        'build': {

@@ -38,7 +38,7 @@ if __name__ == '__main__':
    }
    for e in os.environ:
        if e.startswith('ph_'):
            run_build_step['build']['env'][e] = os.getenv(e)
            build_linux_step['build']['env'][e] = os.getenv(e)
    steps.append(create_branch_step)
    steps.append(run_build_step)
    steps.append(build_linux_step)
    print(yaml.dump({'steps': steps}))
scripts/buildkite/summary.py (new executable file, 71 lines)
@@ -0,0 +1,71 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import glob
import json
import logging
import os
import sys
import uuid

if __name__ == '__main__':
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from phabtalk.phabtalk import PhabTalk
from buildkite.utils import format_url


def maybe_add_url_artifact(phab: PhabTalk, phid: str, url: str, name: str):
    if phid is None:
        logging.warning('PHID is not provided, cannot create URL artifact')
        return
    phab.create_artifact(phid, str(uuid.uuid4()), 'uri', {'uri': url, 'ui.external': True, 'name': name})


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--log-level', type=str, default='WARNING')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')

    print(f'Branch {os.getenv("BUILDKITE_BRANCH")} at {os.getenv("BUILDKITE_REPO")}')
    ph_buildable_diff = os.getenv('ph_buildable_diff')
    if ph_buildable_diff is not None:
        url = f'https://reviews.llvm.org/D{os.getenv("ph_buildable_revision")}?id={ph_buildable_diff}'
        print(f'Review: {format_url(url)}')
    if os.getenv('BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER') is not None:
        url = f'https://buildkite.com/llvm-project/' \
              f'{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG")}/' \
              f'builds/{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER")}'
        print(f'Triggered from build {format_url(url)}')

    success = True
    for path in glob.glob("*_result.json"):
        logging.info(f'analysing {path}')
        with open(path, 'r') as f:
            report = json.load(f)
            logging.info(report)
            success = success and report['success']
    phabtalk = PhabTalk(os.getenv('CONDUIT_TOKEN'), 'https://reviews.llvm.org/api/', False)
    build_url = f'https://reviews.llvm.org/harbormaster/build/{os.getenv("ph_build_id")}'
    print(f'Reporting results to Phabricator build {format_url(build_url)}')
    ph_buildable_diff = os.getenv('ph_buildable_diff')
    ph_target_phid = os.getenv('ph_target_phid')
    phabtalk.update_build_status(ph_buildable_diff, ph_target_phid, False, success)
    bug_url = f'https://github.com/google/llvm-premerge-checks/issues/new?assignees=&labels=bug' \
              f'&template=bug_report.md&title=buildkite build {os.getenv("BUILDKITE_PIPELINE_SLUG")} ' \
              f'{os.getenv("BUILDKITE_BUILD_NUMBER")}'
    print(f'{format_url(bug_url, "report issue")}')
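As a sanity check of the hand-off above: premerge_checks.py dumps each Report as a `*_result.json` artifact, and summary.py only folds the top-level `success` flags together. A minimal sketch of that round trip follows; the file names and the reduced field set are illustrative, not the full Report schema.

```python
import glob
import json

# Write two illustrative result files, as the linux and windows build steps would.
for name, ok in [('linux', True), ('windows', False)]:
    with open(f'{name}_result.json', 'w') as f:
        json.dump({'name': name, 'success': ok}, f)

# Aggregate them the same way summary.py does: overall success is the AND of all files.
success = True
for path in glob.glob('*_result.json'):
    with open(path) as f:
        success = success and json.load(f)['success']
print('overall success:', success)  # False, because the "windows" build failed above
```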
scripts/buildkite/utils.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import logging
import os
import re
import subprocess
from typing import Optional


def upload_file(base_dir: str, file: str):
    """
    Uploads artifact to buildkite and returns URL to it
    """
    r = subprocess.run(f'buildkite-agent artifact upload "{file}"', shell=True, capture_output=True, cwd=base_dir)
    logging.debug(f'upload-artifact {r}')
    match = re.search('Uploading artifact ([^ ]*) ', r.stderr.decode())
    logging.debug(f'match {match}')
    if match:
        url = f'https://buildkite.com/organizations/llvm-project/pipelines/premerge-checks/builds/{os.getenv("BUILDKITE_BUILD_NUMBER")}/jobs/{os.getenv("BUILDKITE_JOB_ID")}/artifacts/{match.group(1)}'
        logging.info(f'uploaded {file} to {url}')
        return url
    else:
        logging.warning(f'could not find artifact {base_dir}/{file}')
        return None


def format_url(url: str, name: Optional[str] = None):
    if name is None:
        name = url
    return f"\033]1339;url='{url}';content='{name}'\a\n"
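For reference, `format_url` wraps Buildkite's terminal escape sequence for inline links. A small usage sketch follows; the helper is restated so the snippet is self-contained, and the review URL is illustrative.

```python
from typing import Optional

def format_url(url: str, name: Optional[str] = None):
    # Same escape sequence as buildkite/utils.py: Buildkite renders it as a clickable link in the job log.
    if name is None:
        name = url
    return f"\033]1339;url='{url}';content='{name}'\a\n"

# Printed from a job script, this shows up as a "Review" link in the Buildkite log.
print(f"Review: {format_url('https://reviews.llvm.org/D12345?id=1', 'Review')}")
```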
@@ -14,15 +14,14 @@
# limitations under the License.

import argparse
import logging
import os
import subprocess
import logging

from typing import Tuple, Optional
import pathspec
import unidiff

from typing import Tuple, Optional
from phabtalk.phabtalk import Report, CheckResult
from phabtalk.phabtalk import Report, Step


def get_diff(base_commit) -> Tuple[bool, str]:

@@ -40,13 +39,15 @@ def get_diff(base_commit) -> Tuple[bool, str]:
    return True, diff_run.stdout.decode()


def run(base_commit, ignore_config, report: Optional[Report]):
def run(base_commit, ignore_config, step: Optional[Step], report: Optional[Report]):
    """Apply clang-format and return if no issues were found."""
    if report is None:
        report = Report()  # For debugging.
    if step is None:
        step = Step()  # For debugging.
    r, patch = get_diff(base_commit)
    if not r:
        report.add_step('clang-format', CheckResult.FAILURE, '')
        step.success = False
        return
    add_artifact = False
    patches = unidiff.PatchSet(patch)

@@ -88,14 +89,12 @@ def run(base_commit, ignore_config, report: Optional[Report]):
    with open(patch_file, 'w') as f:
        f.write(patch)
    report.add_artifact(os.getcwd(), patch_file, 'clang-format')
    if success:
        report.add_step('clang-format', CheckResult.SUCCESS, message='')
    else:
        report.add_step(
            'clang-format',
            CheckResult.FAILURE,
    if not success:
        step.success = False
        step.messages.append(
            'Please format your changes with clang-format by running `git-clang-format HEAD^` or applying patch.')
    logging.debug(f'report: {report}')
    logging.debug(f'step: {step}')


if __name__ == '__main__':
@@ -22,11 +22,15 @@ from typing import Optional
import pathspec

import ignore_diff
from phabtalk.phabtalk import Report, CheckResult
from phabtalk.phabtalk import Report, Step


def run(base_commit, ignore_config, report: Optional[Report]):
def run(base_commit, ignore_config, step: Optional[Step], report: Optional[Report]):
    """Apply clang-format and return if no issues were found."""
    if report is None:
        report = Report()  # For debugging.
    if step is None:
        step = Step()  # For debugging.
    r = subprocess.run(f'git diff -U0 --no-prefix {base_commit}', shell=True, capture_output=True)
    logging.debug(f'git diff {r}')
    diff = r.stdout.decode()

@@ -43,8 +47,6 @@ def run(base_commit, ignore_config, report: Optional[Report]):
    logging.info(f'clang-tidy input: {a}')
    out = p.communicate(input=a.encode())[0].decode()
    logging.debug(f'clang-tidy-diff {p}: {out}')
    if report is None:
        report = Report()  # For debugging.
    # Typical finding looks like:
    # [cwd/]clang/include/clang/AST/DeclCXX.h:3058:20: error: ... [clang-diagnostic-error]
    pattern = '^([^:]*):(\\d+):(\\d+): (.*): (.*)'

@@ -94,16 +96,14 @@ def run(base_commit, ignore_config, report: Optional[Report]):
    with open(p, 'w') as f:
        f.write(out)
    report.add_artifact(os.getcwd(), p, 'clang-tidy')
    if errors_count + warn_count == 0:
        report.add_step('clang-tidy', CheckResult.SUCCESS, message='')
    else:
        report.add_step(
            'clang-tidy',
            CheckResult.FAILURE,
    if errors_count + warn_count != 0:
        step.success = False
        step.messages.append(
            f'clang-tidy found {errors_count} errors and {warn_count} warnings. {inline_comments} of them are added '
            f'as review comments. See'
            f'https://github.com/google/llvm-premerge-checks/blob/master/docs/clang_tidy.md#review-comments.')
    logging.debug(f'report: {report}')
    logging.debug(f'step: {step}')


if __name__ == '__main__':

@@ -114,4 +114,4 @@ if __name__ == '__main__':
    parser.add_argument('--log-level', type=str, default='INFO')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
    run(args.base, args.ignore_config, None)
    run(args.base, args.ignore_config, None, None)
scripts/phabtalk/__init__.py (new file, 0 lines)
scripts/phabtalk/add_url_artifact.py (new executable file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the the Apache License v2.0 with LLVM Exceptions (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://llvm.org/LICENSE.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import os
import sys
import uuid

if __name__ == '__main__':
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# from phabtalk import PhabTalk
# else:
from phabtalk.phabtalk import PhabTalk


def maybe_add_url_artifact(phab: PhabTalk, phid: str, url: str, name: str):
    if phid is None:
        logging.warning('PHID is not provided, cannot create URL artifact')
        return
    phab.create_artifact(phid, str(uuid.uuid4()), 'uri', {'uri': url, 'ui.external': True, 'name': name})


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs premerge checks8')
    parser.add_argument('--url', type=str)
    parser.add_argument('--name', type=str)
    parser.add_argument('--phid', type=str)
    parser.add_argument('--log-level', type=str, default='WARNING')
    args = parser.parse_args()

    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
    phabtalk = PhabTalk(os.getenv('CONDUIT_TOKEN'), 'https://reviews.llvm.org/api/', False)
    maybe_add_url_artifact(phabtalk, args.phid, args.url, args.name)
@@ -28,7 +28,7 @@ from typing import Optional, List, Dict
import pathspec
from lxml import etree
from phabricator import Phabricator
from enum import Enum
from enum import IntEnum


class PhabTalk:

@@ -81,7 +81,7 @@ class PhabTalk:
            transactions=transactions)
        print('Uploaded comment to Revision D{}:{}'.format(revision, text))

    def update_build_status(self, diff_id: str, phid: str, working: bool, success: bool, lint: {}, unit: []):
    def update_build_status(self, diff_id: str, phid: str, working: bool, success: bool, lint: {} = {}, unit: [] = []):
        """Submit collected report to Phabricator.
        """

@@ -193,14 +193,22 @@ def _parse_patch(patch) -> List[Dict[str, str]]:
    return entries


class CheckResult(Enum):
    UNKNOWN = 0
    SUCCESS = 1
    FAILURE = 2
class Step:
    def __init__(self):
        self.name = ''
        self.success = True
        self.duration = 0.0
        self.messages = []

    def set_status_from_exit_code(self, exit_code: int):
        if exit_code != 0:
            self.success = False


class Report:
    def __init__(self):
        self.os = ''
        self.name = ''
        self.comments = []
        self.success = True
        self.working = False

@@ -211,7 +219,7 @@ class Report:
            'fail': 0,
            'skip': 0
        }  # type: Dict[str, int]
        self.steps = []  # type: List
        self.steps = []  # type: List[Step]
        self.artifacts = []  # type: List

    def __str__(self):

@@ -223,13 +231,6 @@ class Report:
            self.lint[key] = []
        self.lint[key].append(m)

    def add_step(self, title: str, result: CheckResult, message: str):
        self.steps.append({
            'title': title,
            'result': result,
            'message': message,
        })

    def add_artifact(self, dir: str, file: str, name: str):
        self.artifacts.append({'dir': dir, 'file': file, 'name': name})
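To illustrate how the new `Step` objects are meant to be consumed: a sketch under the assumption that a caller wires them up roughly like `run_step` in premerge_checks.py below. The `Step` class is restated here so the example runs standalone, and `run_shell_step` is a hypothetical helper, not part of the commit.

```python
import subprocess
import time

class Step:
    # Mirrors the Step class added to phabtalk.py in this commit.
    def __init__(self):
        self.name = ''
        self.success = True
        self.duration = 0.0
        self.messages = []

    def set_status_from_exit_code(self, exit_code: int):
        if exit_code != 0:
            self.success = False

def run_shell_step(name: str, command: str) -> Step:
    # Hypothetical helper: run a command, record duration and success on a Step.
    step = Step()
    step.name = name
    start = time.time()
    r = subprocess.run(command, shell=True)
    step.set_status_from_exit_code(r.returncode)
    step.duration = time.time() - start
    if not step.success:
        step.messages.append(f'"{command}" exited with {r.returncode}')
    return step

s = run_shell_step('true-check', 'true')
print(s.name, s.success, round(s.duration, 3), s.messages)
```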
@@ -14,162 +14,133 @@
# limitations under the License.

# Runs all check on buildkite agent.
import argparse
import json
import logging
import os
import pathlib
import re
import shutil
import subprocess
import time
import uuid
from typing import Callable, Optional
from typing import Callable

import clang_format_report
import clang_tidy_report
import run_cmake
import test_results_report
from phabtalk.phabtalk import Report, CheckResult, PhabTalk
from buildkite.utils import upload_file
from phabtalk.add_url_artifact import maybe_add_url_artifact
from phabtalk.phabtalk import Report, PhabTalk, Step


def upload_file(base_dir: str, file: str):
    """
    Uploads artifact to buildkite and returns URL to it
    """
    r = subprocess.run(f'buildkite-agent artifact upload "{file}"', shell=True, capture_output=True, cwd=base_dir)
    logging.debug(f'upload-artifact {r}')
    match = re.search('Uploading artifact ([^ ]*) ', r.stderr.decode())
    logging.debug(f'match {match}')
    if match:
        url = f'https://buildkite.com/organizations/llvm-project/pipelines/premerge-checks/builds/{os.getenv("BUILDKITE_BUILD_NUMBER")}/jobs/{os.getenv("BUILDKITE_JOB_ID")}/artifacts/{match.group(1)}'
        logging.info(f'uploaded {file} to {url}')
        return url
    else:
        logging.warning(f'could not find artifact {base_dir}/{file}')
        return None


def maybe_add_url_artifact(phab: PhabTalk, url: str, name: str):
    phid = os.getenv('ph_target_phid')
    if phid is None:
        return
    phab.create_artifact(phid, str(uuid.uuid4()), 'uri', {'uri': url, 'ui.external': True, 'name': name})


def add_shell_result(report: Report, name: str, exit_code: int) -> CheckResult:
    logging.info(f'"{name}" exited with {exit_code}')
    z = CheckResult.SUCCESS
    if exit_code != 0:
        z = CheckResult.FAILURE
    report.add_step(name, z, '')
    return z


def ninja_all_report(report: Report) -> CheckResult:
def ninja_all_report(step: Step, _: Report):
    print('Full will be available in Artifacts "ninja-all.log"')
    r = subprocess.run(f'ninja all | '
                       f'tee {artifacts_dir}/ninja-all.log | '
                       f'grep -vE "\\[.*] (Building|Linking|Copying|Generating|Creating)"',
                       shell=True, cwd=build_dir)
    return add_shell_result(report, 'ninja all', r.returncode)
    logging.debug(f'ninja all: returned {r.returncode}, stderr: "{r.stderr}"')
    step.set_status_from_exit_code(r.returncode)


def ninja_check_all_report(report: Report) -> CheckResult:
# TODO: merge running ninja check all and analysing results in one step?
def ninja_check_all_report(step: Step, _: Report):
    print('Full will be available in Artifacts "ninja-check-all.log"')
    r = subprocess.run(f'ninja check-all | tee {artifacts_dir}/ninja-check-all.log | '
                       f'grep -vE "^\\[.*] (Building|Linking)" | '
                       f'grep -vE "^(PASS|XFAIL|UNSUPPORTED):"', shell=True, cwd=build_dir)
    z = add_shell_result(report, 'ninja check all', r.returncode)
    # TODO: check if test-results are present.
    report.add_artifact(build_dir, 'test-results.xml', 'test results')
    test_results_report.run(os.path.join(build_dir, 'test-results.xml'), report)
    return z
    logging.debug(f'ninja check-all: returned {r.returncode}, stderr: "{r.stderr}"')
    step.set_status_from_exit_code(r.returncode)
    test_results_report.run(build_dir, 'test-results.xml', step, report)


def run_step(name: str, report: Report, thunk: Callable[[Report], CheckResult]) -> CheckResult:
    global timings
def run_step(name: str, report: Report, thunk: Callable[[Step, Report], None]) -> Step:
    start = time.time()
    print(f'--- {name}')  # New section in Buildkite log.
    result = thunk(report)
    timings[name] = time.time() - start
    step = Step()
    step.name = name
    thunk(step, report)
    step.duration = time.time() - start
    # Expand section if it failed.
    if result == CheckResult.FAILURE:
    if not step.success:
        print('^^^ +++')
    return result
    report.steps.append(step)
    return step


def cmake_report(report: Report) -> CheckResult:
def cmake_report(step: Step, _: Report):
    global build_dir
    cmake_result, build_dir, cmake_artifacts = run_cmake.run('detect', os.getcwd())
    for file in cmake_artifacts:
        if os.path.exists(file):
            shutil.copy2(file, artifacts_dir)
    return add_shell_result(report, 'cmake', cmake_result)
    step.set_status_from_exit_code(cmake_result)


def furl(url: str, name: Optional[str] = None):
    if name is None:
        name = url
    return f"\033]1339;url='{url}';content='{name}'\a\n"
def as_dict(obj):
    try:
        return obj.toJSON()
    except:
        return obj.__dict__


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs premerge checks8')
    parser.add_argument('--log-level', type=str, default='WARNING')
    parser.add_argument('--check-clang-format', action='store_true')
    parser.add_argument('--check-clang-tidy', action='store_true')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
    build_dir = ''
    logging.basicConfig(level=logging.WARNING, format='%(levelname)-7s %(message)s')
    step_key = os.getenv("BUILDKITE_STEP_KEY")
    scripts_dir = pathlib.Path(__file__).parent.absolute()
    phab = PhabTalk(os.getenv('CONDUIT_TOKEN'), 'https://reviews.llvm.org/api/', False)
    maybe_add_url_artifact(phab, os.getenv('BUILDKITE_BUILD_URL'), 'Buildkite build')
    artifacts_dir = os.path.join(os.getcwd(), 'artifacts')
    os.makedirs(artifacts_dir, exist_ok=True)
    report_path = f'{step_key}_result.json'
    report = Report()
    timings = {}
    cmake_result = run_step('cmake', report, cmake_report)
    if cmake_result == CheckResult.SUCCESS:
        compile_result = run_step('ninja all', report, ninja_all_report)
        if compile_result == CheckResult.SUCCESS:
            run_step('ninja check all', report, ninja_check_all_report)
    report.os = f'{os.getenv("BUILDKITE_AGENT_META_DATA_OS")}'
    report.name = step_key
    report.success = False
    # Create report with failure in case something below fails.
    with open(report_path, 'w') as f:
        json.dump(report.__dict__, f, default=as_dict)
    report.success = True
    cmake = run_step('cmake', report, cmake_report)
    if cmake.success:
        ninja_all = run_step('ninja all', report, ninja_all_report)
        if ninja_all.success:
            run_step('ninja check-all', report, ninja_check_all_report)
    if args.check_clang_tidy:
        run_step('clang-tidy', report,
                 lambda x: clang_tidy_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-tidy.ignore'), x))
                 lambda s, r: clang_tidy_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-tidy.ignore'), s, r))
    if args.check_clang_format:
        run_step('clang-format', report,
                 lambda x: clang_format_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-format.ignore'), x))
    print('+++ summary')
    print(f'Branch {os.getenv("BUILDKITE_BRANCH")} at {os.getenv("BUILDKITE_REPO")}')
    ph_buildable_diff = os.getenv('ph_buildable_diff')
    if ph_buildable_diff is not None:
        url = f'https://reviews.llvm.org/D{os.getenv("ph_buildable_revision")}?id={ph_buildable_diff}'
        print(f'Review: {furl(url)}')
    if os.getenv('BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER') is not None:
        url = f'https://buildkite.com/llvm-project/' \
              f'{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG")}/'\
              f'builds/{os.getenv("BUILDKITE_TRIGGERED_FROM_BUILD_NUMBER")}'
        print(f'Triggered from build {furl(url)}')
                 lambda s, r: clang_format_report.run('HEAD~1', os.path.join(scripts_dir, 'clang-format.ignore'), s, r))
    logging.debug(report)
    success = True
    for s in report.steps:
        mark = 'V'
        if s['result'] == CheckResult.UNKNOWN:
            mark = '?'
        if s['result'] == CheckResult.FAILURE:
            success = False
            mark = 'X'
        msg = s['message']
        if len(msg):
            msg = ': ' + msg
        print(f'{mark} {s["title"]}{msg}')
        mark = 'OK '
        if not s.success:
            report.success = False
            mark = 'FAIL '
        msg = ''
        if len(s.messages):
            msg = ': ' + '\n '.join(s.messages)
        print(f'{mark} {s.name}{msg}')

    # TODO: dump the report and deduplicate tests and other reports later (for multiple OS) in a separate step.
    ph_target_phid = os.getenv('ph_target_phid')
    ph_buildable_diff = os.getenv('ph_buildable_diff')
    if ph_target_phid is not None:
        build_url = f'https://reviews.llvm.org/harbormaster/build/{os.getenv("ph_build_id")}'
        print(f'Reporting results to Phabricator build {furl(build_url)}')
        phab.update_build_status(ph_buildable_diff, ph_target_phid, False, success, report.lint, report.unit)
        phabtalk = PhabTalk(os.getenv('CONDUIT_TOKEN'), 'https://reviews.llvm.org/api/', False)
        for u in report.unit:
            u['engine'] = step_key
        phabtalk.update_build_status(ph_buildable_diff, ph_target_phid, True, report.success, report.lint, report.unit)
        for a in report.artifacts:
            url = upload_file(a['dir'], a['file'])
            if url is not None:
                maybe_add_url_artifact(phab, url, a['name'])
                maybe_add_url_artifact(phabtalk, ph_target_phid, url, f'{a["name"]} (${step_key})')
    else:
        logging.warning('No phabricator phid is specified. Will not update the build status in Phabricator')
    # TODO: add link to report issue on github
    with open(os.path.join(artifacts_dir, 'step-timings.json'), 'w') as f:
        f.write(json.dumps(timings))
    with open(report_path, 'w') as f:
        json.dump(report.__dict__, f, default=as_dict)
    if not report.success:
        print('Build completed with failures')
        exit(1)
@@ -14,6 +14,7 @@
# limitations under the License.

import argparse
import logging
from enum import Enum
from git import Repo
import os

@@ -114,12 +115,14 @@ def _create_args(config: Configuration, llvm_enable_projects: str) -> List[str]:

    # enable sccache
    if 'SCCACHE_DIR' in os.environ:
        logging.info("using sccache")
        arguments.extend([
            '-DCMAKE_C_COMPILER_LAUNCHER=sccache',
            '-DCMAKE_CXX_COMPILER_LAUNCHER=sccache',
        ])
    # enable ccache if the path is set in the environment
    elif 'CCACHE_PATH' in os.environ:
        logging.info("using ccache")
        arguments.extend([
            '-D LLVM_CCACHE_BUILD=ON',
            '-D LLVM_CCACHE_DIR={}'.format(os.environ['CCACHE_PATH']),
@@ -14,23 +14,27 @@
# limitations under the License.

import argparse
import os
import logging
import os
from typing import Optional
from lxml import etree
from phabtalk.phabtalk import Report, CheckResult
from phabtalk.phabtalk import Report, Step


def run(test_results, report: Optional[Report]):
    """Apply clang-format and return if no issues were found."""
def run(working_dir: str, test_results: str, step: Optional[Step], report: Optional[Report]):
    if report is None:
        report = Report()  # For debugging.
    if not os.path.exists(test_results):
        logging.warning(f'{test_results} not found')
        report.add_step('clang-format', CheckResult.UNKNOWN, 'test report is not found')
    if step is None:
        step = Step()
    path = os.path.join(working_dir, test_results)
    if not os.path.exists(path):
        logging.warning(f'{path} is not found')
        step.success = False
        step.messages.append(f'test report "{path}" is not found')
        return
    report.add_artifact(working_dir, test_results, 'test results')
    success = True
    root_node = etree.parse(test_results)
    root_node = etree.parse(path)
    for test_case in root_node.xpath('//testcase'):
        test_result = 'pass'
        if test_case.find('failure') is not None:

@@ -50,21 +54,21 @@ def run(test_results, report: Optional[Report]):
        }
        report.unit.append(test_result)

    msg = f'{report.test_stats["pass"]} tests passed, {report.test_stats["fail"]} failed and' \
    msg = f'{report.test_stats["pass"]} tests passed, {report.test_stats["fail"]} failed and ' \
          f'{report.test_stats["skip"]} were skipped.\n'
    if success:
        report.add_step('test results', CheckResult.SUCCESS, msg)
    else:
    if not success:
        step.success = False
        for test_case in report.unit:
            if test_case['result'] == 'fail':
                msg += f'{test_case["namespace"]}/{test_case["name"]}\n'
        report.add_step('unit tests', CheckResult.FAILURE, msg)
    logging.debug(f'report: {report}')
    logging.debug(f'step: {step}')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Processes results from xml report')
    parser.add_argument('test_report', default='build/test-results.xml')
    parser.add_argument('test-report', default='build/test-results.xml')
    parser.add_argument('--log-level', type=str, default='INFO')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level)
    run(args.test_report, None)
    run(os.getcwd(), args.test_report, None, None)
@@ -17,7 +17,8 @@

param(
    [string]$version = "latest",
    [switch]$testing = $false
    [switch]$testing = $false,
    [string]$workdir = "D:\"
)

$NAME="agent-windows-buildkite"

@@ -28,26 +29,21 @@ Write-Output "y`n" | gcloud auth configure-docker

Write-Output "Pulling new image..."
docker pull ${IMAGE}

Write-Output "Stopping old container..."
docker stop ${NAME}
docker rm ${NAME}

Write-Output "Starting container..."
if (${testing}) {
    docker run -it `
        -v D:\:C:\ws `
        -v ${workdir}:C:\ws `
        -v C:\credentials:C:\credentials `
        -e BUILDKITE_AGENT_NAME=$env:computername `
        -e BUILDKITE_BUILD_PATH=C:\ws `
        --restart unless-stopped `
        --name ${NAME} `
        ${IMAGE} powershell
} else {
    docker run -d `
        -v D:\:C:\ws `
        -v ${workdir}:C:\ws `
        -v C:\credentials:C:\credentials `
        -e BUILDKITE_AGENT_NAME=$env:computername `
        -e BUILDKITE_BUILD_PATH=C:\ws `
        --restart unless-stopped `
        --name ${NAME} `