fix project resolutions
and some of the typing issues found by mypy
This commit is contained in:
parent 018510ccef
commit df318ccf4f
8 changed files with 136 additions and 130 deletions

.gitignore (vendored): 1 change

@@ -7,3 +7,4 @@ containers/workspace
 **/.DS_Store
 **/.ipynb_checkpoints
 scripts/metrics/cache.json
+Pipfile.lock

Pipfile (new file): 21 changes

@@ -0,0 +1,21 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+backoff = "==1.10.0"
+lxml = "==4.6.3"
+pathspec = "==0.8.1"
+phabricator = "==0.8.1"
+pyaml = "==20.4.0"
+requests = "==2.25.1"
+unidiff = "==0.6.0"
+python-benedict = "==0.23.2"
+GitPython = "==3.1.14"
+types-pyyaml = "*"
+
+[dev-packages]
+
+[requires]
+python_version = "3.9"

scripts/.gitignore (vendored): 2 changes

@@ -1 +1 @@
-benchmark/
+benchmark/

@@ -13,10 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Compute the LLVM_ENABLE_PROJECTS for Cmake from diff.
+"""Compute the LLVM_ENABLE_PROJECTS for cmake from diff.
 
-This script will compute which projects are affected by the diff proviaded via STDIN.
-It gets the modified files in the patch, assings them to projects and based on a
+This script will compute which projects are affected by the diff provided via STDIN.
+It gets the modified files in the patch, assigns them to projects and based on a
 project dependency graph it will get the transitively affected projects.
 """
 

@@ -25,56 +25,67 @@ import logging
 import os
 import platform
 import sys
-from typing import Dict, List, Set, Tuple, Optional
-from unidiff import PatchSet
+from typing import Any, Dict, List, Set, TextIO, Tuple, Optional, Union
+from unidiff import PatchSet # type: ignore
 import yaml
 
-# TODO: We could also try to avoid running tests for llvm for projects that
-# only need various cmake scripts and don't actually depend on llvm (e.g.
-# libcxx does not need to run llvm tests, but may still need to include llvm).
 
 
 class ChooseProjects:
     # file where dependencies are defined
     SCRIPT_DIR = os.path.dirname(__file__)
     DEPENDENCIES_FILE = os.path.join(SCRIPT_DIR, 'llvm-dependencies.yaml')
-    # projects used if anything goes wrong
-    FALLBACK_PROJECTS = ['all']
 
     def __init__(self, llvm_dir: Optional[str]):
-        self.llvm_dir = llvm_dir # type: Optional[str]
-        self.defaultProjects = dict() # type: Dict[str, Dict[str, str]]
-        self.dependencies = dict() # type: Dict[str,List[str]]
-        self.usages = dict() # type: Dict[str,List[str]]
-        self.all_projects = [] # type: List[str]
-        self.config = {}
+        self.llvm_dir = llvm_dir
+        self.defaultProjects: Dict[str, Dict[str, str]] = {}
+        # List of projects this project depends on, transitive closure.
+        # E.g. compiler-rt -> [llvm, clang].
+        self.dependencies: Dict[str,Set[str]] = {}
+        # List of projects that depends on this project. It's a full closure.
+        # E.g. llvm -> [clang, libcxx, ...]
+        self.usages: Dict[str, Set[str]] = dict()
+        self.all_projects: List[str] = ['all']
+        self.config: Dict[str, Any] = {}
         self._load_config()
 
     def _load_config(self):
         logging.info('loading project config from {}'.format(self.DEPENDENCIES_FILE))
         with open(self.DEPENDENCIES_FILE) as dependencies_file:
             self.config = yaml.load(dependencies_file, Loader=yaml.SafeLoader)
-        self.dependencies = self.config['dependencies']
-        for user, used_list in self.dependencies.items():
-            for used in used_list:
-                self.usages.setdefault(used, []).append(user)
+        for k, v in self.config['dependencies'].items():
+            self.dependencies[k] = set(v)
+        # Closure of dependencies.
+        while True:
+            updated = False
+            for s in self.dependencies.values():
+                n = len(s)
+                extend = set()
+                for d in s:
+                    extend.update(self.dependencies.get(d, set()))
+                s.update(extend)
+                if len(s) > n:
+                    updated = True
+            if not updated:
+                break
+        # Usages don't need to be closed as dependencies already are.
+        for project, deps in self.dependencies.items():
+            for d in deps:
+                self.usages.setdefault(d, set()).add(project)
+        logging.info(f'computed dependencies: {self.dependencies}')
+        logging.info(f'computed usages: {self.usages}')
         self.all_projects = self.config['allprojects'].keys()
 
-    def get_excluded(self, target: str) -> Set[str]:
-        excluded = self.config['excludedProjects'][target]
-        return set(excluded if excluded is not None else [])
+    def get_excluded(self, os: str) -> Set[str]:
+        """Returns transitive closure for excluded projects"""
+        return self.get_affected_projects(set(self.config['excludedProjects'].get(os, [])))
 
-    def get_check_targets(self, affected_projects: Set[str]) -> Set[str]:
+    def get_check_targets(self, projects: Set[str]) -> Set[str]:
         """Return the `check-xxx` targets to pass to ninja for the given list of projects"""
+        if 'all' in projects:
+            return set(["check-all"])
         targets = set()
         all_projects = self.config['allprojects']
-        for project in affected_projects:
-            if project == "all":
-                targets = set(["check-all"])
-                return targets
-            targets.update(set(all_projects[project]))
+        for project in projects:
+            targets.update(set(all_projects.get(project, [])))
         return targets
 
     @staticmethod
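
The rewritten _load_config above first copies the configured edges into sets, then closes them under transitivity by looping until no dependency set grows, and finally derives the reverse usages map from the closed sets. The same fixed-point step in isolation, on a hypothetical three-project graph:

# Hypothetical edges: clang needs llvm, compiler-rt needs clang.
dependencies = {'llvm': set(), 'clang': {'llvm'}, 'compiler-rt': {'clang'}}

# Closure of dependencies, mirroring the loop added in _load_config.
while True:
    updated = False
    for s in dependencies.values():
        n = len(s)
        extend = set()
        for d in s:
            extend.update(dependencies.get(d, set()))
        s.update(extend)
        if len(s) > n:
            updated = True
    if not updated:
        break

# Reverse map: which projects are affected when a project changes.
usages = {}
for project, deps in dependencies.items():
    for d in deps:
        usages.setdefault(d, set()).add(project)

print(dependencies['compiler-rt'])  # {'clang', 'llvm'} (order may vary)
print(usages['llvm'])               # {'clang', 'compiler-rt'} (order may vary)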

@@ -84,59 +95,50 @@ class ChooseProjects:
             return 'windows'
         return 'linux'
 
-    def choose_projects(self, patch: str = None, current_os: str = None) -> List[str]:
+    def choose_projects(self, patch: str = None, os_name: Optional[str] = None) -> List[str]:
         """List all touched project with all projects that they depend on and also
         all projects that depend on them"""
         if self.llvm_dir is None:
             raise ValueError('path to llvm folder must be set in ChooseProject.')
 
         llvm_dir = os.path.abspath(os.path.expanduser(self.llvm_dir))
         logging.info('Scanning LLVM in {}'.format(llvm_dir))
         if not self.match_projects_dirs():
-            return self.FALLBACK_PROJECTS, set()
+            logging.warning(f'{llvm_dir} does not look like a llvm-project directory')
+            return self.get_all_enabled_projects(os_name)
         changed_files = self.get_changed_files(patch)
         changed_projects, unmapped_changes = self.get_changed_projects(changed_files)
 
         if unmapped_changes:
             logging.warning('There were changes that could not be mapped to a project.'
                             'Building all projects instead!')
-            return self.FALLBACK_PROJECTS, set()
-        return self.extend_projects(changed_projects, current_os)
+            return self.get_all_enabled_projects(os_name)
+        return self.extend_projects(changed_projects, os_name)
 
-    def extend_projects(self, projects: Set[str], current_os : str = None) -> List[str]:
+    def extend_projects(self, projects: Set[str], os_name : Optional[str] = None) -> List[str]:
+        """Given a set of projects returns a set of projects to be tested taking
+        in account exclusions from llvm-dependencies.yaml.
+        """
         logging.info(f'projects: {projects}')
-        if not current_os:
-            current_os = self._detect_os()
-        excluded_projects = self.get_excluded(current_os)
+        if not os_name:
+            os_name = self._detect_os()
+        # Find all affected by current set.
         affected_projects = self.get_affected_projects(projects)
-        logging.info(f'with affected projects: {affected_projects}')
-        to_exclude = affected_projects.intersection(excluded_projects)
-        if len(to_exclude):
-            logging.warning(f'{to_exclude} projects are excluded on {current_os}')
-            affected_projects = affected_projects - to_exclude
-        all_dependencies = set()
-        for project in affected_projects:
-            dependencies = self.get_dependencies(affected_projects)
-            logging.debug(f'> project {project} with dependencies: {dependencies}')
-            to_exclude = dependencies.intersection(excluded_projects)
-            if len(to_exclude) != 0:
-                logging.warning(f'Excluding project {project} because of excluded dependencies {to_exclude}')
-                affected_projects = affected_projects - project
-            else:
-                all_dependencies.update(dependencies)
-        logging.info(f'full dependencies: {all_dependencies}')
-        return sorted(affected_projects), sorted(all_dependencies)
+        logging.info(f'all affected projects(*) {affected_projects}')
+        # Exclude everything that is affected by excluded.
+        excluded_projects = self.get_excluded(os_name)
+        logging.info(f'all excluded projects(*) {excluded_projects}')
+        affected_projects = affected_projects - excluded_projects
+        logging.info(f'effective projects list {affected_projects}')
+        return sorted(affected_projects)
 
     def run(self):
-        affected_projects, dependencies = self.choose_projects()
+        affected_projects = self.choose_projects()
         print("Affected:", ';'.join(affected_projects))
-        print("Dependencies:", ';'.join(dependencies))
+        print("Dependencies:", ';'.join(self.get_dependencies(affected_projects)))
         print("Check targets:", ';'.join(self.get_check_targets(affected_projects)))
         return 0
 
     def match_projects_dirs(self) -> bool:
         """Make sure that all projects are folders in the LLVM dir.
-        Otherwise we can't create the regex...
         """
         subdirs = os.listdir(self.llvm_dir)
         for project in self.all_projects:
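
After this change, exclusion is handled once, on whole closures: the touched projects are expanded through usages, the per-OS exclusion list is expanded the same way through get_excluded, and the difference is the final list. A small worked example with made-up names, and with the exclusion set shown already expanded:

usages = {'llvm': {'clang', 'libcxx'}}    # hypothetical closure: changing llvm affects these
excluded = {'libcxx'}                     # hypothetical per-OS exclusion, already expanded

touched = {'llvm'}
affected = set(touched)
for p in touched:
    affected.update(usages.get(p, set()))

print(sorted(affected - excluded))        # ['clang', 'llvm']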

@@ -146,7 +148,7 @@ class ChooseProjects:
         return True
 
     @staticmethod
-    def get_changed_files(patch_str: str = None) -> Set[str]:
+    def get_changed_files(patch_str: Union[str, TextIO, None] = None) -> Set[str]:
         """get list of changed files from the patch or from STDIN.
         e.g. ['compiler-rt/lib/tsan/CMakeLists.txt']"""
         if patch_str is None:
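
get_changed_files now advertises that it accepts either a patch string or an open stream (or nothing, in which case it reads STDIN); the parsing itself goes through unidiff. A sketch of that step with a made-up one-file patch:

from unidiff import PatchSet  # type: ignore

# Minimal made-up patch, only to demonstrate extracting the touched paths.
patch_text = (
    '--- a/compiler-rt/lib/tsan/CMakeLists.txt\n'
    '+++ b/compiler-rt/lib/tsan/CMakeLists.txt\n'
    '@@ -1 +1 @@\n'
    '-old line\n'
    '+new line\n'
)
changed_files = {f.path for f in PatchSet(patch_text)}
print(changed_files)  # {'compiler-rt/lib/tsan/CMakeLists.txt'}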

@@ -179,49 +181,35 @@ class ChooseProjects:
 
     def get_affected_projects(self, changed_projects: Set[str]) -> Set[str]:
         """Compute transitive closure of affected projects based on the
-        dependencies between the projects."""
-        affected_projects = set(changed_projects)
-        last_len = -1
-        while len(affected_projects) != last_len:
-            last_len = len(affected_projects)
-            changes = set()
-            for project in affected_projects:
-                if project in self.usages:
-                    changes.update(self.usages[project])
-            affected_projects.update(changes)
-
-        logging.info(f'added {affected_projects - changed_projects} projects as they are affected')
-        return affected_projects
+        dependencies between the projects (including initially passed)."""
+        affected: Set[str] = set(changed_projects)
+        for p in changed_projects:
+            affected.update(self.usages.get(p, set()))
+        logging.info(f'added {affected - changed_projects} projects as they are affected')
+        return affected
 
     def get_dependencies(self, projects: Set[str]) -> Set[str]:
-        """Return transitive dependencies for a given project.
+        """Return transitive dependencies for a given projects (including the projects themself).
 
         These are the required dependencies for given `projects` so that they can be built.
         """
-        all_dependencies = set()
-        # Recursive call to add a project and all its dependencies to `all_dependencies`.
-        def add_dependencies_for_project(project: str):
-            if project in all_dependencies:
-                return
-            if project in self.dependencies:
-                for dependent in self.dependencies[project]:
-                    if dependent not in projects:
-                        all_dependencies.add(dependent)
-                        add_dependencies_for_project(dependent)
-        for project in projects:
-            add_dependencies_for_project(project)
-        return all_dependencies
+        affected: Set[str] = set(projects)
+        for p in projects:
+            affected.update(self.dependencies.get(p, set()))
+        return affected
 
-    def get_all_enabled_projects(self) -> List[str]:
+    def get_all_enabled_projects(self, os_name: Optional[str] = None) -> List[str]:
         """Get list of all not-excluded projects for current platform."""
-        return self.extend_projects(set(self.all_projects))
+        return self.extend_projects(set(self.all_projects), os_name)
 
 
 if __name__ == "__main__":
-    logging.basicConfig(filename='choose_projects.log', level=logging.INFO)
     parser = argparse.ArgumentParser(
         description='Compute the projects affected by a change. A patch file is expected on stdin.')
-    parser.add_argument('llvmdir', default='.')
+    parser.add_argument('--llvmdir', type=str, default='.')
+    parser.add_argument('--log-level', type=str, default='INFO')
     args = parser.parse_args()
+    logging.basicConfig(level=args.log_level, format='%(levelname)-7s %(message)s')
+    logging.info(f'checking changes in {args.llvmdir}')
     chooser = ChooseProjects(args.llvmdir)
     sys.exit(chooser.run())
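
Because the per-project closures are now precomputed in _load_config, both lookups above reduce to a single pass over the already-closed maps. End to end, the script can be driven from the command line (patch on stdin, with the --llvmdir and --log-level flags added here) or through the class; a hedged usage sketch, with a hypothetical checkout path and patch file:

from choose_projects import ChooseProjects

cp = ChooseProjects('~/llvm-project')        # hypothetical llvm-project checkout
with open('change.diff') as patch_file:      # hypothetical patch; the CLI reads stdin instead
    affected = cp.choose_projects(patch=patch_file.read(), os_name='linux')
print('Affected:', ';'.join(affected))
print('Check targets:', ';'.join(cp.get_check_targets(set(affected))))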

@@ -20,8 +20,8 @@ dependencies:
   - mlir
   - clang
   libc:
   - clang
   - clang-tools-extra
   # FIXME: not sure about 'libcxx' and 'libcxxabi'
   libcxx: []
   libcxxabi: []

@@ -46,8 +46,7 @@ dependencies:
 
 # List of all projects in the LLVM monorepository. This list is taken from
 # llvm/CMakeLists.txt in "set(LLVM_ALL_PROJECTS ..."
-# The value for all project is the list of targets to tests when this project
-# is affected by a patch.
+# The value for all project is the list of ninja targets to run.
 allprojects:
   clang: ["check-clang"]
   clang-tools-extra: ["check-clang-tools"]
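
Each allprojects entry maps a project name to the ninja targets that exercise it; get_check_targets in the script above folds a set of projects into the union of those targets, with 'all' collapsing to check-all. A minimal sketch using just the two entries visible in this hunk:

allprojects = {'clang': ['check-clang'], 'clang-tools-extra': ['check-clang-tools']}

def check_targets(projects):
    if 'all' in projects:
        return {'check-all'}
    targets = set()
    for p in projects:
        targets.update(allprojects.get(p, []))
    return targets

print(sorted(check_targets({'clang', 'clang-tools-extra'})))  # ['check-clang', 'check-clang-tools']
print(check_targets({'all'}))                                 # {'check-all'}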

@@ -16,11 +16,12 @@
 # Script runs in checked out llvm-project directory.
 
 import os
+from typing import Dict
 from steps import generic_linux, generic_windows, from_shell_output, extend_steps_env, bazel
 from sync_fork import sync_fork
 import git
 import yaml
+from choose_projects import ChooseProjects
 
 steps_generators = [
     '${BUILDKITE_BUILD_CHECKOUT_PATH}/libcxx/utils/ci/buildkite-pipeline-snapshot.sh',

@@ -31,33 +32,30 @@ if __name__ == '__main__':
     no_cache = os.getenv('ph_no_cache') is not None
     log_level = os.getenv('ph_log_level', 'WARNING')
     notify_emails = list(filter(None, os.getenv('ph_notify_emails', '').split(',')))
-    # Syncing LLVM fork so any pipelines started from upstream llvm-project#
+    # Syncing LLVM fork so any pipelines started from upstream llvm-project
     # but then triggered a build on fork will observe the commit.
-    sync_fork(os.path.join(os.getenv('BUILDKITE_BUILD_PATH'), 'llvm-project-fork'), [os.getenv('BUILDKITE_BRANCH'), 'main'])
+    sync_fork(os.path.join(os.getenv('BUILDKITE_BUILD_PATH', ''), 'llvm-project-fork'), [os.getenv('BUILDKITE_BRANCH'), 'main'])
     steps = []
 
-    env = {}
+    env: Dict[str, str] = {}
     for e in os.environ:
         if e.startswith('ph_'):
-            env[e] = os.getenv(e)
+            env[e] = os.getenv(e, '')
     repo = git.Repo('.')
-    steps.extend(generic_linux(
-        os.getenv('ph_projects', 'llvm;clang;clang-tools-extra;libc;libcxx;libcxxabi;lld;libunwind;mlir;openmp;polly;flang'),
-        False))
-    # FIXME: openmp is removed as it constantly fails.
 
-    # TODO: Make this project list be evaluated through "choose_projects"(? as now we define "all" and exclusions in
-    # two placess).
-    steps.extend(generic_windows(
-        os.getenv('ph_projects', 'llvm;clang;clang-tools-extra;libc;lld;mlir;polly;flang')))
+    cp = ChooseProjects(None)
+    linux_projects, _ = cp.get_all_enabled_projects('linux')
+    steps.extend(generic_linux(os.getenv('ph_projects', ';'.join(linux_projects)), check_diff=False))
+    windows_projects, _ = cp.get_all_enabled_projects('windows')
+    steps.extend(generic_windows(os.getenv('ph_projects', ';'.join(windows_projects))))
     steps.extend(bazel([], force=True))
     if os.getenv('ph_skip_generated') is None:
-        e = os.environ.copy()
+        env = os.environ.copy()
         # BUILDKITE_COMMIT might be an alias, e.g. "HEAD". Resolve it to make the build hermetic.
         if ('BUILDKITE_COMMIT' not in env) or (env['BUILDKITE_COMMIT'] == "HEAD"):
             env['BUILDKITE_COMMIT'] = repo.head.commit.hexsha
         for gen in steps_generators:
-            steps.extend(from_shell_output(gen, env=e))
+            steps.extend(from_shell_output(gen, env=env))
 
     notify = []
     for e in notify_emails:
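
With ChooseProjects wired in, the hard-coded project strings are gone: the default list per OS comes from get_all_enabled_projects, and the ph_projects build environment variable still overrides it wholesale. A small sketch of that override (the computed list here is a made-up stand-in):

import os

linux_projects = ['clang', 'libcxx', 'lld', 'llvm', 'mlir']   # stand-in for the computed list

# When ph_projects is set on the build it wins; otherwise the computed list is used.
projects_arg = os.getenv('ph_projects', ';'.join(linux_projects))
print(projects_arg)   # 'clang;libcxx;lld;llvm;mlir' unless ph_projects overrides it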

@@ -17,6 +17,7 @@
 import logging
 import os
+from typing import Dict
 
 from buildkite_utils import annotate, feedback_url, set_metadata
 from choose_projects import ChooseProjects

@@ -45,33 +46,33 @@ if __name__ == '__main__':
     set_metadata('ph_buildable_revision', os.getenv('ph_buildable_revision'))
     set_metadata('ph_build_id', os.getenv("ph_build_id"))
 
-    env = {}
+    env: Dict[str, str] = {}
     for e in os.environ:
         if e.startswith('ph_'):
-            env[e] = os.getenv(e)
+            env[e] = os.getenv(e, '')
     repo = git.Repo('.')
     steps = []
     # List all affected projects.
     patch = repo.git.diff("HEAD~1")
     cp = ChooseProjects('.')
 
-    linux_projects, dependencies = cp.choose_projects(patch = patch, current_os = "linux")
-    logging.info(f'linux_projects: {linux_projects} (dependencies: {dependencies}')
+    linux_projects = cp.choose_projects(patch = patch, os_name = "linux")
+    logging.info(f'linux_projects: {linux_projects}')
     if len(linux_projects) > 0:
-        steps.extend(generic_linux(';'.join(sorted(linux_projects)), True))
+        steps.extend(generic_linux(';'.join(linux_projects), check_diff=True))
 
-    windows_projects, dependencies = cp.choose_projects(patch = patch, current_os = "windows")
-    logging.info(f'windows_projects: {windows_projects} (dependencies: {dependencies}')
+    windows_projects = cp.choose_projects(patch = patch, os_name = "windows")
+    logging.info(f'windows_projects: {windows_projects}')
     if len(windows_projects) > 0:
-        steps.extend(generic_windows(';'.join(sorted(windows_projects))))
+        steps.extend(generic_windows(';'.join(windows_projects)))
 
     # Add custom checks.
     if os.getenv('ph_skip_generated') is None:
-        e = os.environ.copy()
+        env = os.environ.copy()
         # BUILDKITE_COMMIT might be an alias, e.g. "HEAD". Resolve it to make the build hermetic.
-        e["BUILDKITE_COMMIT"] = repo.head.commit.hexsha
+        env["BUILDKITE_COMMIT"] = repo.head.commit.hexsha
         for gen in steps_generators:
-            steps.extend(from_shell_output(gen, env=e))
+            steps.extend(from_shell_output(gen, env=env))
     modified_files = cp.get_changed_files(patch)
     steps.extend(bazel(modified_files))
 
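
Both generated pipelines pin BUILDKITE_COMMIT to a concrete revision so that steps produced by the shell generators stay hermetic even when the variable arrives as an alias like HEAD. The resolution is a single GitPython lookup, as used above:

import git

repo = git.Repo('.')                    # the checked-out repository
resolved = repo.head.commit.hexsha      # concrete sha behind HEAD
print(resolved)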

@@ -23,9 +23,7 @@ import re
 import shutil
 import sys
 import time
-from functools import partial
-from typing import Callable
-
+from typing import Callable, List, Type
 import clang_format_report
 import clang_tidy_report
 import run_cmake

@@ -146,7 +144,7 @@ if __name__ == '__main__':
     else:
         checks = " ".join(cp.get_check_targets(projects))
         logging.info(f"Running checks: {checks}")
-        report_lambda: Callable[Step, Report] = lambda s, r: ninja_check_projects_report(s, r, checks)
+        report_lambda: Callable[[Step, Report], None] = lambda s, r: ninja_check_projects_report(s, r, checks)
         run_step(f"ninja {checks}", report, report_lambda)
     if args.check_clang_tidy:
         if commands_in_build:
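
The Callable fix in the last hunk is the usual mypy correction: parameter types go inside their own list, followed by the return type. Illustrated with stand-ins for the script's Step and Report types:

from typing import Callable

class Step: ...
class Report: ...

# Callable[Step, Report] is rejected by mypy; the parameters need a list and a return type.
report_lambda: Callable[[Step, Report], None] = lambda s, r: None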