1
0
Fork 0

added agent detection

This commit is contained in:
Christian Kühnel 2020-05-06 12:43:35 +02:00
parent 971d4b8b74
commit 0e12d53def

View file

@ -7,6 +7,7 @@ import json
import numpy import numpy
import requests import requests
import os import os
import re
import sys import sys
from typing import Optional, List, Dict from typing import Optional, List, Dict
from urllib.parse import urljoin from urllib.parse import urljoin
@ -30,6 +31,7 @@ class Build:
self.start_time = datetime.datetime.fromtimestamp(build_dict['timestamp']/1000) self.start_time = datetime.datetime.fromtimestamp(build_dict['timestamp']/1000)
self.duration = datetime.timedelta(milliseconds=build_dict['duration']) self.duration = datetime.timedelta(milliseconds=build_dict['duration'])
self.stages = [] # type: List[Stage] self.stages = [] # type: List[Stage]
self.agent = None # type: Optional[str]
@property @property
def hour(self) -> datetime.datetime: def hour(self) -> datetime.datetime:
@ -48,6 +50,10 @@ class Build:
day=self.start_time.day, day=self.start_time.day,
) )
@property
def success(self) -> bool:
    """Whether this build finished successfully.

    Compares the Jenkins build result string case-insensitively
    against 'success'.  Returns False for any other result,
    including a missing one (``None`` — presumably a build that is
    still running; TODO confirm against the Jenkins API payload,
    where the original would have raised AttributeError).
    """
    return self.result is not None and self.result.lower() == 'success'
def update_from_wfdata(self, wfdata: Dict): def update_from_wfdata(self, wfdata: Dict):
self.stages = [Stage(s) for s in wfdata['stages']] self.stages = [Stage(s) for s in wfdata['stages']]
@ -80,19 +86,22 @@ class JenkinsStatsReader:
jobnames = self.fetch_jobsnames() jobnames = self.fetch_jobsnames()
print('Found {} jobs: {}'.format(len(jobnames), jobnames)) print('Found {} jobs: {}'.format(len(jobnames), jobnames))
self.get_builds(jobnames) self.get_builds(jobnames)
self.get_workflow_data() # self.get_workflow_data()
self.get_build_agents()
self.create_statistics('hour') self.create_statistics('hour')
self.create_statistics('day') self.create_statistics('day')
self.write_all_builds()
def cached_get(self, url) -> Dict: def cached_get(self, url, as_json: bool = True):
m = hashlib.sha256() m = hashlib.sha256()
m.update(url.encode('utf-8')) m.update(url.encode('utf-8'))
filename = m.digest().hex() filename = m.digest().hex()
cache_file = os.path.join(self._TMP_DIR, filename) cache_file = os.path.join(self._TMP_DIR, filename)
if os.path.isfile(cache_file): if os.path.isfile(cache_file):
with open(cache_file, 'r') as json_file: with open(cache_file, 'r') as json_file:
data = json.load(json_file) if as_json:
return data return json.load(json_file)
return json_file.read()
response = self._session.get(urljoin(self.jenkins_url, url)) response = self._session.get(urljoin(self.jenkins_url, url))
if response.status_code != 200: if response.status_code != 200:
@ -100,7 +109,9 @@ class JenkinsStatsReader:
os.makedirs(self._TMP_DIR, exist_ok=True) os.makedirs(self._TMP_DIR, exist_ok=True)
with open(cache_file, 'w') as jenkins_data_file: with open(cache_file, 'w') as jenkins_data_file:
jenkins_data_file.write(response.text) jenkins_data_file.write(response.text)
if as_json:
return response.json() return response.json()
return response.text
def fetch_jobsnames(self) -> List[str]: def fetch_jobsnames(self) -> List[str]:
data = self.cached_get('api/json?tree=jobs[name]') data = self.cached_get('api/json?tree=jobs[name]')
@ -114,6 +125,7 @@ class JenkinsStatsReader:
print('{} has {} builds'.format(job_name, len(self.builds[job_name]))) print('{} has {} builds'.format(job_name, len(self.builds[job_name])))
def get_workflow_data(self): def get_workflow_data(self):
print('Getting workflow data...')
for job_name, builds in self.builds.items(): for job_name, builds in self.builds.items():
for i, build in enumerate(builds): for i, build in enumerate(builds):
wfdata = self.cached_get('job/{}/{}/wfapi/'.format(job_name, build.number)) wfdata = self.cached_get('job/{}/{}/wfapi/'.format(job_name, build.number))
@ -121,6 +133,17 @@ class JenkinsStatsReader:
sys.stdout.write('\r{} [{}/{}]'.format(job_name, i, len(builds))) sys.stdout.write('\r{} [{}/{}]'.format(job_name, i, len(builds)))
sys.stdout.flush() sys.stdout.flush()
def get_build_agents(self):
    """Extract the executing agent's name from each build's console log.

    Fetches the console text of every known build and parses the
    'Running on <agent> in' line that appears in the log when a build
    starts executing.  Stores the name on ``build.agent``; builds whose
    log contains no such line keep their previous value (None).
    Progress is reported per job on stdout.
    """
    print('Getting agent names...')
    # Compile once — the same pattern is searched in every console log.
    agent_pattern = re.compile(r'Running on ([\w-]+) in')
    for job_name, builds in self.builds.items():
        for i, build in enumerate(builds):
            console_log = self.cached_get(
                'job/{}/{}/consoleText'.format(job_name, build.number),
                as_json=False)
            match = agent_pattern.search(console_log)
            if match:
                build.agent = match.group(1)
            # i + 1 so the counter ends at [n/n] rather than [n-1/n].
            sys.stdout.write('\r{} [{}/{}]'.format(job_name, i + 1, len(builds)))
            sys.stdout.flush()
        # Finish the carriage-return progress line so the next job's
        # output does not overwrite this one.
        sys.stdout.write('\n')
def create_statistics(self, group_by: str): def create_statistics(self, group_by: str):
for job_name, builds in self.builds.items(): for job_name, builds in self.builds.items():
print('Writing data for {}'.format(job_name)) print('Writing data for {}'.format(job_name))
@ -145,6 +168,22 @@ class JenkinsStatsReader:
'max duration': numpy.max(durations)/60, 'max duration': numpy.max(durations)/60,
}) })
def write_all_builds(self):
    """Dump one CSV row per build, across all jobs, to tmp/jenkins_all_builds.csv.

    Each row holds the build's start date, job name, build number,
    duration, executing agent and success flag.
    """
    fieldnames = ['date', 'job_name', 'build_number', 'duration', 'agent', 'success']
    # Open the output exactly once: the original reopened the file with
    # mode 'w' inside the per-job loop, truncating the rows of every
    # earlier job (only the last job survived) and never closing the
    # handle.  newline='' is the csv module's documented requirement.
    with open('tmp/jenkins_all_builds.csv', 'w', newline='') as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames, dialect=csv.excel)
        writer.writeheader()
        for job_name, builds in self.builds.items():
            for build in builds:
                writer.writerow({
                    'date': build.start_time,
                    'job_name': job_name,
                    'build_number': build.number,
                    'duration': build.duration,
                    'agent': build.agent,
                    'success': build.success,
                })
if __name__ == '__main__': if __name__ == '__main__':
jsr = JenkinsStatsReader() jsr = JenkinsStatsReader()