* [gentoo-commits] proj/tinderbox-cluster:master commit in: buildbot_gentoo_ci/steps/, py/
@ 2023-02-21 1:40 Magnus Granberg
From: Magnus Granberg @ 2023-02-21 1:40 UTC (permalink / raw)
To: gentoo-commits
commit: 021a017b6e7ba89d28fac0e40db2ebb8baad9ce2
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Tue Feb 21 01:39:54 2023 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Tue Feb 21 01:39:54 2023 +0000
URL: https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=021a017b
Get all search patterns that match
Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>
buildbot_gentoo_ci/steps/bugs.py | 4 +-
buildbot_gentoo_ci/steps/logs.py | 178 ++++-----------------------------------
py/log_parser.py | 53 +++++++-----
3 files changed, 49 insertions(+), 186 deletions(-)
diff --git a/buildbot_gentoo_ci/steps/bugs.py b/buildbot_gentoo_ci/steps/bugs.py
index 801fc98..6c85aac 100644
--- a/buildbot_gentoo_ci/steps/bugs.py
+++ b/buildbot_gentoo_ci/steps/bugs.py
@@ -96,8 +96,8 @@ class GetBugs(BuildStep):
for match_word in match_search_text:
if match_word in match_bug_text:
matches = matches + 1
- if matches >= 10:
- print(f"Bug: {str(bug['id'])} Summary: {bug['summary']}")
+ print(f"Bug: {str(bug['id'])} Matched words: {str(matches)} Summary: {bug['summary']}")
+ if matches >= 5:
match = {}
match['match'] = True
match['id'] = bug['id']
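For illustration, a minimal sketch of the word-overlap heuristic this hunk tunes (names follow the diff; the word lists are invented). A candidate bug is now accepted at five shared words instead of ten, and every candidate's match count is printed:

def bug_matches(match_search_text, match_bug_text, threshold=5):
    # Count how many words from the failure text also occur in the
    # candidate bug's summary text.
    matches = 0
    for match_word in match_search_text:
        if match_word in match_bug_text:
            matches = matches + 1
    print(f"Matched words: {matches}")
    return matches >= threshold

# Hypothetical usage with pre-tokenized word lists:
bug_matches(['dev-foo/bar', 'failed', 'compile'],
            ['dev-foo/bar', 'fails', 'to', 'compile', 'on', 'amd64'])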
diff --git a/buildbot_gentoo_ci/steps/logs.py b/buildbot_gentoo_ci/steps/logs.py
index 2a52308..e4fc951 100644
--- a/buildbot_gentoo_ci/steps/logs.py
+++ b/buildbot_gentoo_ci/steps/logs.py
@@ -19,6 +19,7 @@ from buildbot.process.results import FAILURE
from buildbot.process.results import WARNINGS
from buildbot.process.results import SKIPPED
from buildbot.plugins import steps
+from buildbot.plugins import util
from buildbot_gentoo_ci.steps import minio
from buildbot_gentoo_ci.steps import master as master_steps
@@ -35,11 +36,15 @@ def PersOutputOfLogParser(rc, stdout, stderr):
for k, v in json.loads(line).items():
summary_log_dict[int(k)] = {
'text' : v['text'],
- 'type' : v['type'],
- 'status' : v['status'],
- 'id' : v['id'],
- 'search_pattern' : v['search_pattern']
+ 'pattern_infos' : [],
}
+ for s in v['pattern_info']:
+ summary_log_dict[int(k)]['pattern_infos'].append({
+ 'type' : s['type'],
+ 'status' : s['status'],
+ 'id' : s['id'],
+ 'search_pattern' : s['search_pattern'],
+ })
build_summery_output['summary_log_dict'] = summary_log_dict
#FIXME: Handling of stderr output
return {
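For context, a sketch of the per-line JSON this extractor now consumes; the field names follow the hunk above, the concrete values are invented:

import json

# The parser prints one JSON object per line, keyed by log line number,
# now carrying a list of pattern hits instead of a single pattern.
stdout_line = ('{"1523": {"text": " * ERROR: failed (compile phase):", '
               '"pattern_info": [{"type": "compile", "status": "error", '
               '"id": 42, "search_pattern": "ERROR:"}]}}')
summary_log_dict = {}
for k, v in json.loads(stdout_line).items():
    summary_log_dict[int(k)] = {'text': v['text'],
                                'pattern_infos': v['pattern_info']}
print(summary_log_dict[1523]['pattern_infos'][0]['status'])  # -> error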
@@ -140,6 +145,8 @@ class SetupParserBuildLoger(BuildStep):
command.append(log_cpv['full_logname'])
command.append('-u')
command.append(self.getProperty('project_data')['uuid'])
+ command.append('-d')
+ command.append(util.Secret("log_parser_database"))
self.aftersteps_list.append(steps.SetPropertyFromCommand(
name = 'RunBuildLogParser',
haltOnFailure = True,
@@ -152,159 +159,6 @@ class SetupParserBuildLoger(BuildStep):
yield self.build.addStepsAfterCurrentStep(self.aftersteps_list)
return SUCCESS
-class ParserBuildLog(BuildStep):
-
- name = 'ParserBuildLog'
- description = 'Running'
- descriptionDone = 'Ran'
- descriptionSuffix = None
- haltOnFailure = True
- flunkOnFailure = True
-
- def __init__(self, **kwargs):
- self.logfile_text_dict = {}
- self.summery_dict = {}
- self.index = 1
- self.log_search_pattern_list = []
- self.max_text_lines = 0
- super().__init__(**kwargs)
-
- #FIXME: ansifilter
- def ansiFilter(self, text):
- return text
-
- @defer.inlineCallbacks
- def get_log_search_pattern(self):
- # get pattern from the projects
- # add that to log_search_pattern_list
- for project_pattern in (yield self.gentooci.db.projects.getProjectLogSearchPatternByUuid(self.getProperty('project_data')['uuid'])):
- # check if the search pattern is vaild
- try:
- re.compile(project_pattern['search'])
- except re.error:
- print("Non valid regex pattern")
- print(project_pattern)
- else:
- self.log_search_pattern_list.append(project_pattern)
- # get the default project pattern
- # add if not pattern is in project ignore
- self.project_pattern_ignore = yield self.gentooci.db.projects.getProjectLogSearchPatternByUuidAndIgnore(self.getProperty('project_data')['uuid'])
- for project_pattern in (yield self.gentooci.db.projects.getProjectLogSearchPatternByUuid(self.getProperty('default_project_data')['uuid'])):
- if not project_pattern['search'] in self.project_pattern_ignore:
- # check if the search pattern is vaild
- try:
- re.compile(project_pattern['search'])
- except re.error:
- print("Non valid regex pattern")
- print(project_pattern)
- else:
- self.log_search_pattern_list.append(project_pattern)
-
- def search_buildlog(self, tmp_index):
- # get text line to search
- text_line = self.ansiFilter(self.logfile_text_dict[tmp_index])
- # loop true the pattern list for match
- for search_pattern in self.log_search_pattern_list:
- search_hit = False
- if search_pattern['search_type'] == 'in':
- if search_pattern['search'] in text_line:
- search_hit = True
- if search_pattern['search_type'] == 'startswith':
- if text_line.startswith(search_pattern['search']):
- search_hit = True
- if search_pattern['search_type'] == 'endswith':
- if text_line.endswith(search_pattern['search']):
- search_hit = True
- if search_pattern['search_type'] == 'search':
- if re.search(search_pattern['search'], text_line):
- search_hit = True
- # add the line if the pattern match
- if search_hit:
- print(text_line)
- print(search_pattern)
- print(tmp_index)
- self.summery_dict[tmp_index] = {}
- self.summery_dict[tmp_index]['text'] = text_line
- self.summery_dict[tmp_index]['type'] = search_pattern['type']
- self.summery_dict[tmp_index]['status'] = search_pattern['status']
- self.summery_dict[tmp_index]['search_pattern_id'] = search_pattern['id']
- # add upper text lines if requested
- # max 5
- if search_pattern['start'] != 0:
- i = tmp_index - search_pattern['start'] - 1
- match = True
- while match:
- i = i + 1
- if i < (tmp_index - 9) or i == tmp_index:
- match = False
- else:
- if not i in self.summery_dict:
- self.summery_dict[i] = {}
- self.summery_dict[i]['text'] = self.ansiFilter(self.logfile_text_dict[i])
- self.summery_dict[i]['type'] = 'info'
- self.summery_dict[i]['status'] = 'info'
- # add lower text lines if requested
- # max 5
- if search_pattern['end'] != 0:
- i = tmp_index
- end = tmp_index + search_pattern['end']
- match = True
- while match:
- i = i + 1
- if i > self.max_text_lines or i > end:
- match = False
- else:
- if not i in self.summery_dict:
- self.summery_dict[i] = {}
- self.summery_dict[i]['text'] = self.ansiFilter(self.logfile_text_dict[i])
- self.summery_dict[i]['type'] = 'info'
- self.summery_dict[i]['status'] = 'info'
- else:
- # we add all line that start with ' * ' as info
- # we add all line that start with '>>>' but not '>>> /' as info
- if text_line.startswith(' * ') or (text_line.startswith('>>>') and not text_line.startswith('>>> /')):
- if not tmp_index in self.summery_dict:
- self.summery_dict[tmp_index] = {}
- self.summery_dict[tmp_index]['text'] = text_line
- self.summery_dict[tmp_index]['type'] = 'info'
- self.summery_dict[tmp_index]['status'] = 'info'
-
- @defer.inlineCallbacks
- def run(self):
- self.gentooci = self.master.namedServices['services'].namedServices['gentooci']
- yield self.get_log_search_pattern()
- # open the log file
- # read it to a buffer
- # make a dict of the buffer
- # maybe use mulitiprocces to speed up the search
- print(self.getProperty('log_build_data'))
- if self.getProperty('faild_cpv'):
- log_cpv = self.getProperty('log_build_data')[self.getProperty('faild_cpv')]
- else:
- log_cpv = self.getProperty('log_build_data')[self.getProperty('cpv')]
- file_path = yield os.path.join(self.master.basedir, 'workers', self.getProperty('build_workername'), str(self.getProperty("project_build_data")['buildbot_build_id']) ,log_cpv['full_logname'])
- #FIXME: decode it to utf-8
- with io.TextIOWrapper(io.BufferedReader(gzip.open(file_path, 'rb'))) as f:
- for text_line in f:
- self.logfile_text_dict[self.index] = text_line.strip('\n')
- # run the parse patten on the line
- # have a buffer on 10 before we run pattern check
- if self.index >= 10:
- yield self.search_buildlog(self.index - 9)
- # remove text line that we don't need any more
- if self.index >= 20:
- del self.logfile_text_dict[self.index - 19]
- self.index = self.index + 1
- self.max_text_lines = self.index
- f.close()
- # check last 10 lines in logfile_text_dict
- yield self.search_buildlog(self.index - 10)
- print(self.summery_dict)
- # remove all lines with ignore in the dict
- # setProperty summery_dict
- self.setProperty("summary_log_dict", self.summery_dict, 'summary_log_dict')
- return SUCCESS
-
class MakeIssue(BuildStep):
name = 'MakeIssue'
@@ -353,8 +207,9 @@ class MakeIssue(BuildStep):
text_phase_list = []
for k, v in sorted(self.summary_log_dict.items()):
# get the issue error
- if v['type'] == self.error_dict['phase'] and v['status'] == 'error':
- text_issue_list.append(v['text'])
+ for s in v['pattern_infos']:
+ if s['type'] == self.error_dict['phase'] and s['status'] == 'error':
+ text_issue_list.append(v['text'])
# add the issue error
if text_issue_list != []:
self.error_dict['title_issue'] = text_issue_list[0].replace('*', '').strip()
@@ -379,8 +234,9 @@ class MakeIssue(BuildStep):
for k, v in sorted(self.summary_log_dict.items()):
self.summary_log_list.append(v['text'])
#self.error_dict['hash'].update(v['text'].encode('utf-8'))
- if v['status'] == 'warning':
- warning = True
+ for s in v['pattern_infos']:
+ if s['status'] == 'warning':
+ warning = True
# check if the build did fail
if v['text'].startswith(' * ERROR:') and v['text'].endswith(' phase):'):
# get phase error
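A short sketch (invented data) of what the reworked MakeIssue loop does: every pattern hit on a line is inspected, not just one per line, and the first error line for the failing phase becomes the issue title:

summary_log_dict = {
    101: {'text': ' * ERROR: dev-foo/bar-1.0 failed (compile phase):',
          'pattern_infos': [{'type': 'compile', 'status': 'error'}]},
}
phase = 'compile'
text_issue_list = [v['text']
                   for k, v in sorted(summary_log_dict.items())
                   for s in v['pattern_infos']
                   if s['type'] == phase and s['status'] == 'error']
title_issue = text_issue_list[0].replace('*', '').strip()
print(title_issue)  # -> ERROR: dev-foo/bar-1.0 failed (compile phase):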
diff --git a/py/log_parser.py b/py/log_parser.py
index dd48295..f5c4eb5 100644
--- a/py/log_parser.py
+++ b/py/log_parser.py
@@ -64,37 +64,41 @@ def get_log_search_pattern(Session, uuid, default_uuid):
return log_search_pattern
def get_search_pattern_match(log_search_pattern, text_line):
+ match_list = []
for search_pattern in log_search_pattern:
if re.search(search_pattern['search'], text_line):
- return search_pattern
- return False
+ match_list.append(search_pattern)
+ return match_list
def search_buildlog(log_search_pattern, text_line, index):
summary = {}
#FIXME: add check for test
# don't log ignore lines
- if get_search_pattern_match(log_search_pattern['ignore'], text_line):
+ if get_search_pattern_match(log_search_pattern['ignore'], text_line) != []:
return False
# search default pattern
- search_pattern_match = get_search_pattern_match(log_search_pattern['default'], text_line)
- if search_pattern_match:
- summary[index] = dict(
- text = text_line,
- type = search_pattern_match['type'],
- status = search_pattern_match['status'],
- id = search_pattern_match['id'],
- search_pattern = search_pattern_match['search']
- )
- return summary
+ summary[index] = {
+ 'text' : text_line,
+ 'pattern_info' : [],
+ }
+ search_pattern_match_list = get_search_pattern_match(log_search_pattern['default'], text_line)
+ if search_pattern_match_list != []:
+ for search_pattern_match in search_pattern_match_list:
+ summary[index]['pattern_info'].append({
+ 'type' : search_pattern_match['type'],
+ 'status' : search_pattern_match['status'],
+ 'id' : search_pattern_match['id'],
+ 'search_pattern' : search_pattern_match['search'],
+ })
# we add all line that start with ' * ' or '>>>' as info
if text_line.startswith(' * ') or text_line.startswith('>>>'):
- summary[index] = dict(
- text = text_line,
- type = 'info',
- status = 'info',
- id = 0,
- search_pattern = 'auto'
- )
+ summary[index]['pattern_info'].append({
+ 'type' : 'info',
+ 'status' : 'info',
+ 'id' : 0,
+ 'search_pattern' : 'auto',
+ })
+ if summary[index]['pattern_info'] != []:
return summary
return False
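A minimal sketch of the behavioural change in get_search_pattern_match(): every pattern that hits a line is now collected, where the old code returned only the first match (patterns and log line invented):

import re

log_search_pattern = [
    {'search': 'error:', 'type': 'compile', 'status': 'error', 'id': 1},
    {'search': 'ld returned', 'type': 'compile', 'status': 'error', 'id': 2},
]
text_line = 'collect2: error: ld returned 1 exit status'
match_list = [s for s in log_search_pattern
              if re.search(s['search'], text_line)]
print([s['id'] for s in match_list])  # -> [1, 2], both hits are kept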
@@ -104,8 +108,8 @@ def getConfigSettings():
config = json.load(f)
return config
-def getDBSession(config):
- engine = sa.create_engine(config['database'])
+def getDBSession(args):
+ engine = sa.create_engine(args.database)
Session = sa.orm.sessionmaker(bind = engine)
return Session()
@@ -126,7 +130,7 @@ def runLogParser(args):
index = 1
logfile_text_dict = {}
config = getConfigSettings()
- Session = getDBSession(config)
+ Session = getDBSession(args)
#mp_pool = getMultiprocessingPool(config)
summary = {}
#NOTE: The patten is from https://github.com/toralf/tinderbox/tree/master/data files.
@@ -151,6 +155,9 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", required=True)
parser.add_argument("-u", "--uuid", required=True)
+ parser.add_argument("-e", "--default-uuid", required=False)
+ parser.add_argument("-c", "--cpu", required=False)
+ parser.add_argument("-d", "--database", required=True)
args = parser.parse_args()
runLogParser(args)
sys.exit()
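An assumed invocation under the new interface: -d takes a database URL, matching the util.Secret("log_parser_database") value the master now appends to the command (the log name, uuid and URL below are invented):

python3 log_parser.py -f dev-foo:bar-1.0:20230221-013954.log \
    -u 1f9eb022-aaaa-bbbb-cccc-000000000000 \
    -d postgresql://ci:secret@localhost/logparser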
* [gentoo-commits] proj/tinderbox-cluster:master commit in: buildbot_gentoo_ci/steps/, py/
@ 2024-02-21 21:16 Magnus Granberg
From: Magnus Granberg @ 2024-02-21 21:16 UTC (permalink / raw)
To: gentoo-commits
commit: 6d8986da642b5d3b5670be40d8fe8883086c7678
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Wed Feb 21 21:15:49 2024 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Wed Feb 21 21:15:49 2024 +0000
URL: https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=6d8986da
Move search pattern db to settings repository
Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>
buildbot_gentoo_ci/steps/logs.py | 34 ++++---
py/log_parser.py | 206 ++++++++++++++++++++-------------------
2 files changed, 127 insertions(+), 113 deletions(-)
diff --git a/buildbot_gentoo_ci/steps/logs.py b/buildbot_gentoo_ci/steps/logs.py
index 85017a5..50e3f3f 100644
--- a/buildbot_gentoo_ci/steps/logs.py
+++ b/buildbot_gentoo_ci/steps/logs.py
@@ -42,10 +42,13 @@ def PersOutputOfLogParser(rc, stdout, stderr):
}
for s in v['pattern_info']:
summary_log_dict[int(k)]['pattern_infos'].append({
- 'type' : s['type'],
+ 'search_type' : s['search_type'],
'status' : s['status'],
- 'id' : s['id'],
+ 'line' : s['line'],
'search_pattern' : s['search_pattern'],
+ 'phase' : s['phase'],
+ 'uuid' : s['uuid'],
+ 'description' : s['description'],
})
build_summery_output['summary_log_dict'] = summary_log_dict
#FIXME: Handling of stderr output
@@ -118,14 +121,23 @@ class SetupParserBuildLoger(BuildStep):
@defer.inlineCallbacks
def run(self):
+ self.gentooci = self.master.namedServices['services'].namedServices['gentooci']
self.aftersteps_list = []
log_cpv = self.getProperty('log_build_data')[self.getProperty('log_cpv')]
build_log_file_compressed = log_cpv['full_logname'] + '.xz'
mastersrc_log = yield os.path.join(self.getProperty('logsdir'), build_log_file_compressed)
log_py = 'log_parser.py'
- config_log_py = 'logparser.json'
mastersrc_py = yield os.path.join(self.master.basedir, log_py)
- mastersrc_config = yield os.path.join(self.master.basedir, config_log_py)
+ repository_data = yield self.gentooci.db.repositorys.getRepositoryByUuid(self.getProperty("project_data")['settings_repository_uuid'])
+ # Git clone settings repo
+ self.aftersteps_list.append(steps.GitLab(repourl=repository_data['url'],
+ name = 'RunGit',
+ descriptionDone=repository_data['name'],
+ mode=repository_data['mode'],
+ method=repository_data['method'],
+ submodules=True,
+ alwaysUseLatest=repository_data['alwaysuselatest']
+ ))
# Upload logfile to worker
self.aftersteps_list.append(steps.FileDownload(
mastersrc=mastersrc_log,
@@ -136,11 +148,6 @@ class SetupParserBuildLoger(BuildStep):
mastersrc=mastersrc_py,
workerdest=log_py
))
- # Upload log parser py config
- self.aftersteps_list.append(steps.FileDownload(
- mastersrc=mastersrc_config,
- workerdest=config_log_py
- ))
#Untar the log
shell_commad_list = []
shell_commad_list.append('xz')
@@ -159,8 +166,11 @@ class SetupParserBuildLoger(BuildStep):
command.append(log_cpv['full_logname'])
command.append('-u')
command.append(self.getProperty('project_data')['uuid'])
- command.append('-d')
- command.append(util.Secret("log_parser_database"))
+ command.append('-c')
+ #FIXME: set it by images/flavors
+ command.append('8')
+ #FIXME: debug
+ #command.append('-d')
self.aftersteps_list.append(steps.SetPropertyFromCommand(
name = 'RunBuildLogParser',
haltOnFailure = True,
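With this hunk the database secret is gone from the worker command line; the parser is invoked with a core count instead, something like (log name and uuid invented):

python3 log_parser.py -f dev-foo:bar-1.0:20240221-211549.log \
    -u 1f9eb022-aaaa-bbbb-cccc-000000000000 -c 8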
@@ -211,7 +221,7 @@ class MakeIssue(BuildStep):
for k, v in sorted(self.summary_log_dict.items()):
# get the issue error
for s in v['pattern_infos']:
- if s['type'] == self.error_dict['phase'] and s['status'] == 'error':
+ if s['search_type'] == self.error_dict['phase'] and s['status'] == 'error':
text_issue_list.append(v['text'])
# add the issue error
self.error_dict['cpv'] = self.getProperty('log_cpv')
diff --git a/py/log_parser.py b/py/log_parser.py
index eb081a9..e3725bb 100644
--- a/py/log_parser.py
+++ b/py/log_parser.py
@@ -7,113 +7,113 @@ import re
import io
import json
import os
-from sqlalchemy.ext.declarative import declarative_base
-import sqlalchemy as sa
import argparse
-Base = declarative_base()
-
-class ProjectsPattern(Base):
- __tablename__ = "projects_pattern"
- id = sa.Column(sa.Integer, primary_key=True)
- project_uuid = sa.Column(sa.String(36), nullable=False)
- search = sa.Column(sa.String(50), nullable=False)
- start = sa.Column(sa.Integer, default=0)
- end = sa.Column(sa.Integer, default=0)
- status = sa.Column(sa.Enum('info', 'warning', 'ignore', 'error'), default='info')
- type = sa.Column(sa.Enum('info', 'qa', 'compile', 'configure', 'install', 'postinst', 'prepare', 'pretend', 'setup', 'test', 'unpack', 'ignore', 'issues', 'misc', 'elog'), default='info')
- search_type = sa.Column(sa.Enum('in', 'startswith', 'endswith', 'search'), default='in')
+def getJsonFromFile(path, phase):
+ with open(path) as f:
+ try:
+ JsonList = json.load(f)
+ except json.decoder.JSONDecodeError as e:
+ print(f"{e} in file: {path}")
+ return []
+ return JsonList[phase]
-def get_pattern_dict(project_pattern):
+def get_pattern_dict(catchissue, i, uuid):
+ #print(catchissue)
patten_dict = {}
- patten_dict['id'] = project_pattern.id
- patten_dict['project_uuid'] = project_pattern.project_uuid
- patten_dict['search'] = project_pattern.search
- patten_dict['status'] = project_pattern.status
- patten_dict['type'] = project_pattern.type
+ patten_dict['line'] = i
+ patten_dict['uuid'] = uuid
+ patten_dict['string'] = catchissue[0]
+ patten_dict['start'] = catchissue[1]
+ patten_dict['end'] = catchissue[2]
+ patten_dict['status'] = catchissue[3]
+ patten_dict['search_type'] = catchissue[4]
+ patten_dict['url'] = catchissue[5]
+ patten_dict['description'] = catchissue[6]
return patten_dict
-def addPatternToList(Session, log_search_pattern, uuid):
- for project_pattern in Session.query(ProjectsPattern).filter_by(project_uuid=uuid).all():
- # check if the search pattern is vaild
- project_pattern_search = project_pattern.search
- try:
- re.compile(project_pattern_search)
- except re.error:
- print("Non valid regex pattern")
- print(project_pattern.search)
- print(project_pattern.id)
+def addPatternToList(phaseList, log_search_patterns, uuid):
+ for phase in phaseList:
+ if uuid == '00000000-0000-0000-000000000000':
+ path = 'LogPattern'
else:
- if project_pattern.type == 'ignore':
- log_search_pattern['ignore'].append(get_pattern_dict(project_pattern))
- if project_pattern.type == 'test':
- log_search_pattern['test'].append(get_pattern_dict(project_pattern))
+ path = os.path.join('Project', uuid, 'LogPattern')
+ CatchIssueFile = os.path.join(path, 'CatchIssues' + phase + '.json')
+ CatchIssueList = getJsonFromFile(CatchIssueFile, phase)
+ i = 3
+ for catchissue in CatchIssueList:
+ search_pattern = get_pattern_dict(catchissue, i, uuid)
+ try:
+ re.compile(search_pattern['string'])
+ except re.error:
+ print(f"Non valid regex pattern in line: {str(search_pattern['line'])} String: {search_pattern['string']} Project: {search_pattern['uuid']} Phase: {phase}")
else:
- log_search_pattern['default'].append(get_pattern_dict(project_pattern))
- return log_search_pattern
+ log_search_patterns[phase].append(search_pattern)
+ i = i + 1
+ return log_search_patterns
-def get_log_search_pattern(Session, uuid, default_uuid):
+def get_log_search_patterns(uuid):
+ path = os.path.join('LogPattern', 'PhaseList.json')
+ PhaseList = getJsonFromFile(path,'PhaseList')
# get pattern from the projects and add that to log_search_pattern
- log_search_pattern = {}
- log_search_pattern['ignore'] = []
- log_search_pattern['default'] = []
- log_search_pattern['test'] = []
- log_search_pattern = addPatternToList(Session, log_search_pattern, uuid)
- log_search_pattern = addPatternToList(Session, log_search_pattern, default_uuid)
- return log_search_pattern
+ log_search_patterns = {}
+ for phase in PhaseList:
+ log_search_patterns[phase] = []
+ uuid_default = '00000000-0000-0000-000000000000'
+ log_search_patterns = addPatternToList(PhaseList, log_search_patterns, uuid_default)
+ #log_search_pattern = addPatternToList(PhaseList, log_search_pattern, uuid)
+ return log_search_patterns
-def get_search_pattern_match(log_search_pattern, text_line):
- match_list = []
- for search_pattern in log_search_pattern:
- if re.search(search_pattern['search'], text_line):
- match_list.append(search_pattern)
- return match_list
+def get_search_pattern_match(search_pattern, text_line):
+ #print(f"Text: {text_line}")
+ if search_pattern['search_type'] == 'search':
+ if re.search(search_pattern['string'], text_line):
+ #print(f"Match string: {search_pattern['string']} Type: {search_pattern['search_type']}")
+ return True
+ elif search_pattern['search_type'] == 'startswith':
+ if text_line.startswith(search_pattern['string']):
+ #print(f"Match string: {search_pattern['string']} Type: {search_pattern['search_type']}")
+ return True
+ elif search_pattern['search_type'] == 'endswith':
+ if text_line.endswith(search_pattern['string']):
+ #print(f"Match string: {search_pattern['string']} Type: {search_pattern['search_type']}")
+ return True
+ elif search_pattern['search_type'] == 'in':
+ if search_pattern['string'] in text_line:
+ #print(f"Match string: {search_pattern['string']} Type: {search_pattern['search_type']}")
+ return True
+ else:
+ return False
-def search_buildlog(log_search_pattern, text_line, index):
+def search_buildlog(log_search_patterns, text_line, index):
summary = {}
- #FIXME: add check for test
- # don't log ignore lines
- if get_search_pattern_match(log_search_pattern['ignore'], text_line) != []:
- return False
- # search default pattern
summary[index] = {
'text' : text_line,
'pattern_info' : [],
}
- search_pattern_match_list = get_search_pattern_match(log_search_pattern['default'], text_line)
- if search_pattern_match_list != []:
- for search_pattern_match in search_pattern_match_list:
- summary[index]['pattern_info'].append({
- 'type' : search_pattern_match['type'],
- 'status' : search_pattern_match['status'],
- 'id' : search_pattern_match['id'],
- 'search_pattern' : search_pattern_match['search'],
- })
- # we add all line that start with ' * ' or '>>>' as info
- if text_line.startswith(' * ') or text_line.startswith('>>>'):
- summary[index]['pattern_info'].append({
- 'type' : 'info',
- 'status' : 'info',
- 'id' : 0,
- 'search_pattern' : 'auto',
- })
+ for phase, search_patterns in log_search_patterns.items():
+ for search_pattern in search_patterns:
+ match = get_search_pattern_match(search_pattern, text_line)
+ if phase == 'Ignore' and match:
+ return False
+ elif phase != 'Ignore' and match:
+ summary[index]['pattern_info'].append({
+ 'search_type' : search_pattern['search_type'],
+ 'status' : search_pattern['status'],
+ 'line' : search_pattern['line'],
+ 'search_pattern' : search_pattern['string'],
+ 'phase' : phase,
+ 'uuid' : search_pattern['uuid'],
+ 'url' : search_pattern['url'],
+ 'description' : search_pattern['description'],
+ })
if summary[index]['pattern_info'] != []:
+ #print(f"summary: {summary}")
return summary
return False
-def getConfigSettings():
- #configpath = os.getcwd()
- with open('logparser.json') as f:
- config = json.load(f)
- return config
-
-def getDBSession(args):
- engine = sa.create_engine(args.database)
- Session = sa.orm.sessionmaker(bind = engine)
- return Session()
-
-def getMultiprocessingPool(config):
- return Pool(processes = int(config['core']))
+def getMultiprocessingPool(args):
+ return Pool(processes = int(args.cpu))
def getJsonResult(results):
for r in results:
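A plausible shape for one pattern file, inferred from getJsonFromFile() and get_pattern_dict() above; the file name, pattern and URL are invented. This commit reads url and description from indexes 5 and 6 (the follow-up commit below shifts them to 6 and 7, so one element of the tuple is not accounted for here), and the line counter starts at 3, presumably to match line numbers inside the JSON file:

import json

# Hypothetical LogPattern/CatchIssuesCompile.json content, per this
# commit's indexes: [string, start, end, status, search_type, url, description]
catch_issues = json.loads('''
{"Compile": [
  ["undefined reference to", 0, 0, "error", "search",
   "https://example.org/linker-errors", "linker failure"]
]}''')
i = 3
for catchissue in catch_issues['Compile']:
    print(i, catchissue[0], catchissue[3], catchissue[4])
    i = i + 1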
@@ -128,14 +128,11 @@ def getJsonResult(results):
def runLogParser(args):
index = 1
logfile_text_dict = {}
- config = getConfigSettings()
- Session = getDBSession(args)
- #mp_pool = getMultiprocessingPool(config)
summary = {}
#NOTE: The patten is from https://github.com/toralf/tinderbox/tree/master/data files.
- # Is stored in a db instead of files.
- log_search_pattern = get_log_search_pattern(Session, args.uuid, config['default_uuid'])
- Session.close()
+ # Is stored in json files
+ # make dict with it
+ log_search_patterns = get_log_search_patterns(args.uuid)
# read the log file to dict
with open(args.file, encoding='utf8', errors='ignore') as f:
for text_line in f:
@@ -146,21 +143,28 @@ def runLogParser(args):
# index = index + 1
# run the search parse pattern on the text lines
#params = [(log_search_pattern, text, line_index,) for line_index, text in logfile_text_dict.items()]
- with getMultiprocessingPool(config) as pool:
- results = list(pool.apply_async(search_buildlog, args=(log_search_pattern, text, line_index,)) for line_index, text in logfile_text_dict.items())
- #results = pool.starmap(search_buildlog, params)
- getJsonResult(results)
- pool.close()
- pool.join()
+ if not args.debug:
+ with getMultiprocessingPool(args) as pool:
+ results = list(pool.apply_async(search_buildlog, args=(log_search_patterns, text, line_index,)) for line_index, text in logfile_text_dict.items())
+ getJsonResult(results)
+ pool.close()
+ pool.join()
+ else:
+ results = []
+ for line_index, text in logfile_text_dict.items():
+ results.append(search_buildlog(log_search_pattern, text, line_index))
+ #FIXME: Json output
+ #getJsonResult(results)
def main():
# get filename, project_uuid default_project_uuid
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", required=True)
parser.add_argument("-u", "--uuid", required=True)
- parser.add_argument("-e", "--default-uuid", required=False)
- parser.add_argument("-c", "--cpu", required=False)
- parser.add_argument("-d", "--database", required=True)
+ parser.add_argument("-c", "--cpu", required=True)
+ #FIXME: add If args.debug .... wear is needed
+ parser.add_argument("-d", "--debug", action="store_true", required=False)
+
args = parser.parse_args()
runLogParser(args)
sys.exit()
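Assumed invocations under the reworked interface (file name and uuid invented); -c is required in both cases. Note that, as committed, the -d branch passes log_search_pattern to search_buildlog() while the variable in scope is log_search_patterns, so the serial debug path would raise a NameError until renamed:

# parallel, 8 worker processes:
python3 log_parser.py -f build.log -u 1f9eb022-aaaa-bbbb-cccc-000000000000 -c 8
# serial debug run:
python3 log_parser.py -f build.log -u 1f9eb022-aaaa-bbbb-cccc-000000000000 -c 1 -d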
* [gentoo-commits] proj/tinderbox-cluster:master commit in: buildbot_gentoo_ci/steps/, py/
@ 2024-02-22 23:58 Magnus Granberg
From: Magnus Granberg @ 2024-02-22 23:58 UTC (permalink / raw)
To: gentoo-commits
commit: 92b796f7c2580f9bd93a225072d56cbf8f13deaa
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Thu Feb 22 23:58:06 2024 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Thu Feb 22 23:58:06 2024 +0000
URL: https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=92b796f7
Fix phase_error and pattern_infos
Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>
buildbot_gentoo_ci/steps/logs.py | 14 ++++++++++----
py/log_parser.py | 4 ++--
2 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/buildbot_gentoo_ci/steps/logs.py b/buildbot_gentoo_ci/steps/logs.py
index 50e3f3f..aacb8a5 100644
--- a/buildbot_gentoo_ci/steps/logs.py
+++ b/buildbot_gentoo_ci/steps/logs.py
@@ -46,9 +46,10 @@ def PersOutputOfLogParser(rc, stdout, stderr):
'status' : s['status'],
'line' : s['line'],
'search_pattern' : s['search_pattern'],
- 'phase' : s['phase'],
+ 'phase' : s['phase'].lower(),
'uuid' : s['uuid'],
'description' : s['description'],
+ 'url' : s['url'],
})
build_summery_output['summary_log_dict'] = summary_log_dict
#FIXME: Handling of stderr output
@@ -221,7 +222,7 @@ class MakeIssue(BuildStep):
for k, v in sorted(self.summary_log_dict.items()):
# get the issue error
for s in v['pattern_infos']:
- if s['search_type'] == self.error_dict['phase'] and s['status'] == 'error':
+ if s['phase'] == self.error_dict['phase'] and s['status'] == 'error':
text_issue_list.append(v['text'])
# add the issue error
self.error_dict['cpv'] = self.getProperty('log_cpv')
@@ -259,9 +260,14 @@ class MakeIssue(BuildStep):
# check if the build did fail
if v['text'].startswith(' * ERROR:') and v['text'].endswith(' phase):'):
# get phase error
- phase_error = v['text'].split(' (')[1].split(' phase')[0]
- self.error_dict['phase'] = phase_error
+ self.error_dict['phase'] = v['text'].split(' (')[1].split(' phase')[0]
error = True
+ if v['text'].startswith(' * Maintainer:'):
+ Maintainers = []
+ for email in v['text'].split(':')[1].split(' '):
+ if email != '':
+ Maintainers.append(email)
+ self.setProperty("Maintainers", Maintainers, 'Maintainers')
#FIXME: write summary_log_list to a file
# add issue/bug/pr report
if error:
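A runnable sketch of the two parses added above, on invented log lines:

text = ' * ERROR: dev-foo/bar-1.0::gentoo failed (compile phase):'
phase = text.split(' (')[1].split(' phase')[0]
print(phase)  # -> compile

line = ' * Maintainer: maintainer@example.org proxy@example.org'
Maintainers = [email for email in line.split(':')[1].split(' ') if email != '']
print(Maintainers)  # -> ['maintainer@example.org', 'proxy@example.org']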
diff --git a/py/log_parser.py b/py/log_parser.py
index e3725bb..e8f319b 100644
--- a/py/log_parser.py
+++ b/py/log_parser.py
@@ -28,8 +28,8 @@ def get_pattern_dict(catchissue, i, uuid):
patten_dict['end'] = catchissue[2]
patten_dict['status'] = catchissue[3]
patten_dict['search_type'] = catchissue[4]
- patten_dict['url'] = catchissue[5]
- patten_dict['description'] = catchissue[6]
+ patten_dict['url'] = catchissue[6]
+ patten_dict['description'] = catchissue[7]
return patten_dict
def addPatternToList(phaseList, log_search_patterns, uuid):