From c5d1a135763f8c3019d1310444d2596ffa193932 Mon Sep 17 00:00:00 2001 From: Marcelo Robert Santos Date: Wed, 22 Apr 2026 11:37:51 -0300 Subject: [PATCH 1/4] refactor: ruff auto-formatting Running ruff auto-formatting and auto-linting --- regzbot/__init__.py | 2812 ++++++++++++++++++++++-------- regzbot/_rbcmd.py | 168 +- regzbot/_repsources/_bugzilla.py | 269 ++- regzbot/_repsources/_github.py | 172 +- regzbot/_repsources/_gitlab.py | 227 ++- regzbot/_repsources/_lore.py | 153 +- regzbot/_repsources/_trackers.py | 34 +- regzbot/commandl.py | 36 +- regzbot/export_csv.py | 87 +- regzbot/export_mail.py | 335 ++-- regzbot/export_web.py | 727 +++++--- regzbot/testing.py | 30 +- regzbot/testing_offline.py | 1292 +++++++++----- regzbot/testing_online.py | 24 +- regzbot/testing_trackers.py | 79 +- 15 files changed, 4584 insertions(+), 1861 deletions(-) diff --git a/regzbot/__init__.py b/regzbot/__init__.py index 7ab2a14..dff5526 100644 --- a/regzbot/__init__.py +++ b/regzbot/__init__.py @@ -55,7 +55,9 @@ def getby_commit_header(cls, content): @classmethod def getby_content(cls, content, subject=None): def checkfor_diff(content): - if re.search(r'^\-\-\- .*\n\+\+\+.*\n@@', content, re.MULTILINE | re.DOTALL): + if re.search( + r'^\-\-\- .*\n\+\+\+.*\n@@', content, re.MULTILINE | re.DOTALL + ): return PatchKind.DIFF return 0 @@ -74,14 +76,14 @@ def checkfor_subject(content, subject): return patchkind -class RegzbotDbMeta(): +class RegzbotDbMeta: def db_create(version, dbcursor): logger.debug('Initializing new dbtable "RegzbotMeta"') - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE RegzbotMeta ( name TEXT UNIQUE, version INTEGER - )''') + )""") @staticmethod def init(databasedir): @@ -105,7 +107,9 @@ def table_exists(tablename, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name=(?)", (tablename, )).fetchone() + "SELECT name FROM sqlite_master WHERE type='table' AND 
name=(?)", + (tablename,), + ).fetchone() if dbresult: return True return False @@ -114,27 +118,31 @@ def table_exists(tablename, dbcursor=None): def set_tableversion(tablename, version, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() - dbcursor.execute(''' + dbcursor.execute( + """ INSERT INTO RegzbotMeta - VALUES(?, ?)''', (tablename, version)) + VALUES(?, ?)""", + (tablename, version), + ) -class RegzbotState(): +class RegzbotState: @staticmethod def db_create(version, dbcursor): logger.debug('Initializing new dbtable "RegzbotState"') - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE RegzbotState ( attribute TEXT UNIQUE, value STRING - )''') + )""") @staticmethod def get(attribute, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT value FROM RegzbotState WHERE attribute=(?)', (attribute, )).fetchone() + 'SELECT value FROM RegzbotState WHERE attribute=(?)', (attribute,) + ).fetchone() if dbresult: return dbresult[0] return False @@ -143,12 +151,15 @@ def get(attribute, dbcursor=None): def set(attribute, value, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() - dbcursor.execute(''' + dbcursor.execute( + """ INSERT OR REPLACE INTO RegzbotState - VALUES(?, ?)''', (attribute, value)) + VALUES(?, ?)""", + (attribute, value), + ) -class RecordProcessedMsgids(): +class RecordProcessedMsgids: def __init__(self, msgid, gmtime): self.msgid = msgid self.gmtime = gmtime @@ -157,23 +168,24 @@ def __init__(self, msgid, gmtime): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "msgidrecord"') RegzbotDbMeta.set_tableversion('msgidrecord', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE msgidrecord ( msgid STRING NOT NULL PRIMARY KEY, gmtime INTEGER NOT NULL - )''') + )""") @staticmethod def add(msgid, gmtime, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO msgidrecord + dbcursor.execute( + 
"""INSERT INTO msgidrecord (msgid, gmtime) - VALUES (?, ?)''', - (msgid, gmtime)) - logger.debug( - '[db msgidrecord] insert (msgid:%s, gmtime:%s)', msgid, gmtime) + VALUES (?, ?)""", + (msgid, gmtime), + ) + logger.debug('[db msgidrecord] insert (msgid:%s, gmtime:%s)', msgid, gmtime) @staticmethod def check_presence(msgid, gmtime=None, dbcursor=None): @@ -181,7 +193,8 @@ def check_presence(msgid, gmtime=None, dbcursor=None): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM msgidrecord WHERE msgid=(?)', (msgid, )).fetchone() + 'SELECT * FROM msgidrecord WHERE msgid=(?)', (msgid,) + ).fetchone() if dbresult: return True elif gmtime: @@ -193,25 +206,29 @@ def check_presence(msgid, gmtime=None, dbcursor=None): def delete(msgid): dbcursor = DBCON.cursor() if RecordProcessedMsgids.check_presence(msgid, dbcursor=dbcursor): - dbcursor.execute('''DELETE FROM msgidrecord - WHERE msgid=(?)''', - (msgid, )) - logger.debug( - '[db msgidrecord] removed msgid: %s', msgid) + dbcursor.execute( + """DELETE FROM msgidrecord + WHERE msgid=(?)""", + (msgid,), + ) + logger.debug('[db msgidrecord] removed msgid: %s', msgid) @staticmethod def cleanup(cutoff_days): dbcursor = DBCON.cursor() - cutoff_gmtime = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - (cutoff_days * 86400) - dbcursor.execute('''DELETE FROM msgidrecord - WHERE gmtime < (?)''', - (cutoff_gmtime, )) + cutoff_gmtime = int( + datetime.datetime.now(datetime.timezone.utc).timestamp() + ) - (cutoff_days * 86400) + dbcursor.execute( + """DELETE FROM msgidrecord + WHERE gmtime < (?)""", + (cutoff_gmtime,), + ) if dbcursor.rowcount > 0: - logger.debug( - '[db msgidrecord] removed %s stale entries', dbcursor.rowcount) + logger.debug('[db msgidrecord] removed %s stale entries', dbcursor.rowcount) -class GitBranch(): +class GitBranch: def __init__(self, gitbranchid, gittreeid, name, lastchked): self.gitbranchid = gitbranchid self.gittreeid = gittreeid @@ -223,24 +240,28 @@ def __init__(self, 
gitbranchid, gittreeid, name, lastchked): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "gitbranches"') RegzbotDbMeta.set_tableversion('gitbranches', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE gitbranches ( gitbranchid INTEGER NOT NULL PRIMARY KEY, gittreeid INTEGER NOT NULL, name STRING NOT NULL, lastchked STRING - )''') + )""") @staticmethod def add(gittree, branchname, lastchked): branchname = branchname.removeprefix('origin/') dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO gitbranches + dbcursor.execute( + """INSERT INTO gitbranches (gittreeid, name, lastchked) - VALUES (?, ?, ?)''', - (gittree.gittreeid, branchname, lastchked)) - logger.debug('[db gitbranches] insert (gitbranchid:%s, gittreeid:%s, branchname:%s, lastchked:%s)' % ( - dbcursor.lastrowid, gittree.gittreeid, branchname, lastchked)) + VALUES (?, ?, ?)""", + (gittree.gittreeid, branchname, lastchked), + ) + logger.debug( + '[db gitbranches] insert (gitbranchid:%s, gittreeid:%s, branchname:%s, lastchked:%s)' + % (dbcursor.lastrowid, gittree.gittreeid, branchname, lastchked) + ) return dbcursor.lastrowid def commit_exists(self, identifier, repo=None, gittree=None): @@ -253,19 +274,17 @@ def commit_exists(self, identifier, repo=None, gittree=None): try: # reminder: just relying on the exception is not enough here, as it will *not* fire # if the commit exists in the tree, but in another branch :-/ - result = repo.git.branch( - self.lookupname, '--all', '--contains', identifier) + result = repo.git.branch(self.lookupname, '--all', '--contains', identifier) if gittree.name == 'next': # the commit or tag seems to be present, but not in the current branch -- but we do not care about that return True if result: return True except git.exc.GitCommandError as err: - output = err.args[2].decode("utf-8") - ignored = {'error: malformed object name', - 'error: no such commit'} + output = err.args[2].decode('utf-8') + ignored = {'error: 
malformed object name', 'error: no such commit'} if not any(x in output for x in ignored): - logger.critical("GitCommandError: {0}".format(err)) + logger.critical('GitCommandError: {0}'.format(err)) logger.critical(err.args) return False @@ -273,13 +292,14 @@ def describe(self, gittreename): if self.name == 'master' or self.name == 'main': return gittreename else: - return "%s/%s" % (gittreename, self.name) + return '%s/%s' % (gittreename, self.name) @staticmethod def get_by_id(gitbranchid): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM gitbranches WHERE gitbranchid=(?)', (gitbranchid, )).fetchone() + 'SELECT * FROM gitbranches WHERE gitbranchid=(?)', (gitbranchid,) + ).fetchone() if dbresult: return GitBranch(*dbresult) return None @@ -292,7 +312,9 @@ def get_by_treeid_branchname(gittreeid, name): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM gitbranches WHERE gittreeid=(?) AND name=(?)', (gittreeid, name)).fetchone() + 'SELECT * FROM gitbranches WHERE gittreeid=(?) 
AND name=(?)', + (gittreeid, name), + ).fetchone() if dbresult: return GitBranch(*dbresult) @@ -307,7 +329,9 @@ def getall(order='gittreeid gitbranchid'): @staticmethod def getall_by_gittreeid(gittreeid): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM gitbranches WHERE gittreeid=(?)', (gittreeid,)): + for dbresult in dbcursor.execute( + 'SELECT * FROM gitbranches WHERE gittreeid=(?)', (gittreeid,) + ): yield GitBranch(*dbresult) def head_at_gmtime(self, gmtime, *, repo=None): @@ -316,11 +340,16 @@ def head_at_gmtime(self, gmtime, *, repo=None): repo = gittree.repo() try: - head = repo.git.rev_list('--first-parent', '--until="%s"' % gmtime, '-n 1', 'origin/%s' % self.name) + head = repo.git.rev_list( + '--first-parent', + '--until="%s"' % gmtime, + '-n 1', + 'origin/%s' % self.name, + ) return repo.commit(head) except git.exc.GitCommandError as err: - errmsg = err.args[2].decode("utf-8") - print("GitCommandError: {0}".format(errmsg)) + errmsg = err.args[2].decode('utf-8') + print('GitCommandError: {0}'.format(errmsg)) print(err.args) return None @@ -332,7 +361,9 @@ def is_abandoned(self, repo=None): gittree = GitTree.get_by_id(self.gittreeid) repo = gittree.repo() - date_offset = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - 86400 * 63 + date_offset = ( + int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - 86400 * 63 + ) date_head = repo.commit(self.lookupname).committed_date if date_head < date_offset: return True @@ -349,9 +380,11 @@ def get_date(repo, hexsha): try: # inspired by https://stackoverflow.com/a/20615706 ancestry_path = repo.git.rev_list( - '--ancestry-path', "%s..origin/%s" % (hexsha, self.name)).splitlines() + '--ancestry-path', '%s..origin/%s' % (hexsha, self.name) + ).splitlines() first_parent = repo.git.rev_list( - '--first-parent', "%s..origin/%s" % (hexsha, self.name)).splitlines() + '--first-parent', '%s..origin/%s' % (hexsha, self.name) + ).splitlines() # committed directly if 
len(ancestry_path) == 0: @@ -362,8 +395,8 @@ def get_date(repo, hexsha): if commit in first_parent: return get_date(repo, commit) except git.exc.GitCommandError as err: - errmsg = err.args[2].decode("utf-8") - logger.critical("GitCommandError: {0}".format(errmsg)) + errmsg = err.args[2].decode('utf-8') + logger.critical('GitCommandError: {0}'.format(errmsg)) logger.critical(err.args) return None @@ -388,7 +421,10 @@ def subject_exists(self, subject, gittree=None, repo=None): else: if gittree.name != 'mainline': logger.warning( - 'GitBranch.subject_exists(): could not find a merge base for the tree %s branch %s', gittree.name, self.name) + 'GitBranch.subject_exists(): could not find a merge base for the tree %s branch %s', + gittree.name, + self.name, + ) iterrange = self.lookupname # now search for a commit with the subject @@ -409,11 +445,13 @@ def url_by_id(gitbranchid, entry): def set_lastchked(self, lastchked): dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE gitbranches SET lastchked = (?) WHERE gitbranchid=(?)''', - (lastchked, self.gitbranchid)) + dbcursor.execute( + """UPDATE gitbranches SET lastchked = (?) 
WHERE gitbranchid=(?)""", + (lastchked, self.gitbranchid), + ) -class GitTree(): +class GitTree: def __init__(self, gittreeid, name, server, kind, weburl, branchregex, priority): self.gittreeid = gittreeid self.name = name @@ -428,7 +466,7 @@ def __init__(self, gittreeid, name, server, kind, weburl, branchregex, priority) def db_create(version, dbcursor): logger.debug('Initializing new dbtable "gittrees"') RegzbotDbMeta.set_tableversion('gittrees', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE gittrees ( gittreeid INTEGER NOT NULL PRIMARY KEY, name STRING NOT NULL, @@ -437,17 +475,21 @@ def db_create(version, dbcursor): weburl STRING NOT NULL, branchregex STRING NOT NULL, priority INTEGER NOT NULL - )''') + )""") @staticmethod def add(name, server, kind, weburl, branchregex, priority): dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO gittrees + dbcursor.execute( + """INSERT INTO gittrees (name, server, kind, weburl, branchregex, priority) - VALUES (?, ?, ?, ?, ?, ?)''', - (name, server, kind, weburl, branchregex, priority)) - logger.debug('[db gittrees] insert (gittreeid:%s, name:%s, server:%s, kind:%s, weburl:%s, branchregex:%s, priority: %s)' % ( - dbcursor.lastrowid, name, server, kind, weburl, branchregex, priority)) + VALUES (?, ?, ?, ?, ?, ?)""", + (name, server, kind, weburl, branchregex, priority), + ) + logger.debug( + '[db gittrees] insert (gittreeid:%s, name:%s, server:%s, kind:%s, weburl:%s, branchregex:%s, priority: %s)' + % (dbcursor.lastrowid, name, server, kind, weburl, branchregex, priority) + ) return dbcursor.lastrowid def commit(self, hexsha): @@ -473,13 +515,13 @@ def commit_describe(self, identifier, contains): result = re.sub('-[0-9]+-g[0-9,a-f]+$', '', result) return result, True except git.exc.GitCommandError as err: - output = err.args[2].decode("utf-8") + output = err.args[2].decode('utf-8') if 'fatal: cannot describe' in output: # commit present, but unabled to describe, as since then no commit 
was tagged return None, True - ignored = ('error: malformed object name') + ignored = 'error: malformed object name' if not any(x in output for x in ignored): - logger.critical("GitCommandError: {0}".format(err)) + logger.critical('GitCommandError: {0}'.format(err)) logger.critical(err.args) return None, None @@ -505,14 +547,25 @@ def commit_find_new(hexsha=None, subject=None, ascending=True): repo = gittree.repo() for gitbranch in GitBranch.getall_by_gittreeid(gittree.gittreeid): if gitbranch.is_abandoned(): - logger.debug("gittree, %s, %s: branch abandoned, skipping lookup", gittree.name, gitbranch.name) + logger.debug( + 'gittree, %s, %s: branch abandoned, skipping lookup', + gittree.name, + gitbranch.name, + ) continue if hexsha and gitbranch.commit_exists(hexsha, repo): yield gittree, gitbranch, hexsha continue if subject: - logger.debug("gittree, %s, %s: searching for subject '%s'", gittree.name, gitbranch.name, subject) - hexsha = gitbranch.subject_exists(subject, gittree=gittree, repo=repo) + logger.debug( + "gittree, %s, %s: searching for subject '%s'", + gittree.name, + gitbranch.name, + subject, + ) + hexsha = gitbranch.subject_exists( + subject, gittree=gittree, repo=repo + ) if hexsha: yield gittree, gitbranch, hexsha continue @@ -534,7 +587,8 @@ def check_latest_versions(repo=None): gittree = GitTree.get_by_name('mainline') if not gittree: logger.critical( - "Unable to determine current and next version, as it's determined from a gittree with the name 'mainline', which could not be found.") + "Unable to determine current and next version, as it's determined from a gittree with the name 'mainline', which could not be found." 
+ ) return False repo = gittree.repo() @@ -545,13 +599,14 @@ def check_latest_versions(repo=None): 'previous': None, } - re_expectedtags = re.compile( - r'^(v[0-9]+\.[0-9]+)(-rc[0-9]+)*(-dontuse)*$') + re_expectedtags = re.compile(r'^(v[0-9]+\.[0-9]+)(-rc[0-9]+)*(-dontuse)*$') for line in repo.git.tag('--sort=-creatordate').splitlines(): match = re_expectedtags.search(line) if match is None: logger.critical( - "aborting: encountered a tag that doesn't follow the expected pattern ('%s')" % line) + "aborting: encountered a tag that doesn't follow the expected pattern ('%s')" + % line + ) sys.exit(1) if match.group(2): @@ -571,12 +626,16 @@ def check_latest_versions(repo=None): LATEST_VERSIONS['previous'] = match.group(1) break logger.critical( - "Unable to determine current and next version, could not find expected tags") + 'Unable to determine current and next version, could not find expected tags' + ) return False logger.debug( "'next' is now '%s', 'latest' is now '%s', and 'previous' is now '%s'", - LATEST_VERSIONS['indevelopment'], LATEST_VERSIONS['latest'], LATEST_VERSIONS['previous']) + LATEST_VERSIONS['indevelopment'], + LATEST_VERSIONS['latest'], + LATEST_VERSIONS['previous'], + ) @staticmethod def getall(FIXME=''): @@ -588,7 +647,8 @@ def getall(FIXME=''): def get_by_id(gittreeid): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM gittrees WHERE gittreeid=(?)', (gittreeid, )).fetchone() + 'SELECT * FROM gittrees WHERE gittreeid=(?)', (gittreeid,) + ).fetchone() if dbresult: return GitTree(*dbresult) return None @@ -597,7 +657,8 @@ def get_by_id(gittreeid): def get_by_name(treename): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM gittrees WHERE name=(?)', (treename, )).fetchone() + 'SELECT * FROM gittrees WHERE name=(?)', (treename,) + ).fetchone() if dbresult: return GitTree(*dbresult) return None @@ -610,7 +671,9 @@ def greplogmsgs(self, pattern): since = "--since='Aug 15 0:0:0 UTC 2010'" try: - for result in 
repo.git.log('--pretty=%H', since, '--all', '--grep=%s' % pattern).splitlines(): + for result in repo.git.log( + '--pretty=%H', since, '--all', '--grep=%s' % pattern + ).splitlines(): yield result except Exception: return @@ -630,36 +693,57 @@ def getregression(regression, regid): regressionfull = None for gittree in cls.getall(FIXME='ORDER BY priority ASC'): - searchprefix = "\(Link\|Closes\):" + searchprefix = '\(Link\|Closes\):' searchmain = repsrc.get_searchpattern() - searchstring = "%s.*%s" % (searchprefix, searchmain) - logger.debug("[GitTree] Trying to find '%s' in gittree %s", searchstring, gittree.name) + searchstring = '%s.*%s' % (searchprefix, searchmain) + logger.debug( + "[GitTree] Trying to find '%s' in gittree %s", + searchstring, + gittree.name, + ) for commit_hexsha in gittree.greplogmsgs(searchstring): for gitbranch in GitBranch.getall_by_gittreeid(gittree.gittreeid): - logger.debug("[GitTree] Found '%s' in this tree, thus checking branch '%s' now" % - (searchstring, gitbranch.describe(gittree.name))) + logger.debug( + "[GitTree] Found '%s' in this tree, thus checking branch '%s' now" + % (searchstring, gitbranch.describe(gittree.name)) + ) if gitbranch.commit_exists(commit_hexsha, repo=gittree.repo()): - logger.debug("[GitTree] Found %s in %s", commit_hexsha, gitbranch.describe(gittree.name)) + logger.debug( + '[GitTree] Found %s in %s', + commit_hexsha, + gitbranch.describe(gittree.name), + ) commit = gittree.commit(commit_hexsha) - getregression(regressionfull, regression.regid).commitmention(gittree, gitbranch, commit) + getregression(regressionfull, regression.regid).commitmention( + gittree, gitbranch, commit + ) - if '..' in regression.introduced \ - or len(regression.introduced) < 11: + if '..' 
in regression.introduced or len(regression.introduced) < 11: # we don't need to search for those continue - searchstring = "Fixes: %s" % regression.introduced[0:12] - logger.debug("[GitTree] Trying to find '%s' in gittree %s", searchstring, gittree.name) + searchstring = 'Fixes: %s' % regression.introduced[0:12] + logger.debug( + "[GitTree] Trying to find '%s' in gittree %s", + searchstring, + gittree.name, + ) for commit_hexsha in gittree.greplogmsgs(searchstring): for gitbranch in GitBranch.getall_by_gittreeid(gittree.gittreeid): - logger.debug("[GitTree] Found '%s' in this tree, thus checking branch '%s' now" % - (searchstring, gitbranch.describe(gittree.name))) + logger.debug( + "[GitTree] Found '%s' in this tree, thus checking branch '%s' now" + % (searchstring, gitbranch.describe(gittree.name)) + ) if gitbranch.commit_exists(commit_hexsha, repo=gittree.repo()): if RegHistory.present(commit_hexsha, regid=regression.regid): # no need to add a second entry for commits that already were noticed as related, # for example if this msg that already has a Link: to this regression continue - logger.debug("[GitTree] Found %s in %s", commit_hexsha, gitbranch.describe(gittree.name)) + logger.debug( + '[GitTree] Found %s in %s', + commit_hexsha, + gitbranch.describe(gittree.name), + ) commit = gittree.commit(commit_hexsha) mergedate = gitbranch.merge_date(commit.hexsha, gittree.repo()) if gmtime and gmtime > mergedate: @@ -667,10 +751,16 @@ def getregression(regression, regid): mergedate = gmtime + 1 # no activity, only a history entry, as it might be about different bug in the same commit - RegHistory.event(regression.regid, mergedate, commit.hexsha, commit.summary, '%s' % commit.author, - gitbranchid=gitbranch.gitbranchid, - regzbotcmd="note: '%s' in '%s' contains a 'Fixes:' tag for the culprit of this regression" - % (commit.hexsha[0:12], gitbranch.describe(gittree.name))) + RegHistory.event( + regression.regid, + mergedate, + commit.hexsha, + commit.summary, + '%s' % 
commit.author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd="note: '%s' in '%s' contains a 'Fixes:' tag for the culprit of this regression" + % (commit.hexsha[0:12], gitbranch.describe(gittree.name)), + ) def update(self): # update @@ -686,7 +776,8 @@ def update(self): continue gitbranch = GitBranch.get_by_treeid_branchname( - self.gittreeid, repobranch.name) + self.gittreeid, repobranch.name + ) # if we encounter this branch for the first time, start to track it # Note: we'll miss the first batch of commits if this is a new stable branch – but @@ -698,8 +789,7 @@ def update(self): # if nothing changed, there is nothing to do for us here if gitbranch.lastchked == repobranch.commit.hexsha: - logger.debug("nothing new in %s/%s ", - self.name, gitbranch.name) + logger.debug('nothing new in %s/%s ', self.name, gitbranch.name) continue # if this is mainline repo, update the latest versions variable @@ -712,12 +802,20 @@ def update(self): # now check new commits for links re_link = re.compile( - r'(^\s*Link:\s*|^\s*Closes:\s*)(http.*?)(\s.*)?\n', re.MULTILINE) - for commit in repo.iter_commits(('--reverse', gitbranch.lastchked + '..' + repobranch.commit.hexsha)): + r'(^\s*Link:\s*|^\s*Closes:\s*)(http.*?)(\s.*)?\n', re.MULTILINE + ) + for commit in repo.iter_commits( + ('--reverse', gitbranch.lastchked + '..' + repobranch.commit.hexsha) + ): # is this a commit we are waiting for? 
for expected_fix in expected_fixes: - if (expected_fix['solved_entry'] and commit.hexsha.startswith(expected_fix['solved_entry'])) \ - or (expected_fix['solved_subject'] and commit.summary == expected_fix['solved_subject']): + if ( + expected_fix['solved_entry'] + and commit.hexsha.startswith(expected_fix['solved_entry']) + ) or ( + expected_fix['solved_subject'] + and commit.summary == expected_fix['solved_subject'] + ): regression = RegressionBasic.get_by_regid(expected_fix['regid']) if regression.fixedby_found(self, gitbranch, commit): # this was fixed, no need to look closer at the commit @@ -731,33 +829,50 @@ def update(self): regression = None if not regression: logger.debug( - "Saw link to %s, but not aware of any regressions about it", match.group(2)) + 'Saw link to %s, but not aware of any regressions about it', + match.group(2), + ) else: regression.commitmention(self, gitbranch, commit) # now check if this commit contains a Fixed: tag that mentions a commit known to cause a regression - for match in re.finditer('^(Fixes: )([0-9,a-f]{12})( )', commit.message, re.MULTILINE): + for match in re.finditer( + '^(Fixes: )([0-9,a-f]{12})( )', commit.message, re.MULTILINE + ): # only fill this now, as we only need it if we found a Fixes: tag if len(open_regressions) == 0: for regression in RegressionBasic.get_all(only_unsolved=True): if '..' 
not in regression.introduced: - open_regressions[regression.regid] = regression.introduced[0:12] + open_regressions[regression.regid] = ( + regression.introduced[0:12] + ) - if not match.group(2) in open_regressions.values(): + if match.group(2) not in open_regressions.values(): continue for regid in open_regressions.keys(): if not open_regressions[regid] == match.group(2): continue - if RegHistory.present(commit.hexsha, regid=regid, gitbranchid=gitbranch.gitbranchid): + if RegHistory.present( + commit.hexsha, + regid=regid, + gitbranchid=gitbranch.gitbranchid, + ): # no need to add a second entry for commits that already were noticed as related, # for example if this msg that already has a Link: to this regression continue # no activity, only a history entry, as it might be about different bug in the same commit mergedate = gitbranch.merge_date(commit.hexsha, self.repo()) - RegHistory.event(regid, mergedate, commit.hexsha, commit.summary, '%s' % commit.author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd="note: '%s' in '%s' contains a 'Fixes:' tag for the culprit of this regression" - % (commit.hexsha[0:12], gitbranch.describe(self.name))) + RegHistory.event( + regid, + mergedate, + commit.hexsha, + commit.summary, + '%s' % commit.author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd="note: '%s' in '%s' contains a 'Fixes:' tag for the culprit of this regression" + % (commit.hexsha[0:12], gitbranch.describe(self.name)), + ) # and we are done here gitbranch.set_lastchked(repobranch.commit.hexsha) @@ -768,8 +883,19 @@ def updateall(): gittree.update() -class RegActivityMonitor(): - def __init__(self, actimonid, regid, repsrcid, entry, gmtime, subject, authorname, authormail, lastchk): +class RegActivityMonitor: + def __init__( + self, + actimonid, + regid, + repsrcid, + entry, + gmtime, + subject, + authorname, + authormail, + lastchk, + ): self.actimonid = actimonid self.regid = regid self.repsrcid = repsrcid @@ -812,7 +938,7 @@ def repsrc(self): def 
db_create(version, dbcursor): logger.debug('Initializing new dbtable "actmonitor"') RegzbotDbMeta.set_tableversion('actmonitor', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE actmonitor ( actimonid INTEGER NOT NULL PRIMARY KEY, regid INTEGER NOT NULL, @@ -823,18 +949,31 @@ def db_create(version, dbcursor): authorname STRING, authormail STRING, lastchk INTEGER - )''') + )""") @staticmethod def add(regid, repsrcid, entry, gmtime, subject, authorname, authormail): dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO actmonitor + dbcursor.execute( + """INSERT INTO actmonitor (regid, repsrcid, entry, gmtime, subject, authorname, authormail) - VALUES (?, ?, ?, ?, ?, ?, ?)''', - (regid, repsrcid, entry, gmtime, subject, authorname, authormail)) + VALUES (?, ?, ?, ?, ?, ?, ?)""", + (regid, repsrcid, entry, gmtime, subject, authorname, authormail), + ) - logger.debug('[db actmonitor] inserting (actimonid:%s, regid:%s, repsrcid:%s, entry:%s, gmtime:%s, subject:%s, authorname:%s, authormail:%s)' % ( - dbcursor.lastrowid, regid, repsrcid, entry, gmtime, subject, authorname, authormail)) + logger.debug( + '[db actmonitor] inserting (actimonid:%s, regid:%s, repsrcid:%s, entry:%s, gmtime:%s, subject:%s, authorname:%s, authormail:%s)' + % ( + dbcursor.lastrowid, + regid, + repsrcid, + entry, + gmtime, + subject, + authorname, + authormail, + ) + ) return dbcursor.lastrowid @@ -846,23 +985,45 @@ def delete(self, dbcursor=None): for activity in RegActivityEvent.getall_by_actimonid(self.actimonid): activity.delete() - dbcursor.execute('''DELETE FROM actmonitor - WHERE actimonid=(?)''', - (self.actimonid, )) + dbcursor.execute( + """DELETE FROM actmonitor + WHERE actimonid=(?)""", + (self.actimonid,), + ) if dbcursor.rowcount > 0: - logger.debug('[db actmonitor] deleted (actimonid:%s, regid:%s, repsrcid:%s, entry:%s)', - self.actimonid, self.regid, self.repsrcid, self.entry) + logger.debug( + '[db actmonitor] deleted (actimonid:%s, regid:%s, 
repsrcid:%s, entry:%s)', + self.actimonid, + self.regid, + self.repsrcid, + self.entry, + ) else: - logger.critical('[db actmonitor] failed to deleted entry (actimonid:%s, regid:%s, repsrcid:%s, entry:%s;)', - self.actimonid, self.regid, self.repsrcid, self.entry) + logger.critical( + '[db actmonitor] failed to deleted entry (actimonid:%s, regid:%s, repsrcid:%s, entry:%s;)', + self.actimonid, + self.regid, + self.repsrcid, + self.entry, + ) def remove(self): dbcursor = DBCON.cursor() - dbcursor.execute('''DELETE FROM actmonitor - WHERE actimonid=(?)''', - (self.actimonid, )) - logger.debug('[db actmonitor] deleted (actimonid:%s, regid:%s, repsrcid:%s, entry:%s; %s)' % ( - self.actimonid, self.regid, self.repsrcid, self.entry, dbcursor.lastrowid)) + dbcursor.execute( + """DELETE FROM actmonitor + WHERE actimonid=(?)""", + (self.actimonid,), + ) + logger.debug( + '[db actmonitor] deleted (actimonid:%s, regid:%s, repsrcid:%s, entry:%s; %s)' + % ( + self.actimonid, + self.regid, + self.repsrcid, + self.entry, + dbcursor.lastrowid, + ) + ) RegActivityEvent.remove(actimonid=self.actimonid) return True @@ -870,7 +1031,8 @@ def remove(self): def get(actimonid): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM actmonitor WHERE actimonid=(?)', (actimonid, )).fetchone() + 'SELECT * FROM actmonitor WHERE actimonid=(?)', (actimonid,) + ).fetchone() if dbresult: return RegActivityMonitor(*dbresult) return None @@ -879,12 +1041,18 @@ def get(actimonid): def get_by_reg_n_reptrd(cls, regression, reptrd): dbcursor = DBCON.cursor() if reptrd.repsrc.kind == 'lore': - for dbresult in dbcursor.execute('SELECT * FROM actmonitor WHERE regid=(?) and entry=(?)', (regression.regid, reptrd.id)): + for dbresult in dbcursor.execute( + 'SELECT * FROM actmonitor WHERE regid=(?) 
and entry=(?)', + (regression.regid, reptrd.id), + ): regactmon = cls(*dbresult) if ReportSource.islore(regactmon.repsrcid): yield regactmon else: - for dbresult in dbcursor.execute('SELECT * FROM actmonitor WHERE regid=(?) AND repsrcid=(?) and entry=(?)', (regression.regid, reptrd.repsrc.id, reptrd.id)): + for dbresult in dbcursor.execute( + 'SELECT * FROM actmonitor WHERE regid=(?) AND repsrcid=(?) and entry=(?)', + (regression.regid, reptrd.repsrc.id, reptrd.id), + ): yield cls(*dbresult) @classmethod @@ -895,7 +1063,7 @@ def get_by_regid(cls, regid, reports=None): else: sqlquery = 'SELECT * FROM actmonitor WHERE regid=(?)' - for dbresult in dbcursor.execute(sqlquery, (regid, )): + for dbresult in dbcursor.execute(sqlquery, (regid,)): yield RegActivityMonitor(*dbresult) @classmethod @@ -907,20 +1075,25 @@ def getall_by_regid(cls, regid, reports=None): else: sqlquery = 'SELECT * FROM actmonitor WHERE regid=(?)' - for dbresult in dbcursor.execute(sqlquery, (regid, )): + for dbresult in dbcursor.execute(sqlquery, (regid,)): yield RegActivityMonitor(*dbresult) @staticmethod def get_by_entry(entry): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM actmonitor WHERE entry=(?)', (entry, )): + for dbresult in dbcursor.execute( + 'SELECT * FROM actmonitor WHERE entry=(?)', (entry,) + ): return RegActivityMonitor(*dbresult) @classmethod def get_by_reptrd(cls, reptrd): if reptrd.repsrc.kind != 'lore': dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM actmonitor WHERE repsrcid=(?) AND entry=(?)', (reptrd.repsrc.id, reptrd.id)): + for dbresult in dbcursor.execute( + 'SELECT * FROM actmonitor WHERE repsrcid=(?) 
AND entry=(?)', + (reptrd.repsrc.id, reptrd.id), + ): yield RegActivityMonitor(*dbresult) else: actimonids_found = [] @@ -939,7 +1112,9 @@ def get_by_reptrd(cls, reptrd): def get_by_regactivity(cls, entry): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT actmonitor.* FROM actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid WHERE regactivity.entry=?', (entry,)).fetchone() + 'SELECT actmonitor.* FROM actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid WHERE regactivity.entry=?', + (entry,), + ).fetchone() if dbresult: return cls(*dbresult) return None @@ -948,34 +1123,64 @@ def get_by_regactivity(cls, entry): def ismonitored(entry, regid=None, repsrcid=None): dbcursor = DBCON.cursor() if regid and repsrcid: - if dbcursor.execute('SELECT * FROM actmonitor WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', (regid, repsrcid, entry)).fetchone() is not None: + if ( + dbcursor.execute( + 'SELECT * FROM actmonitor WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', + (regid, repsrcid, entry), + ).fetchone() + is not None + ): return True else: - if dbcursor.execute('SELECT * FROM actmonitor WHERE entry=(?)', (entry, )).fetchone() is not None: + if ( + dbcursor.execute( + 'SELECT * FROM actmonitor WHERE entry=(?)', (entry,) + ).fetchone() + is not None + ): return True return False def update_author(self, authorname, authormail): dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE actmonitor + dbcursor.execute( + """UPDATE actmonitor SET authorname = (?), authormail = (?) 
- WHERE actimonid=(?)''', - (authorname, authormail, self.actimonid)) - logger.debug("[db_actmonitor] %s (regid %s): author is now '%s', authormail now '%s'.", - self.actimonid, self.regid, authorname, authormail) + WHERE actimonid=(?)""", + (authorname, authormail, self.actimonid), + ) + logger.debug( + "[db_actmonitor] %s (regid %s): author is now '%s', authormail now '%s'.", + self.actimonid, + self.regid, + authorname, + authormail, + ) self.authorname = authorname self.authormail = authormail -class RegActivityEvent(): +class RegActivityEvent: # reminder: can either get added directly or indirectly via RegActivityMonitor, # hence eiher _actimonid or _regid is set - DBCOLS = "regactivity.gmtime, regactivity.entry, regactivity.subentry, regactivity.subject, regactivity.author, regactivity.repsrcid, \ - regactivity.gitbranchid, regactivity.actimonid, regactivity.regid, regactivity.patchkind" - - def __init__(self, gmtime, entry, subentry, subject, author, repsrcid, gitbranchid, actimonid, regid, patchkind): + DBCOLS = 'regactivity.gmtime, regactivity.entry, regactivity.subentry, regactivity.subject, regactivity.author, regactivity.repsrcid, \ + regactivity.gitbranchid, regactivity.actimonid, regactivity.regid, regactivity.patchkind' + + def __init__( + self, + gmtime, + entry, + subentry, + subject, + author, + repsrcid, + gitbranchid, + actimonid, + regid, + patchkind, + ): self.gmtime = gmtime self.entry = entry self.subentry = subentry @@ -994,7 +1199,7 @@ def __init__(self, gmtime, entry, subentry, subject, author, repsrcid, gitbranch def db_create(version, dbcursor): logger.debug('Initializing new dbtable "regactivity"') RegzbotDbMeta.set_tableversion('regactivity', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE regactivity ( gmtime INTEGER NOT NULL, entry STRING NOT NULL, @@ -1006,7 +1211,7 @@ def db_create(version, dbcursor): regid INTEGER, patchkind INTEGER, subentry STRING - )''') + )""") def delete(self, dbcursor=None): if 
not dbcursor: @@ -1018,25 +1223,64 @@ def delete(self, dbcursor=None): # delete if self._actimonid: - dbcursor.execute('''DELETE FROM regactivity - WHERE gmtime=(?) AND entry=(?) AND subject=(?) AND actimonid=(?)''', - (self.gmtime, self.entry, self.subject, self._actimonid)) + dbcursor.execute( + """DELETE FROM regactivity + WHERE gmtime=(?) AND entry=(?) AND subject=(?) AND actimonid=(?)""", + (self.gmtime, self.entry, self.subject, self._actimonid), + ) elif self._regid: - dbcursor.execute('''DELETE FROM regactivity - WHERE gmtime=(?) AND entry=(?) AND subject=(?) AND regid=(?)''', - (self.gmtime, self.entry, self.subject, self._regid, )) + dbcursor.execute( + """DELETE FROM regactivity + WHERE gmtime=(?) AND entry=(?) AND subject=(?) AND regid=(?)""", + ( + self.gmtime, + self.entry, + self.subject, + self._regid, + ), + ) if dbcursor.rowcount > 0: - logger.debug('[db regactivity] deleted (gmtime:%s, entry:"%s", subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s)', - self.gmtime, self.entry, self.subject, self.author, self.repsrcid, self.gitbranchid, self._actimonid, self._regid) + logger.debug( + '[db regactivity] deleted (gmtime:%s, entry:"%s", subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s)', + self.gmtime, + self.entry, + self.subject, + self.author, + self.repsrcid, + self.gitbranchid, + self._actimonid, + self._regid, + ) else: - logger.debug('[db regactivity] failed to deleted delete entry (gmtime:%s, entry:"%s", subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s)', - self.gmtime, self.entry, self.subject, self.author, self.repsrcid, self.gitbranchid, self._actimonid, self._regid) + logger.debug( + '[db regactivity] failed to deleted delete entry (gmtime:%s, entry:"%s", subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s)', + self.gmtime, + self.entry, + self.subject, + self.author, + self.repsrcid, + self.gitbranchid, + self._actimonid, + 
self._regid, + ) @staticmethod - def event(gmtime, entry, subject, author=None, repsrcid=None, gitbranchid=None, actimonid=None, regid=None, patchkind=0, subentry=None): + def event( + gmtime, + entry, + subject, + author=None, + repsrcid=None, + gitbranchid=None, + actimonid=None, + regid=None, + patchkind=0, + subentry=None, + ): def _getout(): import traceback + traceback.print_stack() sys.exit(1) @@ -1044,40 +1288,73 @@ def _getout(): if repsrcid is None and gitbranchid is None: logger.critical( 'this should not happen: RegActivityEvent.event(%s, %s, %s, %s, %s, %s, %s) was called without specifying either repsrcid or gitbranchid; ' - % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid) + ) _getout() if repsrcid and gitbranchid: logger.critical( 'this should not happen: RegActivityEvent.event(%s, %s, %s, %s, %s, %s, %s) was called with specifying both repsrcid or gitbranchid' - % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid) + ) _getout() # a few lines from the department of "this should not happen, but better ensure it doesn't": if actimonid is None and regid is None: logger.critical( 'this should not happen: RegActivityEvent.event(%s, %s, %s, %s, %s, %s, %s) was called without specifying either actimonid or regid; ' - % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid) + ) _getout() if actimonid and regid: logger.critical( 'this should not happen: RegActivityEvent.event(%s, %s, %s, %s, %s, %s, %s) was called with specifying both actimonid or regid' - % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, actimonid, regid) + ) _getout() patchkind = int(patchkind) - logger.debug('[db regactivity] insert (gmtime:%s, entry:"%s", 
subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s, patchkind:%s, subentry:%s)' % ( - gmtime, entry, subject, author, repsrcid, gitbranchid, actimonid, regid, patchkind, subentry)) + logger.debug( + '[db regactivity] insert (gmtime:%s, entry:"%s", subject:"%s", author:"%s", repsrcid:%s, gitbranchid:%s, actimonid:%s, regid:%s, patchkind:%s, subentry:%s)' + % ( + gmtime, + entry, + subject, + author, + repsrcid, + gitbranchid, + actimonid, + regid, + patchkind, + subentry, + ) + ) dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO regactivity + dbcursor.execute( + """INSERT INTO regactivity (gmtime, entry, subject, author, repsrcid, gitbranchid, actimonid, regid, patchkind, subentry) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''', - (gmtime, entry, subject, author, repsrcid, gitbranchid, actimonid, regid, patchkind, subentry)) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", + ( + gmtime, + entry, + subject, + author, + repsrcid, + gitbranchid, + actimonid, + regid, + patchkind, + subentry, + ), + ) @classmethod def getall_by_actimonid(cls, actimonid): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT %s FROM regactivity WHERE actimonid=(?)' % RegActivityEvent.DBCOLS, (actimonid, )): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regactivity WHERE actimonid=(?)' % RegActivityEvent.DBCOLS, + (actimonid,), + ): yield cls(*dbresult) @classmethod @@ -1085,7 +1362,7 @@ def get_all(cls, regid, onlyonce=True): def _getall_actimonids(regid): actimonids = list() for actimon in RegActivityMonitor.getall_by_regid(regid): - actimonids.append("%s" % actimon.actimonid) + actimonids.append('%s' % actimon.actimonid) return actimonids # prepare query with an unkown number of items in the "WHERE IN" clause @@ -1095,36 +1372,56 @@ def _getall_actimonids(regid): dbcursor = DBCON.cursor() if onlyonce: - for dbresult in dbcursor.execute('SELECT DISTINCT %s FROM regactivity WHERE actimonid IN (%s) OR regid=(?) 
ORDER BY gmtime' % (RegActivityEvent.DBCOLS, placeholders), replacements): + for dbresult in dbcursor.execute( + 'SELECT DISTINCT %s FROM regactivity WHERE actimonid IN (%s) OR regid=(?) ORDER BY gmtime' + % (RegActivityEvent.DBCOLS, placeholders), + replacements, + ): yield cls(*dbresult) else: - for dbresult in dbcursor.execute('SELECT %s FROM regactivity WHERE actimonid IN (%s) OR regid=(?) ORDER BY gmtime' % (RegActivityEvent.DBCOLS, placeholders), replacements): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regactivity WHERE actimonid IN (%s) OR regid=(?) ORDER BY gmtime' + % (RegActivityEvent.DBCOLS, placeholders), + replacements, + ): yield cls(*dbresult) @staticmethod def present(entry, actimonid=None, regid=None, gitbranchid=None, subentry=None): if not actimonid and not regid: - logger.critical("Aborting, RegActivitaEvent.present() called with neither actimonid or regid.") + logger.critical( + 'Aborting, RegActivitaEvent.present() called with neither actimonid or regid.' + ) sys.exit(1) elif actimonid and regid: - logger.critical("Aborting, RegActivitaEvent.present() called with both actimonid or regid set.") + logger.critical( + 'Aborting, RegActivitaEvent.present() called with both actimonid or regid set.' + ) sys.exit(1) dbcursor = DBCON.cursor() if actimonid: if gitbranchid: dbresult = dbcursor.execute( - 'SELECT * FROM regactivity WHERE actimonid=(?) AND entry=(?) AND gitbranchid=(?)', (actimonid, entry, gitbranchid)).fetchone() + 'SELECT * FROM regactivity WHERE actimonid=(?) AND entry=(?) AND gitbranchid=(?)', + (actimonid, entry, gitbranchid), + ).fetchone() else: dbresult = dbcursor.execute( - 'SELECT * FROM regactivity WHERE actimonid=(?) AND entry=(?)', (actimonid, entry)).fetchone() + 'SELECT * FROM regactivity WHERE actimonid=(?) AND entry=(?)', + (actimonid, entry), + ).fetchone() elif regid: if gitbranchid: dbresult = dbcursor.execute( - 'SELECT * FROM regactivity WHERE regid=(?) AND entry=(?) 
AND gitbranchid=(?)', (regid, entry, gitbranchid)).fetchone() + 'SELECT * FROM regactivity WHERE regid=(?) AND entry=(?) AND gitbranchid=(?)', + (regid, entry, gitbranchid), + ).fetchone() else: dbresult = dbcursor.execute( - 'SELECT * FROM regactivity WHERE regid=(?) AND entry=(?)', (regid, entry)).fetchone() + 'SELECT * FROM regactivity WHERE regid=(?) AND entry=(?)', + (regid, entry), + ).fetchone() if dbresult is None: return False @@ -1135,12 +1432,17 @@ def present(entry, actimonid=None, regid=None, gitbranchid=None, subentry=None): def remove(actimonid=None): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT actimonid FROM regactivity WHERE actimonid=(?)', (actimonid, )).fetchone() + 'SELECT actimonid FROM regactivity WHERE actimonid=(?)', (actimonid,) + ).fetchone() if dbresult is not None: - dbcursor.execute('''DELETE FROM regactivity - WHERE actimonid=(?)''', - (actimonid, )) - logger.debug('[db regactivity] deleted all lines where actimonid=%s)', actimonid) + dbcursor.execute( + """DELETE FROM regactivity + WHERE actimonid=(?)""", + (actimonid,), + ) + logger.debug( + '[db regactivity] deleted all lines where actimonid=%s)', actimonid + ) RegActivityEvent.remove(actimonid=dbcursor.lastrowid) return True return False @@ -1151,7 +1453,7 @@ def url(self): return ReportSource.url_by_id(self.repsrcid, self.entry, subentry=self.subentry) -class RegBackburner(): +class RegBackburner: def __init__(self, regid, repsrcid, entry, gmtime, author, subject, timelimit): self.regid = regid self.gmtime = gmtime @@ -1165,7 +1467,7 @@ def __init__(self, regid, repsrcid, entry, gmtime, author, subject, timelimit): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "regbackburner"') RegzbotDbMeta.set_tableversion('regbackburner', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE regbackburner ( regid INTEGER NOT NULL, repsrcid INTEGER, @@ -1174,7 +1476,7 @@ def db_create(version, dbcursor): author STRING, 
subject STRING, timelimit INTEGER - )''') + )""") @classmethod def add(cls, regid, repsrcid, entry, gmtime, author, subject, timelimit=0): @@ -1184,18 +1486,28 @@ def add(cls, regid, repsrcid, entry, gmtime, author, subject, timelimit=0): cls.remove(regid, dbcursor) # add entry - dbcursor.execute('''INSERT INTO regbackburner + dbcursor.execute( + """INSERT INTO regbackburner (regid, repsrcid, entry, gmtime, author, subject) - VALUES (?, ?, ?, ?, ?, ?)''', - (regid, repsrcid, entry, gmtime, author, subject)) - logger.debug('[db regbackburner] insert (regid:%s, repsrcid:%s, entry:%s, gmtime:%s, author:"%s", subject:"%s")', - regid, repsrcid, entry, gmtime, author, subject) + VALUES (?, ?, ?, ?, ?, ?)""", + (regid, repsrcid, entry, gmtime, author, subject), + ) + logger.debug( + '[db regbackburner] insert (regid:%s, repsrcid:%s, entry:%s, gmtime:%s, author:"%s", subject:"%s")', + regid, + repsrcid, + entry, + gmtime, + author, + subject, + ) @classmethod def get_by_regid(cls, regid): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM regbackburner WHERE regid=?', (regid,)).fetchone() + 'SELECT * FROM regbackburner WHERE regid=?', (regid,) + ).fetchone() if dbresult: return cls(*dbresult) return None @@ -1205,13 +1517,17 @@ def remove(regid, dbcursor=None): if dbcursor is None: dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT subject FROM regbackburner WHERE regid=(?)', (regid,)).fetchone() + 'SELECT subject FROM regbackburner WHERE regid=(?)', (regid,) + ).fetchone() if dbresult is not None: - dbcursor.execute('''DELETE FROM regbackburner - WHERE regid=(?)''', - (regid, )) + dbcursor.execute( + """DELETE FROM regbackburner + WHERE regid=(?)""", + (regid,), + ) logger.debug( - '[db regbackburner] delete (regid:%s, subject:%s)', regid, dbresult[0]) + '[db regbackburner] delete (regid:%s, subject:%s)', regid, dbresult[0] + ) return True return False @@ -1219,8 +1535,10 @@ def report_url(self): return 
ReportSource.url_by_id(self.repsrcid, self.entry) -class RegHistory(): - def __init__(self, regid, gmtime, entry, subject, regzbotcmd, gitbranchid, repsrcid, author): +class RegHistory: + def __init__( + self, regid, gmtime, entry, subject, regzbotcmd, gitbranchid, repsrcid, author + ): self.regid = regid self.gmtime = gmtime self.entry = entry @@ -1236,7 +1554,7 @@ def __init__(self, regid, gmtime, entry, subject, regzbotcmd, gitbranchid, repsr def db_create(version, dbcursor): logger.debug('Initializing new dbtable "reghistory"') RegzbotDbMeta.set_tableversion('reghistory', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE reghistory ( regid INTEGER NOT NULL, gmtime INTEGER NOT NULL, @@ -1246,7 +1564,7 @@ def db_create(version, dbcursor): gitbranchid INTEGER, repsrcid INTEGER, author STRING - )''') + )""") def delete(self, dbcursor=None): if not dbcursor: @@ -1255,62 +1573,125 @@ def delete(self, dbcursor=None): if self.repsrcid and ReportSource.get_by_id(self.repsrcid, dbcursor).ismail(): RecordProcessedMsgids.delete(self.entry) - dbcursor.execute('''DELETE FROM reghistory - WHERE regid=(?) AND gmtime=(?) AND entry=(?) AND subject=(?)''', - (self.regid, self.gmtime, self.entry, self.subject,)) + dbcursor.execute( + """DELETE FROM reghistory + WHERE regid=(?) AND gmtime=(?) AND entry=(?) 
AND subject=(?)""", + ( + self.regid, + self.gmtime, + self.entry, + self.subject, + ), + ) if dbcursor.rowcount > 0: - logger.debug('[db reghistory] deleted (regid:%s, gmtime:%s, entry:%s, subject:"%s", regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)', - self.regid, self.gmtime, self.entry, self.subject, self.regzbotcmd, self.gitbranchid, self.repsrcid) + logger.debug( + '[db reghistory] deleted (regid:%s, gmtime:%s, entry:%s, subject:"%s", regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)', + self.regid, + self.gmtime, + self.entry, + self.subject, + self.regzbotcmd, + self.gitbranchid, + self.repsrcid, + ) return True else: - logger.debug('[db reghistory] failed to deleted entry (regid:%s, gmtime:%s, entry:%s, subject:"%s", regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)', - self.regid, self.gmtime, self.entry, self.subject, self.regzbotcmd, self.gitbranchid, self.repsrcid) + logger.debug( + '[db reghistory] failed to deleted entry (regid:%s, gmtime:%s, entry:%s, subject:"%s", regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)', + self.regid, + self.gmtime, + self.entry, + self.subject, + self.regzbotcmd, + self.gitbranchid, + self.repsrcid, + ) return False @staticmethod - def _event(regid, gmtime, entry, subject, author, gitbranchid=None, repsrcid=None, regzbotcmd=None): + def _event( + regid, + gmtime, + entry, + subject, + author, + gitbranchid=None, + repsrcid=None, + regzbotcmd=None, + ): dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO reghistory + dbcursor.execute( + """INSERT INTO reghistory (regid, gmtime, entry, subject, author, regzbotcmd, gitbranchid, repsrcid) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)''', - (regid, gmtime, entry, subject, author, regzbotcmd, gitbranchid, repsrcid)) - logger.debug('[db reghistory] insert (regid:%s, gmtime:%s, entry:%s, subject:"%s", author:"%s" regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)' % ( - regid, gmtime, entry, subject, author, regzbotcmd, gitbranchid, repsrcid)) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", + (regid, gmtime, 
entry, subject, author, regzbotcmd, gitbranchid, repsrcid), + ) + logger.debug( + '[db reghistory] insert (regid:%s, gmtime:%s, entry:%s, subject:"%s", author:"%s" regzbotcmd:"%s", gitbranchid:%s, repsrcid:%s)' + % (regid, gmtime, entry, subject, author, regzbotcmd, gitbranchid, repsrcid) + ) return dbcursor.lastrowid @staticmethod - def event(regid, gmtime, entry, subject, author, repsrcid=None, gitbranchid=None, regzbotcmd=None): + def event( + regid, + gmtime, + entry, + subject, + author, + repsrcid=None, + gitbranchid=None, + regzbotcmd=None, + ): # a few lines from the department of "this should not happen, but better ensure it doesn't": if repsrcid is None and gitbranchid is None: logger.critical( 'this should not happen: RegHistoryEvent.event(%s, %s, %s, %s, %s, %s, %s) was called without specifying either repsrcid or gitbranchid; ' - % (gmtime, entry, subject, repsrcid, gitbranchid, regzbotcmd, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, regzbotcmd, regid) + ) if repsrcid and gitbranchid: logger.critical( 'this should not happen: RegHistoryEvent.event(%s, %s, %s, %s, %s, %s, %s) was called with specifying both repsrcid or gitbranchid' - % (gmtime, entry, subject, repsrcid, gitbranchid, regzbotcmd, regid)) + % (gmtime, entry, subject, repsrcid, gitbranchid, regzbotcmd, regid) + ) RegHistory._event( - regid, gmtime, entry, subject, author, repsrcid=repsrcid, gitbranchid=gitbranchid, regzbotcmd=regzbotcmd) + regid, + gmtime, + entry, + subject, + author, + repsrcid=repsrcid, + gitbranchid=gitbranchid, + regzbotcmd=regzbotcmd, + ) def present(entry, regid=None, repsrcid=None, gitbranchid=None): dbcursor = DBCON.cursor() if gitbranchid and regid: dbresult = dbcursor.execute( - 'SELECT * FROM reghistory WHERE entry=(?) AND gitbranchid=(?) AND regid=(?)', (entry, gitbranchid, regid)).fetchone() + 'SELECT * FROM reghistory WHERE entry=(?) AND gitbranchid=(?) 
AND regid=(?)', + (entry, gitbranchid, regid), + ).fetchone() elif repsrcid and regid: dbresult = dbcursor.execute( - 'SELECT * FROM reghistory WHERE entry=(?) AND repsrcid=(?) AND regid=(?)', (entry, repsrcid, regid)).fetchone() + 'SELECT * FROM reghistory WHERE entry=(?) AND repsrcid=(?) AND regid=(?)', + (entry, repsrcid, regid), + ).fetchone() elif regid: dbresult = dbcursor.execute( - 'SELECT * FROM reghistory WHERE entry=(?) AND regid=(?)', (entry, regid)).fetchone() + 'SELECT * FROM reghistory WHERE entry=(?) AND regid=(?)', (entry, regid) + ).fetchone() elif repsrcid: dbresult = dbcursor.execute( - 'SELECT * FROM reghistory WHERE entry=(?) AND repsrcid=(?)', (entry, repsrcid)).fetchone() + 'SELECT * FROM reghistory WHERE entry=(?) AND repsrcid=(?)', + (entry, repsrcid), + ).fetchone() else: dbresult = dbcursor.execute( - 'SELECT * FROM reghistory WHERE entry=(?)', (entry, )).fetchone() + 'SELECT * FROM reghistory WHERE entry=(?)', (entry,) + ).fetchone() if dbresult is None: return False @@ -1321,21 +1702,29 @@ def present(entry, regid=None, repsrcid=None, gitbranchid=None): def filed(regid): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT gmtime FROM reghistory WHERE regzbotcmd LIKE (?) AND regid=(?) ORDER BY gmtime', ('%%introduced: %%', regid)).fetchone() + 'SELECT gmtime FROM reghistory WHERE regzbotcmd LIKE (?) AND regid=(?) ORDER BY gmtime', + ('%%introduced: %%', regid), + ).fetchone() # fallback, in case introduced command couldn't be found if not dbresult: dbresult = dbcursor.execute( - 'SELECT gmtime FROM reghistory WHERE regid=(?) ORDER BY gmtime', (regid, )).fetchone() + 'SELECT gmtime FROM reghistory WHERE regid=(?) ORDER BY gmtime', + (regid,), + ).fetchone() # fallback, in case history entry was not created yet if not dbresult: dbresult = dbcursor.execute( - 'SELECT gmtime FROM actmonitor WHERE regid=(?) ORDER BY gmtime', (regid, )).fetchone() + 'SELECT gmtime FROM actmonitor WHERE regid=(?) 
ORDER BY gmtime', + (regid,), + ).fetchone() return dbresult[0] @classmethod def get_all(cls, regid): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM reghistory WHERE regid=(?) ORDER BY gmtime', (regid, )): + for dbresult in dbcursor.execute( + 'SELECT * FROM reghistory WHERE regid=(?) ORDER BY gmtime', (regid,) + ): yield cls(*dbresult) def url(self): @@ -1346,7 +1735,7 @@ def url(self): return None -class RegLink(): +class RegLink: def __init__(self, regid, gmtime, repsrcid, entry, link, subject, author): self.regid = regid self.gmtime = gmtime @@ -1364,7 +1753,7 @@ def __init__(self, regid, gmtime, repsrcid, entry, link, subject, author): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "reglinks"') RegzbotDbMeta.set_tableversion('reglinks', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE reglinks ( regid INTEGER NOT NULL, gmtime INTEGER, @@ -1373,25 +1762,36 @@ def db_create(version, dbcursor): link STRING, subject STRING, author STRING - )''') + )""") @staticmethod def add_entry(regid, gmtime, subject, author, repsrcid, entry): dbcursor = DBCON.cursor() - if dbcursor.execute('SELECT entry FROM reglinks WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', (regid, repsrcid, entry)).fetchone(): - dbcursor.execute('''UPDATE reglinks + if dbcursor.execute( + 'SELECT entry FROM reglinks WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', + (regid, repsrcid, entry), + ).fetchone(): + dbcursor.execute( + """UPDATE reglinks SET gmtime = (?), author = (?), subject = (?) - WHERE regid=(?) AND repsrcid=(?) AND entry=(?)''', - (gmtime, author, subject, regid, repsrcid, entry)) - logger.debug('[db reglinks] updated (regid:%s, gmtime:%s, repsrcid:%s, entry:%s, subject:"%s", author:"%s" )' % ( - regid, gmtime, repsrcid, entry, subject, author)) + WHERE regid=(?) AND repsrcid=(?) 
AND entry=(?)""", + (gmtime, author, subject, regid, repsrcid, entry), + ) + logger.debug( + '[db reglinks] updated (regid:%s, gmtime:%s, repsrcid:%s, entry:%s, subject:"%s", author:"%s" )' + % (regid, gmtime, repsrcid, entry, subject, author) + ) else: - dbcursor.execute('''INSERT INTO reglinks + dbcursor.execute( + """INSERT INTO reglinks (regid, gmtime, repsrcid, entry, subject, author) - VALUES (?, ?, ?, ?, ?, ?)''', - (regid, gmtime, repsrcid, entry, subject, author)) - logger.debug('[db reglinks] insert (regid:%s, gmtime:%s, repsrcid:%s, entry:%s, subject:"%s", author:"%s" )' % ( - regid, gmtime, repsrcid, entry, subject, author)) + VALUES (?, ?, ?, ?, ?, ?)""", + (regid, gmtime, repsrcid, entry, subject, author), + ) + logger.debug( + '[db reglinks] insert (regid:%s, gmtime:%s, repsrcid:%s, entry:%s, subject:"%s", author:"%s" )' + % (regid, gmtime, repsrcid, entry, subject, author) + ) def remove(self): self.remove_entry(self.regid, self.repsrcid, self.entry) @@ -1399,56 +1799,96 @@ def remove(self): @staticmethod def remove_entry(regid, repsrcid, entry): dbcursor = DBCON.cursor() - if dbcursor.execute('SELECT repsrcid FROM reglinks WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', (regid, repsrcid, entry)).fetchone(): - dbcursor.execute('''DELETE FROM reglinks - WHERE regid=(?) AND repsrcid=(?) AND entry=(?)''', - (regid, repsrcid, entry)) + if dbcursor.execute( + 'SELECT repsrcid FROM reglinks WHERE regid=(?) AND repsrcid=(?) AND entry=(?)', + (regid, repsrcid, entry), + ).fetchone(): + dbcursor.execute( + """DELETE FROM reglinks + WHERE regid=(?) AND repsrcid=(?) AND entry=(?)""", + (regid, repsrcid, entry), + ) logger.debug( - '[db reglinks] deleted (regid:%s, repsrcid:%s, entry:%s)' % (regid, repsrcid, entry)) + '[db reglinks] deleted (regid:%s, repsrcid:%s, entry:%s)' + % (regid, repsrcid, entry) + ) @classmethod def get_all(cls, regid, order='ASC'): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM reglinks WHERE regid=(?) 
ORDER BY gmtime %s' % order, (regid,)): + for dbresult in dbcursor.execute( + 'SELECT * FROM reglinks WHERE regid=(?) ORDER BY gmtime %s' % order, + (regid,), + ): yield cls(*dbresult) @classmethod def get_by_reg_n_reptrd(cls, regression, reptrd): dbcursor = DBCON.cursor() if reptrd.repsrc.kind == 'lore': - for dbresult in dbcursor.execute('SELECT * FROM reglinks WHERE regid=(?) and entry=(?)', (regression.regid, reptrd.id)): + for dbresult in dbcursor.execute( + 'SELECT * FROM reglinks WHERE regid=(?) and entry=(?)', + (regression.regid, reptrd.id), + ): reglink = cls(*dbresult) if ReportSource.islore(reglink.repsrcid): yield reglink else: - for dbresult in dbcursor.execute('SELECT * FROM reglinks WHERE regid=(?) AND repsrcid=(?) and entry=(?)', (regression.regid, reptrd.repsrc.id, reptrd.id)): + for dbresult in dbcursor.execute( + 'SELECT * FROM reglinks WHERE regid=(?) AND repsrcid=(?) and entry=(?)', + (regression.regid, reptrd.repsrc.id, reptrd.id), + ): yield cls(*dbresult) def delete(self, dbcursor=None): if not dbcursor: dbcursor = DBCON.cursor() - dbcursor.execute('''DELETE FROM reglinks - WHERE regid=(?) AND gmtime=(?) AND subject=(?)''', - (self.regid, self.gmtime, self.subject)) + dbcursor.execute( + """DELETE FROM reglinks + WHERE regid=(?) AND gmtime=(?) 
AND subject=(?)""", + (self.regid, self.gmtime, self.subject), + ) if dbcursor.rowcount > 0: - logger.debug('[db reglinks] deleted (regid:%s; subject:"%s" gmtime:%s)', - self.regid, self.gmtime, self.subject) + logger.debug( + '[db reglinks] deleted (regid:%s; subject:"%s" gmtime:%s)', + self.regid, + self.gmtime, + self.subject, + ) return True else: - logger.debug('[db reglinks] failed to deleted entry (regid:%s; subject:"%s" gmtime:%s)', - self.regid, self.gmtime, self.subject) + logger.debug( + '[db reglinks] failed to deleted entry (regid:%s; subject:"%s" gmtime:%s)', + self.regid, + self.gmtime, + self.subject, + ) return False -class RegressionBasic(): - DBCOLS = "regressions.regid, regressions.subject, regressions.introduced, regressions.gitbranchid, regressions.actimonid, \ +class RegressionBasic: + DBCOLS = 'regressions.regid, regressions.subject, regressions.introduced, regressions.gitbranchid, regressions.actimonid, \ regressions.solved_reason, regressions.solved_gmtime, regressions.solved_entry, regressions.solved_subject, \ - regressions.solved_gitbranchid, regressions.solved_repsrcid, regressions.solved_repentry, regressions.solved_duplicateof" - - def __init__(self, regid, subject, introduced, gitbranchid, actimonid, solved_reason=None, solved_gmtime=None, - solved_entry=None, solved_subject=None, solved_gitbranchid=None, solved_repsrcid=None, solved_repentry=None, solved_duplicateof=None): + regressions.solved_gitbranchid, regressions.solved_repsrcid, regressions.solved_repentry, regressions.solved_duplicateof' + + def __init__( + self, + regid, + subject, + introduced, + gitbranchid, + actimonid, + solved_reason=None, + solved_gmtime=None, + solved_entry=None, + solved_subject=None, + solved_gitbranchid=None, + solved_repsrcid=None, + solved_repentry=None, + solved_duplicateof=None, + ): self.regid = regid self.subject = subject self.introduced = str(introduced) @@ -1486,15 +1926,26 @@ def username(self): def __create(cls, rgzcmd, reptrd, *, 
introduced=None, gitbranchid=None): if not introduced: introduced = rgzcmd.parameters - regression = cls.__create_obsolete(introduced, gitbranchid, reptrd.repsrc.id, - reptrd.id, reptrd.gmtime, reptrd.summary, reptrd.realname, reptrd.username) + regression = cls.__create_obsolete( + introduced, + gitbranchid, + reptrd.repsrc.id, + reptrd.id, + reptrd.gmtime, + reptrd.summary, + reptrd.realname, + reptrd.username, + ) return regression def __duplicate(self, rgzcmd, other): if self.regid == other.regid: - logger.warning('regression[%s, "%s"]: ignoring request to mark this regression as a duplicate of itself.', - self.regid, self.subject) + logger.warning( + 'regression[%s, "%s"]: ignoring request to mark this regression as a duplicate of itself.', + self.regid, + self.subject, + ) return if self.actimon.gmtime < other.actimon.gmtime: @@ -1509,23 +1960,44 @@ def __duplicate(self, rgzcmd, other): younger._db_update_solved() if self == older: - younger.add_history_event(rgzcmd, cmdline='duplicate: %s [implicit via duplicate]' % older.web_url) + younger.add_history_event( + rgzcmd, cmdline='duplicate: %s [implicit via duplicate]' % older.web_url + ) else: - older.add_history_event(rgzcmd, cmdline='duplicate: %s [implicit via duplicate]' % younger.web_url) - logger.info('Regression(%s): now a duplicate of Regression(%s).', - younger.web_url, older.web_url) + older.add_history_event( + rgzcmd, + cmdline='duplicate: %s [implicit via duplicate]' % younger.web_url, + ) + logger.info( + 'Regression(%s): now a duplicate of Regression(%s).', + younger.web_url, + older.web_url, + ) def add_history_event(self, rgzcmd, *, cmdline=None): if not cmdline: cmdline = rgzcmd.cmd if rgzcmd.parameters: cmdline = '%s: %s' % (cmdline, rgzcmd.parameters) - RegHistory.event(self.regid, rgzcmd.repact.gmtime, rgzcmd.repact.reptrd.id, rgzcmd.repact.summary, - rgzcmd.repact.realname, repsrcid=rgzcmd.repact.repsrc.id, regzbotcmd=cmdline) + RegHistory.event( + self.regid, + rgzcmd.repact.gmtime, + 
rgzcmd.repact.reptrd.id, + rgzcmd.repact.summary, + rgzcmd.repact.realname, + repsrcid=rgzcmd.repact.repsrc.id, + regzbotcmd=cmdline, + ) def cmd_backburn(self, rgzcmd, reason): - RegBackburner.add(self.regid, rgzcmd.repact.repsrc.id, rgzcmd.repact.reptrd.id, rgzcmd.repact.gmtime, - rgzcmd.repact.realname, reason) + RegBackburner.add( + self.regid, + rgzcmd.repact.repsrc.id, + rgzcmd.repact.reptrd.id, + rgzcmd.repact.gmtime, + rgzcmd.repact.realname, + reason, + ) def cmd_duplicate(self, rgzcmd, reptrd): # handle duplicates already tracked @@ -1544,9 +2016,12 @@ def cmd_duplicate(self, rgzcmd, reptrd): reptrd.summary = self.subject if not reptrd.username: reptrd.username = rgzcmd.repact.realname - regression_created = self.__create(rgzcmd, reptrd, introduced=self.introduced, gitbranchid=self.gitbranchid) - regression_created.add_history_event(rgzcmd, cmdline="introduced: %s [implicit via duplicate]" - % self.introduced) + regression_created = self.__create( + rgzcmd, reptrd, introduced=self.introduced, gitbranchid=self.gitbranchid + ) + regression_created.add_history_event( + rgzcmd, cmdline='introduced: %s [implicit via duplicate]' % self.introduced + ) # for generic urls, take over the subject if reptrd.repsrc.kind == 'generic': regression_created.title(self.subject) @@ -1554,29 +2029,53 @@ def cmd_duplicate(self, rgzcmd, reptrd): return regression_created def cmd_fix(self, rgzcmd, hexsha, summary): - self.fixedby(rgzcmd.repact.gmtime, hexsha, summary, repsrcid=rgzcmd.repact.repsrc.repsrcid, - repentry=rgzcmd.repact.reptrd.id) + self.fixedby( + rgzcmd.repact.gmtime, + hexsha, + summary, + repsrcid=rgzcmd.repact.repsrc.repsrcid, + repentry=rgzcmd.repact.reptrd.id, + ) def cmd_from(self, rgzcmd, realname, username): self.actimon.update_author(realname, username) - logger.info('Regression(%s)]: author is now %s, authormail now %s', self.web_url, realname, username) + logger.info( + 'Regression(%s)]: author is now %s, authormail now %s', + self.web_url, + realname, + 
username, + ) def cmd_introduced_update(self, rgzcmd, hexsha): self.introduced_update(hexsha) @classmethod def cmd_introduced_new(cls, rgzcmd, hexsha): - return cls.introduced_create(rgzcmd.reptrd.repsrc.id, rgzcmd.reptrd.id, rgzcmd.reptrd.summary, rgzcmd.reptrd.realname, - rgzcmd.reptrd.username, hexsha, rgzcmd.reptrd.gmtime) + return cls.introduced_create( + rgzcmd.reptrd.repsrc.id, + rgzcmd.reptrd.id, + rgzcmd.reptrd.summary, + rgzcmd.reptrd.realname, + rgzcmd.reptrd.username, + hexsha, + rgzcmd.reptrd.gmtime, + ) def cmd_link(self, rgzcmd, url, description): reptrd = ReportThread.from_url(url, repact=rgzcmd.repact) if not description: description = reptrd.summary RegLink.add_entry( - self.regid, reptrd.gmtime, description, reptrd.realname, reptrd.repsrc.id, reptrd.id) - logger.info('regression[%s, "%s"]: added link %s' % ( - self.regid, self.subject, url)) + self.regid, + reptrd.gmtime, + description, + reptrd.realname, + reptrd.repsrc.id, + reptrd.id, + ) + logger.info( + 'regression[%s, "%s"]: added link %s' % (self.regid, self.subject, url) + ) def cmd_monitor(self, rgzcmd, url, description): reptrd = ReportThread.from_url(url) @@ -1586,20 +2085,41 @@ def cmd_monitor(self, rgzcmd, url, description): return if not description: description = reptrd.summary - actimonid = RegActivityMonitor.add(self.regid, reptrd.repsrc.id, reptrd.id, - reptrd.gmtime, description, reptrd.realname, reptrd.username) + actimonid = RegActivityMonitor.add( + self.regid, + reptrd.repsrc.id, + reptrd.id, + reptrd.gmtime, + description, + reptrd.realname, + reptrd.username, + ) actimon = RegActivityMonitor.get(actimonid) RegLink.add_entry( - self.regid, rgzcmd.reptrd.gmtime, description, reptrd.realname, reptrd.repsrc.id, reptrd.id) + self.regid, + rgzcmd.reptrd.gmtime, + description, + reptrd.realname, + reptrd.repsrc.id, + reptrd.id, + ) reptrd.update(None, None, actimon=actimon, triggering_repact=rgzcmd.repact) - logger.info('regression[%s, "%s"]: started to monitor %s' % ( - 
self.regid, self.subject, url)) + logger.info( + 'regression[%s, "%s"]: started to monitor %s' + % (self.regid, self.subject, url) + ) def cmd_resolve(self, rgzcmd, reason): cmd = rgzcmd.cmd if cmd == 'resolve': cmd = 'resolved' - self._solve_reason(cmd, reason, rgzcmd.repact.gmtime, rgzcmd.repact.reptrd.id, rgzcmd.repact.repsrc.id) + self._solve_reason( + cmd, + reason, + rgzcmd.repact.gmtime, + rgzcmd.repact.reptrd.id, + rgzcmd.repact.repsrc.id, + ) def cmd_unbackburn(self, rgzcmd): RegBackburner.remove(self.regid) @@ -1622,7 +2142,7 @@ def cmd_unlink(self, rgzcmd, url): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "regressions"') RegzbotDbMeta.set_tableversion('regressions', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE regressions ( regid INTEGER NOT NULL PRIMARY KEY, subject STRING NOT NULL, @@ -1637,16 +2157,27 @@ def db_create(version, dbcursor): solved_repsrcid INTEGER, solved_repentry STRING, solved_duplicateof INTEGER - )''') + )""") def _db_update_solved(self): dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE regressions + dbcursor.execute( + """UPDATE regressions SET solved_reason = (?), solved_gmtime = (?), solved_entry = (?), solved_subject = (?), solved_gitbranchid = (?), solved_repsrcid = (?) , solved_repentry = (?), solved_duplicateof = (?) 
- WHERE regid=(?)''', - (self.solved_reason, self.solved_gmtime, self.solved_entry, self.solved_subject, - self.solved_gitbranchid, self.solved_repsrcid, self.solved_repentry, self.solved_duplicateof, self.regid)) + WHERE regid=(?)""", + ( + self.solved_reason, + self.solved_gmtime, + self.solved_entry, + self.solved_subject, + self.solved_gitbranchid, + self.solved_repsrcid, + self.solved_repentry, + self.solved_duplicateof, + self.regid, + ), + ) # in case it's on backburner, unbackburn this if self.solved_reason != 'to_be_fixed': @@ -1654,8 +2185,15 @@ def _db_update_solved(self): logger.debug( '[db regressions] update solved fieds: (regid:%s; solved_reason:%s; solved_gmtime:%s; solved_entry:%s; solved_subject:"%s"; solved_gitbranchid:%s; solved_repsrcid:%s; solved_repentry:%s; )', - self.regid, self.solved_reason, self.solved_gmtime, self.solved_entry, - self.solved_subject, self.solved_gitbranchid, self.solved_repsrcid, self.solved_repentry) + self.regid, + self.solved_reason, + self.solved_gmtime, + self.solved_entry, + self.solved_subject, + self.solved_gitbranchid, + self.solved_repsrcid, + self.solved_repentry, + ) def delete(self, dbcursor=None): if not dbcursor: @@ -1674,28 +2212,46 @@ def delete(self, dbcursor=None): # if self.repsrcid and ReportSource.get_by_id(self.repsrcid, dbcursor).ismail(): # RecordProcessedMsgids.delete(self.entry) - dbcursor.execute('''DELETE FROM regressions - WHERE regid=(?)''', - (self.regid, )) + dbcursor.execute( + """DELETE FROM regressions + WHERE regid=(?)""", + (self.regid,), + ) if dbcursor.rowcount > 0: - logger.debug('[db regressions] deleted (regid:%s; subject:"%s"; introduced:%s; gitbranchid:%s)', - self.regid, self.subject, self.introduced, self.gitbranchid) + logger.debug( + '[db regressions] deleted (regid:%s; subject:"%s"; introduced:%s; gitbranchid:%s)', + self.regid, + self.subject, + self.introduced, + self.gitbranchid, + ) return True else: - logger.debug('[db regressions] failed to deleted entry (regid:%s; 
subject:"%s"; introduced:%s; gitbranchid:%s)', - self.regid, self.subject, self.introduced, self.gitbranchid) + logger.debug( + '[db regressions] failed to deleted entry (regid:%s; subject:"%s"; introduced:%s; gitbranchid:%s)', + self.regid, + self.subject, + self.introduced, + self.gitbranchid, + ) return False @classmethod - def get_all(cls, order="regid", only_unsolved=False): + def get_all(cls, order='regid', only_unsolved=False): dbcursor = DBCON.cursor() if only_unsolved: - for dbresult in dbcursor.execute('SELECT %s FROM regressions WHERE (solved_reason IS NULL AND solved_duplicateof IS NULL) OR solved_reason IS "to_be_fixed" ORDER BY %s' % (RegressionBasic.DBCOLS, order)): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regressions WHERE (solved_reason IS NULL AND solved_duplicateof IS NULL) OR solved_reason IS "to_be_fixed" ORDER BY %s' + % (RegressionBasic.DBCOLS, order) + ): yield cls(*dbresult) else: - for dbresult in dbcursor.execute('SELECT %s FROM regressions ORDER BY %s' % (RegressionBasic.DBCOLS, order)): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regressions ORDER BY %s' + % (RegressionBasic.DBCOLS, order) + ): yield cls(*dbresult) @classmethod @@ -1703,7 +2259,9 @@ def get_by_regid(cls, regid, dbcursor=None): if not dbcursor: dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT %s FROM regressions WHERE regid=?' % RegressionBasic.DBCOLS, (regid,)).fetchone() + 'SELECT %s FROM regressions WHERE regid=?' % RegressionBasic.DBCOLS, + (regid,), + ).fetchone() if dbresult: return cls(*dbresult) return None @@ -1714,11 +2272,17 @@ def get_by_entry(cls, entry, dbcursor=None): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.entry=?' % RegressionBasic.DBCOLS, (entry,)).fetchone() + 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.entry=?' 
+ % RegressionBasic.DBCOLS, + (entry,), + ).fetchone() if not dbresult: # fallback for deep threads dbresult = dbcursor.execute( - 'SELECT %s FROM ((actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid) INNER JOIN regressions ON actmonitor.regid = regressions.regid) WHERE regactivity.entry=?; ' % RegressionBasic.DBCOLS, (entry,)).fetchone() + 'SELECT %s FROM ((actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid) INNER JOIN regressions ON actmonitor.regid = regressions.regid) WHERE regactivity.entry=?; ' + % RegressionBasic.DBCOLS, + (entry,), + ).fetchone() if dbresult: yield cls(*dbresult) @@ -1732,12 +2296,17 @@ def get_by_reptrd(cls, reptrd): def get_dupes(self, *, recursion_count=-1): if recursion_count > 12: - logger.critical("Aborting, recursion limit in RegActivityMonitor.__walk_duplicates() exceeded.") + logger.critical( + 'Aborting, recursion limit in RegActivityMonitor.__walk_duplicates() exceeded.' + ) sys.exit(1) recursion_count += 1 dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute("SELECT %s FROM regressions WHERE solved_duplicateof=(?)" % self.DBCOLS, (self.regid, )): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regressions WHERE solved_duplicateof=(?)' % self.DBCOLS, + (self.regid,), + ): regression = self.__class__(*dbresult) yield regression for duplicate in regression.get_dupes(recursion_count=recursion_count): @@ -1753,12 +2322,16 @@ def find_topmost(self, *, recursion_count=-1): yield self if recursion_count > 12: - logger.critical("Aborting, recursion limit in RegActivityMonitor.__walk_duplicates() exceeded.") + logger.critical( + 'Aborting, recursion limit in RegActivityMonitor.__walk_duplicates() exceeded.' 
+ ) sys.exit(1) recursion_count += 1 upper_regression = self.get_by_regid(self.solved_duplicateof) - for regression in upper_regression.find_topmost(recursion_count=recursion_count): + for regression in upper_regression.find_topmost( + recursion_count=recursion_count + ): yield regression return @@ -1767,13 +2340,19 @@ def get_by_regactivity(cls, entry): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.entry=?; ' % RegressionBasic.DBCOLS, (entry,)).fetchone() + 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.entry=?; ' + % RegressionBasic.DBCOLS, + (entry,), + ).fetchone() if dbresult: return cls(*dbresult) # fallback for deep threads dbresult = dbcursor.execute( - 'SELECT %s FROM ((actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid) INNER JOIN regressions ON actmonitor.regid = regressions.regid) WHERE regactivity.entry=?; ' % RegressionBasic.DBCOLS, (entry,)).fetchone() + 'SELECT %s FROM ((actmonitor INNER JOIN regactivity ON regactivity.actimonid = actmonitor.actimonid) INNER JOIN regressions ON actmonitor.regid = regressions.regid) WHERE regactivity.entry=?; ' + % RegressionBasic.DBCOLS, + (entry,), + ).fetchone() if dbresult: return cls(*dbresult) @@ -1782,26 +2361,32 @@ def get_by_regactivity(cls, entry): @classmethod def get_expected_by_subject(cls, subject): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT %s FROM regressions WHERE solved_reason=? AND solved_subject LIKE (?)' % RegressionBasic.DBCOLS, ('to_be_fixed', subject,)): + for dbresult in dbcursor.execute( + 'SELECT %s FROM regressions WHERE solved_reason=? 
AND solved_subject LIKE (?)' + % RegressionBasic.DBCOLS, + ( + 'to_be_fixed', + subject, + ), + ): if dbresult: yield cls(*dbresult) @classmethod def get_by_link(cls, link): tmpstring = link - if tmpstring.startswith("https://"): - tmpstring = tmpstring.removeprefix("https://") - elif tmpstring.startswith("http://"): - tmpstring = tmpstring.removeprefix("http://") + if tmpstring.startswith('https://'): + tmpstring = tmpstring.removeprefix('https://') + elif tmpstring.startswith('http://'): + tmpstring = tmpstring.removeprefix('http://') - if tmpstring.startswith("lore.kernel.org/"): + if tmpstring.startswith('lore.kernel.org/'): _, _, tmpstring = tmpstring.split('/', maxsplit=2) msgid, _, _ = tmpstring.partition('/') for regression in cls.get_by_entry(urldecode(msgid)): return regression else: - logger.warning( - "RegressionBasic.get_by_link(%s): unsupported domain ", link) + logger.warning('RegressionBasic.get_by_link(%s): unsupported domain ', link) return None @classmethod @@ -1819,8 +2404,17 @@ def get_by_url(cls, url): def fixes_expected(): dbcursor = DBCON.cursor() pending = [] - for dbresult in dbcursor.execute('SELECT regid, solved_entry, solved_subject FROM regressions WHERE solved_reason=?', ('to_be_fixed',)): - pending.append({"regid": dbresult[0], "solved_entry": dbresult[1], "solved_subject": dbresult[2]}) + for dbresult in dbcursor.execute( + 'SELECT regid, solved_entry, solved_subject FROM regressions WHERE solved_reason=?', + ('to_be_fixed',), + ): + pending.append( + { + 'regid': dbresult[0], + 'solved_entry': dbresult[1], + 'solved_subject': dbresult[2], + } + ) return pending @classmethod @@ -1838,28 +2432,55 @@ def __introduced_precheck(cls, introduced, gmtime=None): return introduced, None @classmethod - def __create_obsolete(cls, introduced, gitbranchid, repsrcid, entry, gmtime, subject, authorname, authormail): + def __create_obsolete( + cls, + introduced, + gitbranchid, + repsrcid, + entry, + gmtime, + subject, + authorname, + authormail, + 
): dbcursor = DBCON.cursor() # create regression - dbcursor.execute('''INSERT INTO regressions + dbcursor.execute( + """INSERT INTO regressions (subject, introduced, gitbranchid) - VALUES (?, ?, ?)''', - (subject, introduced, gitbranchid)) + VALUES (?, ?, ?)""", + (subject, introduced, gitbranchid), + ) regid = dbcursor.lastrowid # create entry for monitoring - actimonid = RegActivityMonitor.add(regid, repsrcid, entry, gmtime, subject, authorname, authormail) - dbcursor.execute('''UPDATE regressions + actimonid = RegActivityMonitor.add( + regid, repsrcid, entry, gmtime, subject, authorname, authormail + ) + dbcursor.execute( + """UPDATE regressions SET actimonid = (?) - WHERE regid = (?)''', - (actimonid, regid)) + WHERE regid = (?)""", + (actimonid, regid), + ) - logger.debug('[db regressions] inserted (regid:%s; subject:"%s"; introduced:%s; actimonid:%s; gitbranchid:%s)', - regid, subject, introduced, actimonid, gitbranchid) + logger.debug( + '[db regressions] inserted (regid:%s; subject:"%s"; introduced:%s; actimonid:%s; gitbranchid:%s)', + regid, + subject, + introduced, + actimonid, + gitbranchid, + ) - logger.info('regression[%s, "%s"]: created ("%s"; "%s")', - regid, subject, entry, introduced) + logger.info( + 'regression[%s, "%s"]: created ("%s"; "%s")', + regid, + subject, + entry, + introduced, + ) # check if it already got fixed regression = cls.get_by_regid(regid) @@ -1869,26 +2490,51 @@ def __create_obsolete(cls, introduced, gitbranchid, repsrcid, entry, gmtime, sub return regression @classmethod - def introduced_create(cls, repsrcid, entry, subject, authorname, authormail, introduced, gmtime): + def introduced_create( + cls, repsrcid, entry, subject, authorname, authormail, introduced, gmtime + ): introduced, gitbranchid = cls.__introduced_precheck(introduced, gmtime) - return cls.__create_obsolete(introduced, gitbranchid, repsrcid, entry, gmtime, subject, authorname, authormail) + return cls.__create_obsolete( + introduced, + gitbranchid, + 
repsrcid, + entry, + gmtime, + subject, + authorname, + authormail, + ) def introduced_update(self, tagload): self.introduced, self.gitbranchid = self.__introduced_precheck(tagload) - logger.debug('regression %s (%s): setting introduced to %s', - self.regid, self.subject, self.introduced) + logger.debug( + 'regression %s (%s): setting introduced to %s', + self.regid, + self.subject, + self.introduced, + ) dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE regressions + dbcursor.execute( + """UPDATE regressions SET introduced = (?), gitbranchid = (?) - WHERE regid=(?)''', - (self.introduced, self.gitbranchid, self.regid)) - logger.debug('[db regressions] introduced is now %s (regid:%s; subject:"%s" )', - self.introduced, self.regid, self.subject) - logger.info('regression[%s, "%s"]: setting introduced to "%s"', - self.regid, self.subject, self.introduced) + WHERE regid=(?)""", + (self.introduced, self.gitbranchid, self.regid), + ) + logger.debug( + '[db regressions] introduced is now %s (regid:%s; subject:"%s" )', + self.introduced, + self.regid, + self.subject, + ) + logger.info( + 'regression[%s, "%s"]: setting introduced to "%s"', + self.regid, + self.subject, + self.introduced, + ) def __create_dup(self, url, gmtime): subject = self.subject @@ -1899,12 +2545,34 @@ def __create_dup(self, url, gmtime): authormail = None # create regression - return self.__create_obsolete(self.introduced, self.gitbranchid, repsrc.repsrcid, entry, gmtime, subject, authorname, authormail) + return self.__create_obsolete( + self.introduced, + self.gitbranchid, + repsrc.repsrcid, + entry, + gmtime, + subject, + authorname, + authormail, + ) - def _dupof_direct(self, regression_other, gmtime, msgid, msgsubject, authorname, repsrcid, *, history=True): + def _dupof_direct( + self, + regression_other, + gmtime, + msgid, + msgsubject, + authorname, + repsrcid, + *, + history=True, + ): if self.regid == regression_other.regid: - logger.warning('regression[%s, "%s"]: request to mark this 
a as duplicate of ourselves; aborting', - self.regid, self.subject) + logger.warning( + 'regression[%s, "%s"]: request to mark this a as duplicate of ourselves; aborting', + self.regid, + self.subject, + ) # FIXME properly sys.exit(1) @@ -1916,12 +2584,25 @@ def _dupof_direct(self, regression_other, gmtime, msgid, msgsubject, authorname, self._db_update_solved() - logger.info('regression[%s, "%s"]: marked as duplicate of regression Regression[%s, "%s"])', - self.regid, self.subject, regression_other.regid, regression_other.subject) + logger.info( + 'regression[%s, "%s"]: marked as duplicate of regression Regression[%s, "%s"])', + self.regid, + self.subject, + regression_other.regid, + regression_other.subject, + ) if history: # make sure this is mentioned in the other regression, too - RegHistory.event(regression_other.regid, gmtime, msgid, self.solved_subject, authorname, repsrcid=repsrcid, - regzbotcmd='dup: the regression "%s" was marked as duplicate of this' % (self.subject)) + RegHistory.event( + regression_other.regid, + gmtime, + msgid, + self.solved_subject, + authorname, + repsrcid=repsrcid, + regzbotcmd='dup: the regression "%s" was marked as duplicate of this' + % (self.subject), + ) def dupof(self, tagload, gmtime, msgid, msgsubject, authorname, repsrcid): def parse(tagload): @@ -1938,15 +2619,30 @@ def parse(tagload): regression_other = self.get_by_link(urldup) if not regression_other: regression_other = self.__create_dup(urldup, gmtime) - RegHistory.event(regression_other.regid, gmtime, msgid, msgsubject, authorname, repsrcid=repsrcid, - regzbotcmd="introduced: %s [implicit, due to usage of 'dup-of']" % self.introduced) - - self._dupof_direct(regression_other, gmtime, msgid, msgsubject, authorname, repsrcid) + RegHistory.event( + regression_other.regid, + gmtime, + msgid, + msgsubject, + authorname, + repsrcid=repsrcid, + regzbotcmd="introduced: %s [implicit, due to usage of 'dup-of']" + % self.introduced, + ) + + self._dupof_direct( + 
regression_other, gmtime, msgid, msgsubject, authorname, repsrcid + ) def fixed(self, gmtime, commit_hexsha, commit_subject, gitbranchid): if self.solved_reason == 'fixed': - logger.info('regression[%s, "%s"]: was marked as fixed by %s earlier, changing it to %s instead.', - self.regid, self.subject, self.solved_entry, commit_hexsha) + logger.info( + 'regression[%s, "%s"]: was marked as fixed by %s earlier, changing it to %s instead.', + self.regid, + self.subject, + self.solved_entry, + commit_hexsha, + ) self.solved_reason = 'fixed' self.solved_gmtime = gmtime @@ -1959,13 +2655,32 @@ def fixed(self, gmtime, commit_hexsha, commit_subject, gitbranchid): self.solved_repentry = None self._db_update_solved() - logger.info('regression[%s, "%s"]: marked as %s by %s ("%s")', self.regid, - self.subject, self.solved_reason, self.solved_entry, self.solved_subject) + logger.info( + 'regression[%s, "%s"]: marked as %s by %s ("%s")', + self.regid, + self.subject, + self.solved_reason, + self.solved_entry, + self.solved_subject, + ) return True - def fixedby(self, gmtime, commit_hexsha, commit_subject, gitbranchid=None, repsrcid=None, repentry=None, lookup=True): + def fixedby( + self, + gmtime, + commit_hexsha, + commit_subject, + gitbranchid=None, + repsrcid=None, + repentry=None, + lookup=True, + ): # mark the commit as fixed, unless it's already considered fixed - if self.solved_reason == 'fixed' and commit_hexsha and self.solved_entry.startswith(commit_hexsha): + if ( + self.solved_reason == 'fixed' + and commit_hexsha + and self.solved_entry.startswith(commit_hexsha) + ): return True self.solved_reason = 'to_be_fixed' @@ -1978,34 +2693,65 @@ def fixedby(self, gmtime, commit_hexsha, commit_subject, gitbranchid=None, repsr self.solved_repentry = repentry self._db_update_solved() - logger.info('regression[%s, "%s"]: marked as %s by %s ("%s")', self.regid, - self.subject, self.solved_reason, self.solved_entry, self.solved_subject) + logger.info( + 'regression[%s, "%s"]: 
marked as %s by %s ("%s")', + self.regid, + self.subject, + self.solved_reason, + self.solved_entry, + self.solved_subject, + ) # look the commit up, in case it was commited already if lookup: - self.lookup_fixedby_everywhere(self.solved_entry, self.solved_subject, gmtime=self.solved_gmtime) + self.lookup_fixedby_everywhere( + self.solved_entry, self.solved_subject, gmtime=self.solved_gmtime + ) return True def lookup_fixedby_everywhere(self, commit_hexsha, subject, gmtime=None): - for gittree, gitbranch, commit_hexsha in GitTree.commit_find_new(hexsha=commit_hexsha, subject=subject, ascending=False): + for gittree, gitbranch, commit_hexsha in GitTree.commit_find_new( + hexsha=commit_hexsha, subject=subject, ascending=False + ): _, culprit_gittree, _, _ = self._gettree_n_branch(self.introduced) - logger.debug("[regression.fixedby] specified fix '%s' found in %s/%s", - commit_hexsha[0:12], gittree.name, gitbranch.name) + logger.debug( + "[regression.fixedby] specified fix '%s' found in %s/%s", + commit_hexsha[0:12], + gittree.name, + gitbranch.name, + ) if culprit_gittree and gittree.priority > culprit_gittree.priority: # this is a commit in a downstream repo we can ignore continue - self.fixedby_found(gittree, gitbranch, commit_hexsha, culprit_gittree, gmtime=gmtime) + self.fixedby_found( + gittree, gitbranch, commit_hexsha, culprit_gittree, gmtime=gmtime + ) - def fixedby_found(self, gittree, gitbranch, commit_hexsha, culprit_gittree=None, gmtime=None): + def fixedby_found( + self, gittree, gitbranch, commit_hexsha, culprit_gittree=None, gmtime=None + ): def add_activity(gittree, gitbranch, commit, mergedate, author): - RegActivityEvent.event(mergedate, commit.hexsha, "%s, the fix specified through '#regzbot fix:' earlier landed in %s" % ( - commit.hexsha[0:12], gitbranch.describe(gittree.name)), gitbranchid=gitbranch.gitbranchid, regid=self.regid, author=author) + RegActivityEvent.event( + mergedate, + commit.hexsha, + "%s, the fix specified through '#regzbot 
fix:' earlier landed in %s" + % (commit.hexsha[0:12], gitbranch.describe(gittree.name)), + gitbranchid=gitbranch.gitbranchid, + regid=self.regid, + author=author, + ) def add_history(gittree, gitbranch, commit, mergedate, regzbotcmd, author): - RegHistory.event(self.regid, mergedate, commit.hexsha, - commit.summary, author, gitbranchid=gitbranch.gitbranchid, - regzbotcmd=regzbotcmd) + RegHistory.event( + self.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd=regzbotcmd, + ) def update_solved_data(gitbranch, commit, mergedate): self.solved_gitbranchid = gitbranch.gitbranchid @@ -2021,20 +2767,27 @@ def update_solved_data(gitbranch, commit, mergedate): author = '%s' % commit.author mergedate = gitbranch.merge_date(commit.hexsha, gittree.repo()) - if RegActivityEvent.present(commit.hexsha, regid=self.regid, gitbranchid=gitbranch.gitbranchid): + if RegActivityEvent.present( + commit.hexsha, regid=self.regid, gitbranchid=gitbranch.gitbranchid + ): # we noticed this one already # update data in case a fix came after we noticed it if not self.solved_subject: update_solved_data(gitbranch, commit, mergedate) return - if self.solved_reason == 'fixed' and self.solved_gitbranchid != gitbranch.gitbranchid: + if ( + self.solved_reason == 'fixed' + and self.solved_gitbranchid != gitbranch.gitbranchid + ): # we don't care what happens in other gitbranches if the commit landed already where it's supposed to # this can happen if something get's commited to mainline and later shows up in next return True historytext_post = "'fix' commit '%s' now in '%s'" % ( - commit.hexsha[0:12], gitbranch.describe(gittree.name)) + commit.hexsha[0:12], + gitbranch.describe(gittree.name), + ) if gmtime and gmtime > mergedate: # use gmtime instead of mergetime in this case, otherwise entries will show up in strange order @@ -2047,7 +2800,9 @@ def update_solved_data(gitbranch, commit, mergedate): if not self.solved_reason == 'fixed': # 
mark the commit as fixed, unless it's already considered fixed historytext = 'fixed: %s' % historytext_post - self.fixed(mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid) + self.fixed( + mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid + ) returnval = True elif gittree.priority < culprit_gittree.priority: # the fix hasn't reached the proper tree yet; but we have the commit, so use @@ -2068,48 +2823,74 @@ def _solve_reason(self, reason, tagload, gmtime, msgid, repsrcid): def update_author(self, entry, tagload): from email.utils import parseaddr + author, authormail = parseaddr(tagload) dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE actmonitor + dbcursor.execute( + """UPDATE actmonitor SET authorname = (?), authormail = (?) - WHERE regid=(?) and entry=(?)''', - (author, authormail, self.regid, entry)) - logger.debug('[db regressions] author is now %s, authormail now %s (regid:%s; subject:"%s")', - author, authormail, self.regid, self.subject) - logger.info('regression[%s, "%s"]: author is now %s, authormail now %s', - self.regid, self.subject, author, authormail) + WHERE regid=(?) and entry=(?)""", + (author, authormail, self.regid, entry), + ) + logger.debug( + '[db regressions] author is now %s, authormail now %s (regid:%s; subject:"%s")', + author, + authormail, + self.regid, + self.subject, + ) + logger.info( + 'regression[%s, "%s"]: author is now %s, authormail now %s', + self.regid, + self.subject, + author, + authormail, + ) self.author = author self.author = authormail def title(self, tagload): dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE regressions + dbcursor.execute( + """UPDATE regressions SET subject = (?) 
- WHERE regid=(?)''', - (tagload, self.regid)) - logger.debug('[db regressions] subject is now %s (regid:%s; subject:"%s" )', - tagload, self.regid, self.subject) - logger.info('regression[%s, "%s"]: subject now "%s"', - self.regid, self.subject, tagload) + WHERE regid=(?)""", + (tagload, self.regid), + ) + logger.debug( + '[db regressions] subject is now %s (regid:%s; subject:"%s" )', + tagload, + self.regid, + self.subject, + ) + logger.info( + 'regression[%s, "%s"]: subject now "%s"', self.regid, self.subject, tagload + ) self.subject = tagload @staticmethod def _gettree_n_branch(introduced, gmtime=None): if '..' in introduced: - range_start, range_end = introduced.split("..", 1) + range_start, range_end = introduced.split('..', 1) if not range_end: # something like 'v5.15..' gittree_start, gitbranch_start = GitTree.commit_find_old(range_start) - commit = gitbranch_start.head_at_gmtime(gmtime, repo=gittree_start.repo()) - introduced = "%s%s" % (introduced, commit.hexsha) + commit = gitbranch_start.head_at_gmtime( + gmtime, repo=gittree_start.repo() + ) + introduced = '%s%s' % (introduced, commit.hexsha) return introduced, gittree_start, gitbranch_start, True gittree_start, gitbranch_start = GitTree.commit_find_old(range_start) gittree_end, gitbranch_end = GitTree.commit_find_old(range_end) # make sure to not sort v5.14.15..v5.15.1 into mainline: - if gitbranch_start and gitbranch_end and gitbranch_start.name == gitbranch_end.name: + if ( + gitbranch_start + and gitbranch_end + and gitbranch_start.name == gitbranch_end.name + ): return introduced, gittree_end, gitbranch_end, True else: return introduced, None, None, True @@ -2133,7 +2914,9 @@ def __init__(self, *args): self._dupes = self._init_dupes(list()) - self._actim_report, self._actim_monitored = self._init_actimons(list(), self.Regactivitymonitor) + self._actim_report, self._actim_monitored = self._init_actimons( + list(), self.Regactivitymonitor + ) self.gmtime = self._actim_report.gmtime self._links = 
self._init_related_objects(list(), self.Reglink) @@ -2166,7 +2949,8 @@ def __init__(self, *args): # catch commits that were introduced and reported in next but moved to master if self.gittree.name == 'next': _, tmpgittree, tmpgitbranch, _ = RegressionBasic._gettree_n_branch( - self.introduced) + self.introduced + ) if tmpgittree.name == 'master': gitbranch = tmpgitbranch self.gittree = tmpgittree @@ -2174,25 +2958,28 @@ def __init__(self, *args): self.treename = self.gittree.name self._branchname = gitbranch.name self._introduced_presentable, self.versionline = self._get_presentable( - self.introduced, gittree=self.gittree) + self.introduced, gittree=self.gittree + ) if self._introduced_short == self._introduced_presentable: self._introduced_presentable = None if '..' not in self.introduced: - self._introduced_url = gitbranch.url( - self.introduced, self.gittree) + self._introduced_url = gitbranch.url(self.introduced, self.gittree) if self.solved_gitbranchid: self._solved_entry_presentable, _ = self._get_presentable( - self.solved_entry, gittree=self.gittree) + self.solved_entry, gittree=self.gittree + ) self.solved_url = GitBranch.url_by_id( - self.solved_gitbranchid, self.solved_entry) + self.solved_gitbranchid, self.solved_entry + ) # # FIXMELATER: link to fixes in next that are supposed to fix this, but haven't reach master yet # elif self.solved_repsrcid: self.solved_url = ReportSource.url_by_id( - self.solved_repsrcid, self.solved_repentry) + self.solved_repsrcid, self.solved_repentry + ) else: self.solved_url = None @@ -2216,9 +3003,11 @@ def _init_actimons(self, datalist, cls): return report, datalist def _get_poked(self, histevents, actievents): - if len(histevents) > 0 and \ - histevents[-1].regzbotcmd.startswith('poke') and \ - (len(actievents) > 0 and histevents[-1].gmtime > actievents[-1].gmtime): + if ( + len(histevents) > 0 + and histevents[-1].regzbotcmd.startswith('poke') + and (len(actievents) > 0 and histevents[-1].gmtime > actievents[-1].gmtime) 
+ ): return histevents[-1] return False @@ -2250,9 +3039,9 @@ def combine(point1, point2): point1 = shorten(point1) point2 = shorten(point2) if point1 is not None: - return "%s..%s" % (point1, point2) + return '%s..%s' % (point1, point2) else: - return "%s" % (point2) + return '%s' % (point2) def isdevcycle(series, version): if LATEST_VERSIONS[series] and version.startswith(LATEST_VERSIONS[series]): @@ -2265,7 +3054,7 @@ def isdevcycle(series, version): if gitref is None: return None, None elif '..' in gitref: - point1, point2 = gitref.split("..", 1) + point1, point2 = gitref.split('..', 1) else: point1 = None point2 = gitref @@ -2305,12 +3094,14 @@ def isdevcycle(series, version): # this checks: # 1) if range starts with the same version number # 2) if range starts with the number from the previous cycle (catches mainline and stable releases) - if isdevcycle('indevelopment', point1) or \ - point1.startswith(LATEST_VERSIONS['latest']): + if isdevcycle('indevelopment', point1) or point1.startswith( + LATEST_VERSIONS['latest'] + ): return combine(point1, point2), 'indevelopment' if isdevcycle('latest', point2): - if isdevcycle('latest', point1) or \ - point1.startswith(LATEST_VERSIONS['previous']): + if isdevcycle('latest', point1) or point1.startswith( + LATEST_VERSIONS['previous'] + ): return combine(point1, point2), 'latest' # default: either its and older range or something doesn't match up, which can happen if user specifies odd ranges @@ -2319,51 +3110,120 @@ def isdevcycle(series, version): def commitmention(self, gittree, gitbranch, commit): mergedate = gitbranch.merge_date(commit.hexsha) author = '%s' % commit.author - regzbotcmd = "%s in %s referred to this regression" % (commit.hexsha[0:12], gitbranch.describe(gittree.name)) + regzbotcmd = '%s in %s referred to this regression' % ( + commit.hexsha[0:12], + gitbranch.describe(gittree.name), + ) - RegActivityEvent.event(mergedate, commit.hexsha, "Commit %s in %s" % ( - commit.hexsha[0:12], 
gitbranch.describe(gittree.name)), gitbranchid=gitbranch.gitbranchid, regid=self.regid, author=author) + RegActivityEvent.event( + mergedate, + commit.hexsha, + 'Commit %s in %s' % (commit.hexsha[0:12], gitbranch.describe(gittree.name)), + gitbranchid=gitbranch.gitbranchid, + regid=self.regid, + author=author, + ) if self.treename == gittree.name: - self.fixed( - mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid) - RegHistory.event(self.regid, mergedate, commit.hexsha, commit.summary, author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd="fix: %s [implicit, due to a Link/Closes tag]" % commit.hexsha[0:12]) + self.fixed(mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid) + RegHistory.event( + self.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd='fix: %s [implicit, due to a Link/Closes tag]' + % commit.hexsha[0:12], + ) for duplicate in self.find_topmost(): if self.regid != duplicate.regid: duplicate.fixed( - mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid) - RegHistory.event(duplicate.regid, mergedate, commit.hexsha, commit.summary, author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd="fix: %s [implicit, due to a Link/Closes tag]" % commit.hexsha[0:12]) + mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid + ) + RegHistory.event( + duplicate.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd='fix: %s [implicit, due to a Link/Closes tag]' + % commit.hexsha[0:12], + ) else: # downstream? 
then just add a note if self.gittree and gittree.priority > self.gittree.priority: - RegHistory.event(self.regid, mergedate, commit.hexsha, commit.summary, author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd='note: %s' % regzbotcmd) + RegHistory.event( + self.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd='note: %s' % regzbotcmd, + ) return # upstream and already fixed? then just add a note - elif self.solved_reason == 'fixed' and self.gittree and gittree.priority < self.gittree.priority: - RegHistory.event(self.regid, mergedate, commit.hexsha, commit.summary, author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd='note: %s' % regzbotcmd) + elif ( + self.solved_reason == 'fixed' + and self.gittree + and gittree.priority < self.gittree.priority + ): + RegHistory.event( + self.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd='note: %s' % regzbotcmd, + ) return - RegHistory.event(self.regid, mergedate, commit.hexsha, commit.summary, author, - gitbranchid=gitbranch.gitbranchid, regzbotcmd='fix: %s' % regzbotcmd) + RegHistory.event( + self.regid, + mergedate, + commit.hexsha, + commit.summary, + author, + gitbranchid=gitbranch.gitbranchid, + regzbotcmd='fix: %s' % regzbotcmd, + ) self.fixedby( - mergedate, commit.hexsha, commit.summary, gitbranch.gitbranchid, lookup=False) + mergedate, + commit.hexsha, + commit.summary, + gitbranch.gitbranchid, + lookup=False, + ) @staticmethod def get_by_entry(entry): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.actimonid = regressions.actimonid AND actmonitor.entry=?' % RegressionBasic.DBCOLS, (entry,)).fetchone() + 'SELECT %s FROM regressions INNER JOIN actmonitor ON actmonitor.regid = regressions.regid WHERE actmonitor.actimonid = regressions.actimonid AND actmonitor.entry=?' 
+ % RegressionBasic.DBCOLS, + (entry,), + ).fetchone() if dbresult: return RegressionFull(*dbresult) return None -class UnhandledEvent(): - def __init__(self, unhanid, link, note, gmtime, regid, subject, solved_gmtime, solved_link, solved_subject): +class UnhandledEvent: + def __init__( + self, + unhanid, + link, + note, + gmtime, + regid, + subject, + solved_gmtime, + solved_link, + solved_subject, + ): self.unhanid = unhanid self.link = link self.note = note @@ -2378,7 +3238,7 @@ def __init__(self, unhanid, link, note, gmtime, regid, subject, solved_gmtime, s def db_create(version, dbcursor): logger.debug('Initializing new dbtable "unhandled"') RegzbotDbMeta.set_tableversion('unhandled', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE unhandled ( unhanid INTEGER NOT NULL PRIMARY KEY, link STRING NOT NULL, @@ -2389,17 +3249,21 @@ def db_create(version, dbcursor): solved_gmtime INTEGER, solved_link STRING, solved_subject STRING - )''') + )""") @staticmethod def add(link, note, gmtime=None, regid=None, subject=None): dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO unhandled + dbcursor.execute( + """INSERT INTO unhandled (link, note, gmtime, regid, subject) - VALUES (?, ?, ?, ?, ?)''', - (link, note, gmtime, regid, subject)) - logger.debug('[db unhandled] insert (unhanid:%s, link:%s, note:%s, gmtime:%s,regid:%s, subject:"%s")' % ( - dbcursor.lastrowid, link, note, gmtime, regid, subject)) + VALUES (?, ?, ?, ?, ?)""", + (link, note, gmtime, regid, subject), + ) + logger.debug( + '[db unhandled] insert (unhanid:%s, link:%s, note:%s, gmtime:%s,regid:%s, subject:"%s")' + % (dbcursor.lastrowid, link, note, gmtime, regid, subject) + ) @classmethod def get_all(cls): @@ -2408,10 +3272,21 @@ def get_all(cls): yield cls(*dbresult) -class ReportSource(): +class ReportSource: __ids_lore = [] - def __init__(self, repsrcid, priority, name, serverurl, kind, weburl, identifiers, lastchked, mininterval): + def __init__( + self, + repsrcid, + 
priority, + name, + serverurl, + kind, + weburl, + identifiers, + lastchked, + mininterval, + ): self.id = repsrcid self.repsrcid = repsrcid self.name = name @@ -2446,7 +3321,7 @@ def generic_name(self): def db_create(version, dbcursor): logger.debug('Initializing new dbtable "reportsources"') RegzbotDbMeta.set_tableversion('reportsources', version, dbcursor) - dbcursor.execute(''' + dbcursor.execute(""" CREATE TABLE reportsources ( repsrcid INTEGER NOT NULL PRIMARY KEY, priority INTEGER NOT NULL, @@ -2457,50 +3332,147 @@ def db_create(version, dbcursor): identifiers STRING, lastchked STRING, mininterval INT - )''') + )""") @classmethod - def add_or_modify(cls, name, priority, serverurl, kind, weburl, identifiers=None, lastchked=None, mininterval=None): + def add_or_modify( + cls, + name, + priority, + serverurl, + kind, + weburl, + identifiers=None, + lastchked=None, + mininterval=None, + ): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT repsrcid, priority, serverurl, weburl, identifiers, mininterval FROM reportsources WHERE name LIKE (?) AND kind LIKE (?)', (name, kind)).fetchone() + 'SELECT repsrcid, priority, serverurl, weburl, identifiers, mininterval FROM reportsources WHERE name LIKE (?) 
AND kind LIKE (?)', + (name, kind), + ).fetchone() if not dbresult: - cls.add(name, priority, serverurl, kind, weburl, identifiers, lastchked, mininterval) + cls.add( + name, + priority, + serverurl, + kind, + weburl, + identifiers, + lastchked, + mininterval, + ) else: - (db_repsrcid, db_priority, db_serverurl, db_weburl, db_identifiers, db_mininterval) = dbresult - if db_priority != priority or db_serverurl != serverurl or db_weburl != weburl or \ - db_identifiers != identifiers or db_mininterval != mininterval: - cls.modify(dbresult[0], name, priority, serverurl, kind, weburl, identifiers, mininterval) + ( + db_repsrcid, + db_priority, + db_serverurl, + db_weburl, + db_identifiers, + db_mininterval, + ) = dbresult + if ( + db_priority != priority + or db_serverurl != serverurl + or db_weburl != weburl + or db_identifiers != identifiers + or db_mininterval != mininterval + ): + cls.modify( + dbresult[0], + name, + priority, + serverurl, + kind, + weburl, + identifiers, + mininterval, + ) return None - @staticmethod - def add(name, priority, serverurl, kind, weburl, identifiers=None, lastchked=None, mininterval=None): + def add( + name, + priority, + serverurl, + kind, + weburl, + identifiers=None, + lastchked=None, + mininterval=None, + ): dbcursor = DBCON.cursor() - dbcursor.execute('''INSERT INTO reportsources + dbcursor.execute( + """INSERT INTO reportsources (name, serverurl, kind, priority, weburl, identifiers, lastchked, mininterval) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)''', - (name, serverurl, kind, priority, weburl, identifiers, lastchked, mininterval)) - logger.debug('[db reportsources] insert (repsrcid:%s, name:%s, serverurl:%s, kind:%s, priority:%s, weburl:%s, identifiers:%s, lastchked:%s, mininterval:%s)' % ( - dbcursor.lastrowid, name, serverurl, kind, priority, weburl, identifiers, lastchked, mininterval)) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", + ( + name, + serverurl, + kind, + priority, + weburl, + identifiers, + lastchked, + mininterval, + ), + ) + 
logger.debug( + '[db reportsources] insert (repsrcid:%s, name:%s, serverurl:%s, kind:%s, priority:%s, weburl:%s, identifiers:%s, lastchked:%s, mininterval:%s)' + % ( + dbcursor.lastrowid, + name, + serverurl, + kind, + priority, + weburl, + identifiers, + lastchked, + mininterval, + ) + ) return dbcursor.lastrowid @staticmethod - def modify(repsrcid, name, priority, serverurl, kind, weburl, identifiers=None, mininterval=None): + def modify( + repsrcid, + name, + priority, + serverurl, + kind, + weburl, + identifiers=None, + mininterval=None, + ): dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE reportsources + dbcursor.execute( + """UPDATE reportsources SET serverurl = (?), priority = (?), weburl = (?), identifiers = (?), mininterval = (?) - WHERE repsrcid=(?)''', - (serverurl, priority, weburl, identifiers, mininterval, repsrcid)) - logger.debug('[db reportsources] updated (repsrcid:%s, name:%s, serverurl:%s, kind:%s, priority:%s, weburl:%s, identifiers:%s, mininterval:%s)' % ( - repsrcid, name, serverurl, kind, priority, weburl, identifiers, mininterval)) + WHERE repsrcid=(?)""", + (serverurl, priority, weburl, identifiers, mininterval, repsrcid), + ) + logger.debug( + '[db reportsources] updated (repsrcid:%s, name:%s, serverurl:%s, kind:%s, priority:%s, weburl:%s, identifiers:%s, mininterval:%s)' + % ( + repsrcid, + name, + serverurl, + kind, + priority, + weburl, + identifiers, + mininterval, + ) + ) def delete(self, dbcursor=None): if not dbcursor: dbcursor = DBCON.cursor() - dbresult = dbcursor.execute('''DELETE FROM reportsources - WHERE repsrcid=(?)''', - (self.repsrcid, )) + dbresult = dbcursor.execute( + """DELETE FROM reportsources + WHERE repsrcid=(?)""", + (self.repsrcid,), + ) if dbcursor.rowcount > 0: logger.debug('[db reportsources] deleted entry (%s)', dbresult) @@ -2528,7 +3500,8 @@ def get_by_id(cls, repsrcid, dbcursor=None): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM reportsources WHERE repsrcid=(?)', 
(repsrcid, )).fetchone() + 'SELECT * FROM reportsources WHERE repsrcid=(?)', (repsrcid,) + ).fetchone() if dbresult: return cls(*dbresult) return None @@ -2537,7 +3510,8 @@ def get_by_id(cls, repsrcid, dbcursor=None): def get_byweburl(url): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM reportsources WHERE weburl LIKE (?)', (url, )).fetchone() + 'SELECT * FROM reportsources WHERE weburl LIKE (?)', (url,) + ).fetchone() if dbresult: return ReportSource(*dbresult) return None @@ -2557,14 +3531,17 @@ def getall(): @staticmethod def getall_bykind(kind): dbcursor = DBCON.cursor() - for dbresult in dbcursor.execute('SELECT * FROM reportsources WHERE kind=(?) ORDER BY priority ASC', (kind, )): + for dbresult in dbcursor.execute( + 'SELECT * FROM reportsources WHERE kind=(?) ORDER BY priority ASC', (kind,) + ): yield ReportSource(*dbresult) @staticmethod def get_by_name(name): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM reportsources WHERE name LIKE (?)', (name, )).fetchone() + 'SELECT * FROM reportsources WHERE name LIKE (?)', (name,) + ).fetchone() if dbresult: return ReportSource(*dbresult) @@ -2572,7 +3549,9 @@ def get_by_name(name): def get_by_identifier(identifier): dbcursor = DBCON.cursor() dbresult = dbcursor.execute( - 'SELECT * FROM reportsources WHERE identifiers LIKE (?)', ('%%%s%%' % identifier, )).fetchone() + 'SELECT * FROM reportsources WHERE identifiers LIKE (?)', + ('%%%s%%' % identifier,), + ).fetchone() if dbresult: return ReportSource(*dbresult) return None @@ -2593,14 +3572,20 @@ def get_by_url(cls, url): elif lowered_wo_protocol.startswith('bugzilla.kernel.org/show_bug.cgi?id='): repsrc = cls.get_byweburl('https://bugzilla.kernel.org/show_bug.cgi?id=') if repsrc: - ticketid = lowered_wo_protocol.removeprefix('bugzilla.kernel.org/show_bug.cgi?id=') - repsrc = cls.get_byweburl('https://bugzilla.kernel.org/show_bug.cgi?id=') + ticketid = lowered_wo_protocol.removeprefix( + 
'bugzilla.kernel.org/show_bug.cgi?id=' + ) + repsrc = cls.get_byweburl( + 'https://bugzilla.kernel.org/show_bug.cgi?id=' + ) return repsrc, ticketid elif lowered_url[2] == 'lore.kernel.org': if lowered_url[3] == 'all': logger.debug('ReportSource.get_by_url: FIXME') sys.exit(1) - repsrc = cls.get_byweburl('https://%s/%s/' % (lowered_url[2], lowered_url[3])) + repsrc = cls.get_byweburl( + 'https://%s/%s/' % (lowered_url[2], lowered_url[3]) + ) if repsrc: return repsrc, splitted_url[4] @@ -2620,7 +3605,11 @@ def url(self, entry, *, redirector=None, subentry=None): return entry elif self.kind == 'bugzilla': if subentry and subentry < 10000: - return '%s/show_bug.cgi?id=%s#c%s' % (self.serverurl.removeprefix('/'), entry, subentry) + return '%s/show_bug.cgi?id=%s#c%s' % ( + self.serverurl.removeprefix('/'), + entry, + subentry, + ) return '%s%s' % (self.weburl, entry) elif self.kind == 'lore': if redirector: @@ -2632,7 +3621,8 @@ def url(self, entry, *, redirector=None, subentry=None): elif self.kind == 'github': return '%s/issues/%s' % (self.serverurl.removeprefix('/'), entry) logger.critical( - "ReportSource doesn't yet known how to return a URL for %s", self.kind) + "ReportSource doesn't yet known how to return a URL for %s", self.kind + ) return None def set_lastchked(self, lastchked): @@ -2644,8 +3634,10 @@ def set_lastchked(self, lastchked): raise RuntimeError dbcursor = DBCON.cursor() - dbcursor.execute('''UPDATE reportsources SET lastchked = (?) WHERE repsrcid=(?)''', - (self.lastchked, self.repsrcid)) + dbcursor.execute( + """UPDATE reportsources SET lastchked = (?) 
WHERE repsrcid=(?)""", + (self.lastchked, self.repsrcid), + ) def update(self): if self.kind == 'generic': @@ -2673,7 +3665,7 @@ def supports_url(self, url_lowered, url_parsed): return url_parsed.geturl() -class ReportActivity(): +class ReportActivity: def __init__(self): # ensure self.id is present, but accept None: _ = self.id @@ -2687,7 +3679,7 @@ def web_url(self, *, redirector=None, subentry=None): return self.repsrc.url(self.reptrd.id, subentry=self.id) -class ReportThreadOffline(): +class ReportThreadOffline: def __init__(self, repsrc, id): self.id = id self.repsrc = repsrc @@ -2759,20 +3751,26 @@ def get_by_url(cls, url): def get_searchpattern(self): if not self.entryid: logger.critical( - "ReportSourceObsolete.get_searchpattern() called while self.entryid is unset") + 'ReportSourceObsolete.get_searchpattern() called while self.entryid is unset' + ) sys.exit(1) elif self.kind == 'generic': return self.entryid elif self.kind == 'lore': return 'https://lore.kernel.org/.*/%s' % urlencode(self.entryid) elif self.kind == 'bugzilla': - return '%s/show_bug.cgi?id=%s' % (self.serverurl.removeprefix('/'), self.entryid) + return '%s/show_bug.cgi?id=%s' % ( + self.serverurl.removeprefix('/'), + self.entryid, + ) elif self.kind == 'gitlab': return '%s/-/issues/%s' % (self.serverurl.removeprefix('/'), self.entryid) elif self.kind == 'github': return '%s/issues/%s' % (self.serverurl.removeprefix('/'), self.entryid) logger.critical( - "ReportSourceObsolete.get_searchpattern() doesn't yet known how to return a URL for %s", self.kind) + "ReportSourceObsolete.get_searchpattern() doesn't yet known how to return a URL for %s", + self.kind, + ) return None @@ -2810,10 +3808,10 @@ def db_create_all(dbcursor): logger.error("Aborting, directory '%s' exist already." 
% directory) sys.exit(1) - logger.info("Creating database in %s" % directory) + logger.info('Creating database in %s' % directory) dbcon = db_init(directory, create=True) if not dbcon: - logger.error("Aborting, failed creating database.") + logger.error('Aborting, failed creating database.') sys.exit(1) dbcursor = DBCON.cursor() @@ -2827,7 +3825,8 @@ def db_init(directory, create=False): if create: if os.path.isfile(dbfile): logger.warning( - "Database file '%s' already exists, skipping creation" % dbfile) + "Database file '%s' already exists, skipping creation" % dbfile + ) return False elif not os.path.isfile(dbfile): logger.warning("aborting, database file '%s' doesn't exist" % dbfile) @@ -2855,8 +3854,8 @@ def db_diff(filehdl_old, filehdl_new, filedesc_old='before', filedesc_new='after diff = difflib.unified_diff( filehdl_old.readlines(), filehdl_new.readlines(), - fromfile="%s" % filedesc_old, - tofile="%s" % filedesc_new, + fromfile='%s' % filedesc_old, + tofile='%s' % filedesc_new, n=1, ) @@ -2864,8 +3863,7 @@ def db_diff(filehdl_old, filehdl_new, filedesc_old='before', filedesc_new='after for line in diff: if differences is False: differences = True - sys.stdout.write( - "The results from don't match the expected results:\n") + sys.stdout.write("The results from don't match the expected results:\n") sys.stdout.write('#######\n') sys.stdout.write(line) @@ -2880,7 +3878,10 @@ def init_reposdir(directory): def days_delta(past): - return (datetime.datetime.now(datetime.timezone.utc) - datetime.datetime.fromtimestamp(past, datetime.timezone.utc)).days + return ( + datetime.datetime.now(datetime.timezone.utc) + - datetime.datetime.fromtimestamp(past, datetime.timezone.utc) + ).days def timendate_now(): @@ -2898,19 +3899,19 @@ def timendate_gmtime_to_dt(gmtime): def parse_link(url): tmpstring = url - if tmpstring.startswith("https://"): - tmpstring = tmpstring.removeprefix("https://") - elif tmpstring.startswith("http://"): - tmpstring = 
tmpstring.removeprefix("http://") + if tmpstring.startswith('https://'): + tmpstring = tmpstring.removeprefix('https://') + elif tmpstring.startswith('http://'): + tmpstring = tmpstring.removeprefix('http://') domain = mlist = msgid = None - if (tmpstring.startswith("lore.kernel.org") - or tmpstring.startswith("lkml.kernel.org")): - + if tmpstring.startswith('lore.kernel.org') or tmpstring.startswith( + 'lkml.kernel.org' + ): domain = 'lore.kernel.org' tmplist = tmpstring.split('/', maxsplit=2) if len(tmplist) <= 2: - logger.debug("Ignoring %s, failed to parse", url) + logger.debug('Ignoring %s, failed to parse', url) return None, None, None mlist = tmplist[1] @@ -2919,22 +3920,22 @@ def parse_link(url): msgid, _, _ = tmpstring.partition('/') if mlist == 'r': - if tmpstring.startswith("lkml.kernel.org"): + if tmpstring.startswith('lkml.kernel.org'): mlist = 'lkml' else: # FIXMELATER: this is the lore redirector; for now just assume it redirecting to LKML, which likely needs fixing later mlist = 'lkml' - elif tmpstring.startswith("bugzilla.kernel.org"): + elif tmpstring.startswith('bugzilla.kernel.org'): bugid = tmpstring.removeprefix('bugzilla.kernel.org/show_bug.cgi?id=') if bugid.isnumeric(): msgid = bugid domain = 'bugzilla.kernel.org' else: - logger.debug( - "Tried to get bugid from %s, but failed", url) + logger.debug('Tried to get bugid from %s, but failed', url) else: logger.debug( - "Tried to get msgid from %s, but don't known how to handle that domain", url) + "Tried to get msgid from %s, but don't known how to handle that domain", url + ) return domain, mlist, msgid @@ -2953,28 +3954,51 @@ def basicressource_checkdir_exists(directory, create=False): def basicressources_gittrees_setup(gittreesdir): # FIXMELATER: we should clone these ourselves, but for now leave that task to the user - for gittreedir in (os.path.join(gittreesdir, 'mainline'), - os.path.join(gittreesdir, 'next'), - os.path.join(gittreesdir, 'stable'), - ): + for gittreedir in ( + 
os.path.join(gittreesdir, 'mainline'), + os.path.join(gittreesdir, 'next'), + os.path.join(gittreesdir, 'stable'), + ): if not basicressource_checkdir_exists(gittreedir, create=False): logger.error( - "Aborting, as the directory '%s' does not exist yet; please create it and check clone the appropriate Linux tree into it." % gittreedir) + "Aborting, as the directory '%s' does not exist yet; please create it and check clone the appropriate Linux tree into it." + % gittreedir + ) sys.exit(1) gitdir = os.path.join(gittreedir, '.git') if not basicressource_checkdir_exists(gitdir, create=False): logger.error( - "Aborting, as the directory '%s' appears to not contain a git tree." % gittreedir) + "Aborting, as the directory '%s' appears to not contain a git tree." + % gittreedir + ) sys.exit(1) # hardcoded for now, too - GitTree.add('mainline', 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/', 'cgit', - 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/', 'master', 0) - GitTree.add('next', 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/', 'cgit', - 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/', 'master', -1) - GitTree.add('stable', 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git', 'cgit', - 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/', r'linux-[0-9][0-9]*.[0-9][0-9]*\.y', 1) + GitTree.add( + 'mainline', + 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/', + 'cgit', + 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/', + 'master', + 0, + ) + GitTree.add( + 'next', + 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/', + 'cgit', + 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/', + 'master', + -1, + ) + GitTree.add( + 'stable', + 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git', + 'cgit', + 
'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/', + r'linux-[0-9][0-9]*.[0-9][0-9]*\.y', + 1, + ) def basicressources_repsrces_setup(): @@ -2982,9 +4006,13 @@ def basicressources_repsrces_setup(): ReportSource.add('generic', 99, '', 'generic', '') ReportSource.add('lore_all', 98, '', 'lore', 'https://lore.kernel.org/all/') - ReportSource.add('bugzilla.kernel.org', 0, - 'https://bugzilla.kernel.org', - 'bugzilla', 'https://bugzilla.kernel.org/show_bug.cgi?id=') + ReportSource.add( + 'bugzilla.kernel.org', + 0, + 'https://bugzilla.kernel.org', + 'bugzilla', + 'https://bugzilla.kernel.org/show_bug.cgi?id=', + ) # these are optional; maybe they should be in a config file @@ -2993,219 +4021,523 @@ def basicressources_repsrces_setup(): # 'https://bugzilla.kernel.org', # 'bugzilla', 'https://bugzilla.kernel.org/show_bug.cgi?id=') - ReportSource.add('lkml', 1, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-kernel', - 'lore', 'https://lore.kernel.org/lkml/', identifiers='linux-kernel@vger.kernel.org') + ReportSource.add( + 'lkml', + 1, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-kernel', + 'lore', + 'https://lore.kernel.org/lkml/', + identifiers='linux-kernel@vger.kernel.org', + ) if is_running_citesting(): - ReportSource.add('regressions', 2, - 'nntp://nntp.lore.kernel.org/dev.linux.lists.regressions', - 'lore', 'https://lore.kernel.org/regressions/', identifiers='regressions@lists.linux.dev') + ReportSource.add( + 'regressions', + 2, + 'nntp://nntp.lore.kernel.org/dev.linux.lists.regressions', + 'lore', + 'https://lore.kernel.org/regressions/', + identifiers='regressions@lists.linux.dev', + ) else: - ReportSource.add('regressions', 2, - 'nntp://nntp.lore.kernel.org/dev.linux.lists.regressions', - 'lore', 'https://lore.kernel.org/regressions/', identifiers='regressions@lists.linux.dev', - lastchked=190) + ReportSource.add( + 'regressions', + 2, + 'nntp://nntp.lore.kernel.org/dev.linux.lists.regressions', + 'lore', + 
'https://lore.kernel.org/regressions/', + identifiers='regressions@lists.linux.dev', + lastchked=190, + ) # basics - ReportSource.add('stable', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.stable', - 'lore', 'https://lore.kernel.org/stable/', identifiers='stable@vger.kernel.org') - ReportSource.add('mm', 6, - 'nntp://nntp.lore.kernel.org/org.kvack.linux-mm', - 'lore', 'https://lore.kernel.org/linux-mm/', identifiers='linux-mm@kvack.org') - ReportSource.add('arch', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-arch', - 'lore', 'https://lore.kernel.org/linux-arch/', identifiers='linux-arch@vger.kernel.org') + ReportSource.add( + 'stable', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.stable', + 'lore', + 'https://lore.kernel.org/stable/', + identifiers='stable@vger.kernel.org', + ) + ReportSource.add( + 'mm', + 6, + 'nntp://nntp.lore.kernel.org/org.kvack.linux-mm', + 'lore', + 'https://lore.kernel.org/linux-mm/', + identifiers='linux-mm@kvack.org', + ) + ReportSource.add( + 'arch', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-arch', + 'lore', + 'https://lore.kernel.org/linux-arch/', + identifiers='linux-arch@vger.kernel.org', + ) # arch, mm, and virt - ReportSource.add('arm', 3, - 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-arm-kernel', - 'lore', 'https://lore.kernel.org/linux-arm-kernel/', identifiers='linux-arm-kernel@lists.infradead.org') - ReportSource.add('kvm', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.kvm', - 'lore', 'https://lore.kernel.org/kvm/', identifiers='kvm@vger.kernel.org') - ReportSource.add('mips', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-mips', - 'lore', 'https://lore.kernel.org/linux-mips/', identifiers='linux-mips@vger.kernel.org') - ReportSource.add('ppc-dev', 3, - 'nntp://nntp.lore.kernel.org/org.ozlabs.lists.linuxppc-dev', - 'lore', 'https://lore.kernel.org/linuxppc-dev/', identifiers='linuxppc-dev@lists.ozlabs.org') - ReportSource.add('virtualization', 5, - 
'nntp://nntp.lore.kernel.org/org.linuxfoundation.lists.virtualization', - 'lore', 'https://lore.kernel.org/virtualization/', identifiers='virtualization@lists.linux-foundation.org') + ReportSource.add( + 'arm', + 3, + 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-arm-kernel', + 'lore', + 'https://lore.kernel.org/linux-arm-kernel/', + identifiers='linux-arm-kernel@lists.infradead.org', + ) + ReportSource.add( + 'kvm', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.kvm', + 'lore', + 'https://lore.kernel.org/kvm/', + identifiers='kvm@vger.kernel.org', + ) + ReportSource.add( + 'mips', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-mips', + 'lore', + 'https://lore.kernel.org/linux-mips/', + identifiers='linux-mips@vger.kernel.org', + ) + ReportSource.add( + 'ppc-dev', + 3, + 'nntp://nntp.lore.kernel.org/org.ozlabs.lists.linuxppc-dev', + 'lore', + 'https://lore.kernel.org/linuxppc-dev/', + identifiers='linuxppc-dev@lists.ozlabs.org', + ) + ReportSource.add( + 'virtualization', + 5, + 'nntp://nntp.lore.kernel.org/org.linuxfoundation.lists.virtualization', + 'lore', + 'https://lore.kernel.org/virtualization/', + identifiers='virtualization@lists.linux-foundation.org', + ) # graphics - ReportSource.add('dri', 3, - 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.dri-devel', - 'lore', 'https://lore.kernel.org/dri-devel/', identifiers='dri-devel@lists.freedesktop.org') - ReportSource.add('amd-gfx', 5, - 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.amd-gfx', - 'lore', 'https://lore.kernel.org/amd-gfx/', identifiers='amd-gfx@lists.freedesktop.org') - ReportSource.add('fbdev', 7, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-fbdev', - 'lore', 'https://lore.kernel.org/linux-fbdev/', identifiers='linux-fbdev@vger.kernel.org') - ReportSource.add('nouveau', 5, - 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.nouveau', - 'lore', 'https://lore.kernel.org/nouveau/', identifiers='nouveau@lists.freedesktop.org') - ReportSource.add('intel-gfx', 
5, - 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.intel-gfx', - 'lore', 'https://lore.kernel.org/intel-gfx/', identifiers='intel-gfxlists.freedesktop.org') + ReportSource.add( + 'dri', + 3, + 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.dri-devel', + 'lore', + 'https://lore.kernel.org/dri-devel/', + identifiers='dri-devel@lists.freedesktop.org', + ) + ReportSource.add( + 'amd-gfx', + 5, + 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.amd-gfx', + 'lore', + 'https://lore.kernel.org/amd-gfx/', + identifiers='amd-gfx@lists.freedesktop.org', + ) + ReportSource.add( + 'fbdev', + 7, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-fbdev', + 'lore', + 'https://lore.kernel.org/linux-fbdev/', + identifiers='linux-fbdev@vger.kernel.org', + ) + ReportSource.add( + 'nouveau', + 5, + 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.nouveau', + 'lore', + 'https://lore.kernel.org/nouveau/', + identifiers='nouveau@lists.freedesktop.org', + ) + ReportSource.add( + 'intel-gfx', + 5, + 'nntp://nntp.lore.kernel.org/org.freedesktop.lists.intel-gfx', + 'lore', + 'https://lore.kernel.org/intel-gfx/', + identifiers='intel-gfxlists.freedesktop.org', + ) # network - ReportSource.add('ath10k', 7, - 'nntp://nntp.lore.kernel.org/org.infradead.lists.ath10k', - 'lore', 'https://lore.kernel.org/ath10k/', identifiers='ath10k@lists.infradead.org') - ReportSource.add('ath11k', 7, - 'nntp://nntp.lore.kernel.org/org.infradead.lists.ath11k', - 'lore', 'https://lore.kernel.org/ath11k/', identifiers='ath10k@lists.infradead.org') - ReportSource.add('netdev', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.netdev', - 'lore', 'https://lore.kernel.org/netdev/', identifiers='netdev@vger.kernel.org') - ReportSource.add('rdma', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-rdma', - 'lore', 'https://lore.kernel.org/linux-rdma/', identifiers='linux-rdma@vger.kernel.org') - ReportSource.add('wireless', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-wireless', - 'lore', 
'https://lore.kernel.org/linux-wireless/', identifiers='linux-wireless@vger.kernel.org') - ReportSource.add('intel-wired-lan', 7, - 'nntp://nntp.lore.kernel.org/org.osuosl.intel-wired-lan', - 'lore', 'https://lore.kernel.org/intel-wired-lan/', identifiers='intel-wired-lan@lists.osuosl.org') + ReportSource.add( + 'ath10k', + 7, + 'nntp://nntp.lore.kernel.org/org.infradead.lists.ath10k', + 'lore', + 'https://lore.kernel.org/ath10k/', + identifiers='ath10k@lists.infradead.org', + ) + ReportSource.add( + 'ath11k', + 7, + 'nntp://nntp.lore.kernel.org/org.infradead.lists.ath11k', + 'lore', + 'https://lore.kernel.org/ath11k/', + identifiers='ath10k@lists.infradead.org', + ) + ReportSource.add( + 'netdev', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.netdev', + 'lore', + 'https://lore.kernel.org/netdev/', + identifiers='netdev@vger.kernel.org', + ) + ReportSource.add( + 'rdma', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-rdma', + 'lore', + 'https://lore.kernel.org/linux-rdma/', + identifiers='linux-rdma@vger.kernel.org', + ) + ReportSource.add( + 'wireless', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-wireless', + 'lore', + 'https://lore.kernel.org/linux-wireless/', + identifiers='linux-wireless@vger.kernel.org', + ) + ReportSource.add( + 'intel-wired-lan', + 7, + 'nntp://nntp.lore.kernel.org/org.osuosl.intel-wired-lan', + 'lore', + 'https://lore.kernel.org/intel-wired-lan/', + identifiers='intel-wired-lan@lists.osuosl.org', + ) # storage - ReportSource.add('block', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-block', - 'lore', 'https://lore.kernel.org/linux-block/', identifiers='linux-block@vger.kernel.org') - ReportSource.add('mtd', 6, - 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-mtd', - 'lore', 'https://lore.kernel.org/linux-mtd/', identifiers='linux-mtd@lists.infradead.org') - ReportSource.add('nvme', 6, - 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-nvme', - 'lore', 
'https://lore.kernel.org/linux-nvme/', identifiers='linux-nvme@lists.infradead.org') - ReportSource.add('raid', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-raid', - 'lore', 'https://lore.kernel.org/linux-raid/', identifiers='linux-raid@vger.kernel.org') - ReportSource.add('scsi', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-scsi', - 'lore', 'https://lore.kernel.org/linux-scsi/', identifiers='linux-scsi@vger.kernel.org') + ReportSource.add( + 'block', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-block', + 'lore', + 'https://lore.kernel.org/linux-block/', + identifiers='linux-block@vger.kernel.org', + ) + ReportSource.add( + 'mtd', + 6, + 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-mtd', + 'lore', + 'https://lore.kernel.org/linux-mtd/', + identifiers='linux-mtd@lists.infradead.org', + ) + ReportSource.add( + 'nvme', + 6, + 'nntp://nntp.lore.kernel.org/org.infradead.lists.linux-nvme', + 'lore', + 'https://lore.kernel.org/linux-nvme/', + identifiers='linux-nvme@lists.infradead.org', + ) + ReportSource.add( + 'raid', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-raid', + 'lore', + 'https://lore.kernel.org/linux-raid/', + identifiers='linux-raid@vger.kernel.org', + ) + ReportSource.add( + 'scsi', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-scsi', + 'lore', + 'https://lore.kernel.org/linux-scsi/', + identifiers='linux-scsi@vger.kernel.org', + ) # filesystems - ReportSource.add('cifs', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-cifs', - 'lore', 'https://lore.kernel.org/linux-cifs/', identifiers='linux-cifs@vger.kernel.org') - ReportSource.add('btrfs', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-btrfs', - 'lore', 'https://lore.kernel.org/linux-btrfs/', identifiers='linux-btrfs@vger.kernel.org') - ReportSource.add('ext4', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-ext4', - 'lore', 'https://lore.kernel.org/linux-ext4/', identifiers='linux-ext4@vger.kernel.org') - 
ReportSource.add('fsdevel', 3, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-fsdevel', - 'lore', 'https://lore.kernel.org/linux-fsdevel/', identifiers='linux-fsdevel@vger.kernel.org') - ReportSource.add('nfs', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-nfs', - 'lore', 'https://lore.kernel.org/linux-nfs/', identifiers='linux-nfs@vger.kernel.org') - ReportSource.add('xfs', 4, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-xfs', - 'lore', 'https://lore.kernel.org/linux-xfs/', identifiers='linux-xfs@vger.kernel.org') + ReportSource.add( + 'cifs', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-cifs', + 'lore', + 'https://lore.kernel.org/linux-cifs/', + identifiers='linux-cifs@vger.kernel.org', + ) + ReportSource.add( + 'btrfs', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-btrfs', + 'lore', + 'https://lore.kernel.org/linux-btrfs/', + identifiers='linux-btrfs@vger.kernel.org', + ) + ReportSource.add( + 'ext4', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-ext4', + 'lore', + 'https://lore.kernel.org/linux-ext4/', + identifiers='linux-ext4@vger.kernel.org', + ) + ReportSource.add( + 'fsdevel', + 3, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-fsdevel', + 'lore', + 'https://lore.kernel.org/linux-fsdevel/', + identifiers='linux-fsdevel@vger.kernel.org', + ) + ReportSource.add( + 'nfs', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-nfs', + 'lore', + 'https://lore.kernel.org/linux-nfs/', + identifiers='linux-nfs@vger.kernel.org', + ) + ReportSource.add( + 'xfs', + 4, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-xfs', + 'lore', + 'https://lore.kernel.org/linux-xfs/', + identifiers='linux-xfs@vger.kernel.org', + ) # pci, pm, low-level, etc. 
- ReportSource.add('crypto', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-crypto', - 'lore', 'https://lore.kernel.org/linux-crypto/', identifiers='linux-crypto@vger.kernel.org') - ReportSource.add('edac', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-edac', - 'lore', 'https://lore.kernel.org/linux-edac/', identifiers='linux-edac@vger.kernel.org') - ReportSource.add('i2c', 5, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-i2c', - 'lore', 'https://lore.kernel.org/linux-i2c/', identifiers='linux-i2c@vger.kernel.org') - ReportSource.add('iio', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-iio', - 'lore', 'https://lore.kernel.org/linux-iio/', identifiers='linux-iio@vger.kernel.org') - ReportSource.add('iommu', 6, - 'nntp://nntp.lore.kernel.org/dev.linux.lists.iommu', - 'lore', 'https://lore.kernel.org/linux-iommu/', identifiers='iommu@lists.linux.dev') - ReportSource.add('pci', 5, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-pci', - 'lore', 'https://lore.kernel.org/linux-pci/', identifiers='linux-pci@vger.kernel.org') - ReportSource.add('pm', 5, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-pm', - 'lore', 'https://lore.kernel.org/linux-pm/', identifiers='linux-pm@vger.kernel.org') - ReportSource.add('serial', 7, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-serial', - 'lore', 'https://lore.kernel.org/linux-serial/', identifiers='linux-serial@vger.kernel.org') + ReportSource.add( + 'crypto', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-crypto', + 'lore', + 'https://lore.kernel.org/linux-crypto/', + identifiers='linux-crypto@vger.kernel.org', + ) + ReportSource.add( + 'edac', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-edac', + 'lore', + 'https://lore.kernel.org/linux-edac/', + identifiers='linux-edac@vger.kernel.org', + ) + ReportSource.add( + 'i2c', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-i2c', + 'lore', + 'https://lore.kernel.org/linux-i2c/', + 
identifiers='linux-i2c@vger.kernel.org', + ) + ReportSource.add( + 'iio', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-iio', + 'lore', + 'https://lore.kernel.org/linux-iio/', + identifiers='linux-iio@vger.kernel.org', + ) + ReportSource.add( + 'iommu', + 6, + 'nntp://nntp.lore.kernel.org/dev.linux.lists.iommu', + 'lore', + 'https://lore.kernel.org/linux-iommu/', + identifiers='iommu@lists.linux.dev', + ) + ReportSource.add( + 'pci', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-pci', + 'lore', + 'https://lore.kernel.org/linux-pci/', + identifiers='linux-pci@vger.kernel.org', + ) + ReportSource.add( + 'pm', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-pm', + 'lore', + 'https://lore.kernel.org/linux-pm/', + identifiers='linux-pm@vger.kernel.org', + ) + ReportSource.add( + 'serial', + 7, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-serial', + 'lore', + 'https://lore.kernel.org/linux-serial/', + identifiers='linux-serial@vger.kernel.org', + ) # other drivers - ReportSource.add('alsa', 5, - 'nntp://nntp.lore.kernel.org/org.alsa-project.alsa-devel', - 'lore', 'https://lore.kernel.org/alsa-devel/', identifiers='alsa-devel@alsa-project.org') - ReportSource.add('bluetooth', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-bluetooth', - 'lore', 'https://lore.kernel.org/linux-bluetooth/', identifiers='linux-bluetooth@vger.kernel.org') - ReportSource.add('hwmon', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-hwmon', - 'lore', 'https://lore.kernel.org/linux-hwmon/', identifiers='linux-hwmon@vger.kernel.org') - ReportSource.add('input', 6, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-input', - 'lore', 'https://lore.kernel.org/linux-input/', identifiers='linux-input@vger.kernel.org') - ReportSource.add('media', 5, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-media', - 'lore', 'https://lore.kernel.org/linux-media/', identifiers='linux-media@vger.kernel.org') - ReportSource.add('platform-driver-x86', 5, 
- 'nntp://nntp.lore.kernel.org/org.kernel.vger.platform-driver-x86', - 'lore', 'https://lore.kernel.org/platform-driver-x86/', identifiers='platform-driver-x86@vger.kernel.org') - ReportSource.add('staging', 6, - 'nntp://nntp.lore.kernel.org/dev.linux.lists.linux-staging', - 'lore', 'https://lore.kernel.org/linux-staging/', identifiers='linux-staging@lists.linux.dev') - ReportSource.add('usb', 5, - 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-usb', - 'lore', 'https://lore.kernel.org/linux-usb/', identifiers='linux-usb@vger.kernel.org') + ReportSource.add( + 'alsa', + 5, + 'nntp://nntp.lore.kernel.org/org.alsa-project.alsa-devel', + 'lore', + 'https://lore.kernel.org/alsa-devel/', + identifiers='alsa-devel@alsa-project.org', + ) + ReportSource.add( + 'bluetooth', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-bluetooth', + 'lore', + 'https://lore.kernel.org/linux-bluetooth/', + identifiers='linux-bluetooth@vger.kernel.org', + ) + ReportSource.add( + 'hwmon', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-hwmon', + 'lore', + 'https://lore.kernel.org/linux-hwmon/', + identifiers='linux-hwmon@vger.kernel.org', + ) + ReportSource.add( + 'input', + 6, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-input', + 'lore', + 'https://lore.kernel.org/linux-input/', + identifiers='linux-input@vger.kernel.org', + ) + ReportSource.add( + 'media', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-media', + 'lore', + 'https://lore.kernel.org/linux-media/', + identifiers='linux-media@vger.kernel.org', + ) + ReportSource.add( + 'platform-driver-x86', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.platform-driver-x86', + 'lore', + 'https://lore.kernel.org/platform-driver-x86/', + identifiers='platform-driver-x86@vger.kernel.org', + ) + ReportSource.add( + 'staging', + 6, + 'nntp://nntp.lore.kernel.org/dev.linux.lists.linux-staging', + 'lore', + 'https://lore.kernel.org/linux-staging/', + identifiers='linux-staging@lists.linux.dev', + ) + 
ReportSource.add( + 'usb', + 5, + 'nntp://nntp.lore.kernel.org/org.kernel.vger.linux-usb', + 'lore', + 'https://lore.kernel.org/linux-usb/', + identifiers='linux-usb@vger.kernel.org', + ) + def basicressources_repsrces_add_or_modify(): - ReportSource.add_or_modify('drm/amd', 16, - 'https://gitlab.freedesktop.org/drm/amd', - 'gitlab', '', mininterval=3600) + ReportSource.add_or_modify( + 'drm/amd', + 16, + 'https://gitlab.freedesktop.org/drm/amd', + 'gitlab', + '', + mininterval=3600, + ) - ReportSource.add_or_modify('drm/i915/kernel', 16, - 'https://gitlab.freedesktop.org/drm/i915/kernel', - 'gitlab', '', mininterval=3600) + ReportSource.add_or_modify( + 'drm/i915/kernel', + 16, + 'https://gitlab.freedesktop.org/drm/i915/kernel', + 'gitlab', + '', + mininterval=3600, + ) - ReportSource.add_or_modify('drm/misc', 16, - 'https://gitlab.freedesktop.org/drm/misc/kernel', - 'gitlab', '', mininterval=3600) + ReportSource.add_or_modify( + 'drm/misc', + 16, + 'https://gitlab.freedesktop.org/drm/misc/kernel', + 'gitlab', + '', + mininterval=3600, + ) - ReportSource.add_or_modify('drm/msm', 16, - 'https://gitlab.freedesktop.org/drm/msm', - 'gitlab', '', mininterval=43200) + ReportSource.add_or_modify( + 'drm/msm', + 16, + 'https://gitlab.freedesktop.org/drm/msm', + 'gitlab', + '', + mininterval=43200, + ) - ReportSource.add_or_modify('drm/nouveau', 16, - 'https://gitlab.freedesktop.org/drm/nouveau', - 'gitlab', '', mininterval=3600) + ReportSource.add_or_modify( + 'drm/nouveau', + 16, + 'https://gitlab.freedesktop.org/drm/nouveau', + 'gitlab', + '', + mininterval=3600, + ) - ReportSource.add_or_modify('drm/tegra', 16, - 'https://gitlab.freedesktop.org/drm/tegra', - 'gitlab', '', mininterval=43200) + ReportSource.add_or_modify( + 'drm/tegra', + 16, + 'https://gitlab.freedesktop.org/drm/tegra', + 'gitlab', + '', + mininterval=43200, + ) + + ReportSource.add_or_modify( + 'drm/xe', + 16, + 'https://gitlab.freedesktop.org/drm/xe/kernel', + 'gitlab', + '', + mininterval=14400, + ) 
- ReportSource.add_or_modify('drm/xe', 16, - 'https://gitlab.freedesktop.org/drm/xe/kernel', - 'gitlab', '', mininterval=14400) + ReportSource.add_or_modify( + 'v9fs', + 16, + 'https://github.com/v9fs/linux', + 'github', + '', + mininterval=43200, + ) - ReportSource.add_or_modify('v9fs', 16, - 'https://github.com/v9fs/linux', - 'github', '', mininterval=43200) + ReportSource.add_or_modify( + 'AsahiLinux', + 16, + 'https://github.com/AsahiLinux/linux', + 'github', + '', + mininterval=43200, + ) - ReportSource.add_or_modify('AsahiLinux', 16, - 'https://github.com/AsahiLinux/linux', - 'github', '', mininterval=43200) + ReportSource.add_or_modify( + 'ClangBuiltLinux', + 16, + 'https://github.com/ClangBuiltLinux/linux', + 'github', + '', + mininterval=14400, + ) - ReportSource.add_or_modify('ClangBuiltLinux', 16, - 'https://github.com/ClangBuiltLinux/linux', - 'github', '', mininterval=14400) + ReportSource.add_or_modify( + 'thesofproject', + 16, + 'https://github.com/thesofproject/linux', + 'github', + '', + mininterval=43200, + ) - ReportSource.add_or_modify('thesofproject', 16, - 'https://github.com/thesofproject/linux', - 'github', '', mininterval=43200) -def basicressources_get_dirs(databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None): +def basicressources_get_dirs( + databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None +): # constructs the directory paths # use default path, unless tmpdir if given; but even then use the default, if the variable is set to 'True' @@ -3231,9 +4563,12 @@ def basicressources_get_dirs(databasedir=None, gittreesdir=None, websitesdir=Non return configfile, databasedir, gittreesdir, websitesdir -def basicressources_setup(databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None): +def basicressources_setup( + databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None +): _, databasedir, gittreesdir, websitesdir = basicressources_get_dirs( - databasedir, gittreesdir, websitesdir, tmpdir) + 
databasedir, gittreesdir, websitesdir, tmpdir + ) db_create(databasedir) @@ -3247,11 +4582,14 @@ def basicressources_setup(databasedir=None, gittreesdir=None, websitesdir=None, db_commit() -def basicressources_init(databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None): +def basicressources_init( + databasedir=None, gittreesdir=None, websitesdir=None, tmpdir=None +): from random import randrange configfile, databasedir, gittreesdir, websitesdir = basicressources_get_dirs( - databasedir, gittreesdir, websitesdir, tmpdir) + databasedir, gittreesdir, websitesdir, tmpdir + ) global CONFIGURATION CONFIGURATION = configparser.ConfigParser() @@ -3262,7 +4600,7 @@ def basicressources_init(databasedir=None, gittreesdir=None, websitesdir=None, t # occational cleanup if randrange(500) == 250: - DBCON.execute("VACUUM") + DBCON.execute('VACUUM') RegzbotDbMeta.update() RecordProcessedMsgids.cleanup(30) @@ -3304,7 +4642,9 @@ def redo_regressions(msgids): for msgid in msgids: regression = RegressionBasic.get_by_entry(urldecode(msgid)) if not regression: - logger.critical('Aborting, could not find any regression with msgid %s', msgid) + logger.critical( + 'Aborting, could not find any regression with msgid %s', msgid + ) sys.exit(1) # store everything we need later @@ -3339,8 +4679,7 @@ def redo_regressions(msgids): tmpfile_before.seek(0) tmpfile_after.seek(0) if db_diff(tmpfile_before, tmpfile_after): - answer = input( - "Enter 'a' to abort, anything else to move on") + answer = input("Enter 'a' to abort, anything else to move on") if answer.lower() == 'a': sys.exit(1) @@ -3353,6 +4692,7 @@ def recheck(msgids): db_commit() from export_web import RegExportWeb + RegExportWeb.compile() db_close() @@ -3370,6 +4710,7 @@ def run(): # update webpages from export_web import RegExportWeb + RegExportWeb.compile() # we are done @@ -3381,6 +4722,7 @@ def generate_web(): # update webpages from export_web import RegExportWeb + RegExportWeb.compile() # we are done @@ -3422,6 +4764,6 
@@ def inspectobj(obj): for att in dir(obj): try: ref = getattr(obj, att) - print("%s: %s (%s)" % (att, getattr(obj, att), type(ref))) + print('%s: %s (%s)' % (att, getattr(obj, att), type(ref))) except Exception: - print("ERROR: inspection of %s.%s failed" % (type(obj), att)) + print('ERROR: inspection of %s.%s failed' % (type(obj), att)) diff --git a/regzbot/_rbcmd.py b/regzbot/_rbcmd.py index e80b967..cfdd46a 100644 --- a/regzbot/_rbcmd.py +++ b/regzbot/_rbcmd.py @@ -6,11 +6,13 @@ import re -if __name__ != "__main__": +if __name__ != '__main__': import regzbot + logger = regzbot.logger else: import logging + logger = logging # if False: if True: @@ -20,6 +22,7 @@ class RegressionCreatedException(Exception): pass + class RegressionNotFound(Exception): pass @@ -79,13 +82,19 @@ def _cmd_duplicate(self, regression): def _cmd_duplicate_overthere(self, regression): for url in self.parameters.split(): - reptrd_other = regzbot.ReportThread.from_url(url, repact=self._rbcmd_stack.repact) + reptrd_other = regzbot.ReportThread.from_url( + url, repact=self._rbcmd_stack.repact + ) regression_created = regression.cmd_duplicate(self, reptrd_other) if regression_created: - self._rbcmd_stack.add_related_activities(reptrd_other, regression_created) + self._rbcmd_stack.add_related_activities( + reptrd_other, regression_created + ) def _cmd_duplicate_this(self): - reptrd_other = regzbot.ReportThread.from_url(self.parameters.split()[0], repact=self._rbcmd_stack.repact) + reptrd_other = regzbot.ReportThread.from_url( + self.parameters.split()[0], repact=self._rbcmd_stack.repact + ) regression_other = None for actimon in regzbot.RegActivityMonitor.get_by_reptrd(reptrd_other): if actimon.regid: @@ -116,6 +125,7 @@ def _remove_quoting_chars(pattern): def _cmd_from(self, regression): if '<' in self.parameters and '>' in self.parameters: from email.utils import parseaddr + realname, username = parseaddr(self.parameters) else: realname = self.parameters @@ -138,7 +148,11 @@ def 
_cmd_relate(self, regression): regression.cmd_monitor(self, url, description) except regzbot.RepDownloadError: regzbot.UnhandledEvent.add( - self.repact.web_url, "unable to relate thread %s, download failed" % url, gmtime=self.repact.gmtime, subject=self.repact.summary) + self.repact.web_url, + 'unable to relate thread %s, download failed' % url, + gmtime=self.repact.gmtime, + subject=self.repact.summary, + ) def _cmd_relatebrief(self, regression): url, description = self._parse_link_and_description(self.parameters) @@ -158,11 +172,19 @@ def _cmd_unrelate(self, regression): try: if not regression.cmd_unlink(self, url): regzbot.UnhandledEvent.add( - self.repact.web_url, "unable to unrelate thread %s, not related yet" % url, gmtime=self.repact.gmtime, subject=self.repact.summary) + self.repact.web_url, + 'unable to unrelate thread %s, not related yet' % url, + gmtime=self.repact.gmtime, + subject=self.repact.summary, + ) return False except regzbot.RepDownloadError: regzbot.UnhandledEvent.add( - self.repact.web_url, "unable to unrelate thread %s, parsing failed" % url, gmtime=self.repact.gmtime, subject=self.repact.summary) + self.repact.web_url, + 'unable to unrelate thread %s, parsing failed' % url, + gmtime=self.repact.gmtime, + subject=self.repact.summary, + ) def process(self, regression, regression_topmost_duplicate): regression_created = None @@ -180,27 +202,35 @@ def process(self, regression, regression_topmost_duplicate): regression_created = self._cmd_introduced(None) regression = regression_created elif self.cmd in ( - 'backburn', - 'duplicate', - 'fix', - 'from', - 'inconclusive', - 'introduced', - 'relate', - 'relatebrief', - 'resolve', - 'summary', - 'unbackburn', - 'unrelate', + 'backburn', + 'duplicate', + 'fix', + 'from', + 'inconclusive', + 'introduced', + 'relate', + 'relatebrief', + 'resolve', + 'summary', + 'unbackburn', + 'unrelate', ): succeeded = getattr(self, '_cmd_%s' % self.cmd)(regression) - if regression_topmost_duplicate and self.cmd not 
in ('relate', 'relatebrief', 'duplicate'): + if regression_topmost_duplicate and self.cmd not in ( + 'relate', + 'relatebrief', + 'duplicate', + ): # some command needs to act on topmost regression as well getattr(self, '_cmd_%s' % self.cmd)(regression_topmost_duplicate) regression_topmost_duplicate.add_history_event(self) else: regzbot.UnhandledEvent.add( - self.repact.web_url, "unknown regzbot command: %s" % self.cmd, gmtime=self.repact.gmtime, subject=self.repact.summary) + self.repact.web_url, + 'unknown regzbot command: %s' % self.cmd, + gmtime=self.repact.gmtime, + subject=self.repact.summary, + ) return # create the history event and let caller know if we created a regression @@ -221,10 +251,15 @@ def __init__(self, repact, regression): def _add_command(self, cmd, parameters): if cmd in ('use', 'report'): try: - self.reptrd = regzbot.ReportThread.from_url(self._parse_pointer(parameters), repact=self.repact) + self.reptrd = regzbot.ReportThread.from_url( + self._parse_pointer(parameters), repact=self.repact + ) except regzbot.RepDownloadError: regzbot.UnhandledEvent.add( - self.repact.web_url, "unable to find a regression for %s", self._parse_pointer(parameters)) + self.repact.web_url, + 'unable to find a regression for %s', + self._parse_pointer(parameters), + ) raise RegressionNotFound for actimon in regzbot.RegActivityMonitor.get_by_reptrd(self.reptrd): if actimon.regid: @@ -275,7 +310,7 @@ def _set_regressions(self, regression): self.regression_topmost_duplicate = duplicate def _parse_pointer(self, pointer): - if not pointer in ('^', '/', '~'): + if pointer not in ('^', '/', '~'): return pointer if not self.reptrd.supports_relatives: return self.reptrd.web_url @@ -289,7 +324,9 @@ def _parse_pointer(self, pointer): # only should be executed in the contect of commands like duplicate and introduced; and in the latter case only # after all commands have been executed def add_related_activities(self, reptrd, regression): - reptrd.update(None, None, 
triggering_repact=self.repact, actimon=regression.actimon) + reptrd.update( + None, None, triggering_repact=self.repact, actimon=regression.actimon + ) def process_commands(self): def _walk_commands(): @@ -306,7 +343,7 @@ def _walk_commands(): yield single_command regression_created = False - assert (self.reptrd) + assert self.reptrd for single_command in _walk_commands(): if single_command.cmd == 'introduced': regression_created = single_command.process(self.regression, None) @@ -320,7 +357,11 @@ def _walk_commands(): continue if not self.regression: regzbot.UnhandledEvent.add( - self.repact.web_url, "regzbot tag in a thread not associated with a regression", gmtime=self.repact.gmtime, subject=self.repact.summary) + self.repact.web_url, + 'regzbot tag in a thread not associated with a regression', + gmtime=self.repact.gmtime, + subject=self.repact.summary, + ) continue single_command.process(self.regression, self.regression_topmost_duplicate) @@ -350,7 +391,11 @@ def _parse(cmd_section): # * the end of the section, as indicated by two newlines; optionally with a ; before the first and # space characters before the second) # * either a newline or a combination of semicolon and space characters that are followed '#regzbot' - for cmd_line_raw in re.finditer(r'((^|\n|;\s+)#regzbot\s+)(.*?)(?=(;?\n\s*$|;?\s+#regzbot))', cmd_section, re.MULTILINE | re.IGNORECASE | re.DOTALL): + for cmd_line_raw in re.finditer( + r'((^|\n|;\s+)#regzbot\s+)(.*?)(?=(;?\n\s*$|;?\s+#regzbot))', + cmd_section, + re.MULTILINE | re.IGNORECASE | re.DOTALL, + ): # guess there is a better way to handle "#regzbot activity-\nignore" better, but whatever cmd_line = re.sub(r'\-\n', '-', cmd_line_raw[3]) # remove linebreaks @@ -362,14 +407,17 @@ def _parse(cmd_section): # optional, as not every command has parameters (optional) # - (.*)?: the parameters (optional) splitted = re.split(r'^([\^\w-]+)(:?\n?\s+)?(.*)?$', cmd_line) - yield(splitted[1], splitted[3]) + yield (splitted[1], splitted[3]) def 
process_activity(activity, *, triggering_repact=None, actimon=None): def _handle_activity(activity, actimon): regression = None - if re.search(r'((^|\n|;\s+)#regzbot\s+)(ignore-activity|poke)(?=(;?\n\s*$|;?\s+#regzbot))', '\n' + - activity.message + '\n\n', re.MULTILINE | re.IGNORECASE | re.DOTALL): + if re.search( + r'((^|\n|;\s+)#regzbot\s+)(ignore-activity|poke)(?=(;?\n\s*$|;?\s+#regzbot))', + '\n' + activity.message + '\n\n', + re.MULTILINE | re.IGNORECASE | re.DOTALL, + ): ignore_activity = True else: ignore_activity = False @@ -392,7 +440,11 @@ def _handle_regzbot_commands(activity, regression): # The following loop locates sections with regzbot commands seperated by newlines; # note, it adds a newline at the start and two at the end of the processed input, as the # regzbot command might be right at its start or end. - for cmd_section in re.finditer(r'^\r?\n#regzbot.*?\r?\n(?=\s*\r?\n)$', '\n' + activity.message + '\n\n', re.MULTILINE | re.IGNORECASE | re.DOTALL): + for cmd_section in re.finditer( + r'^\r?\n#regzbot.*?\r?\n(?=\s*\r?\n)$', + '\n' + activity.message + '\n\n', + re.MULTILINE | re.IGNORECASE | re.DOTALL, + ): cmd_stack = RbCmdStackNew(activity, regression) try: for command, parameter in _parse(cmd_section[0].replace('\r', '')): @@ -404,14 +456,19 @@ def _handle_regzbot_commands(activity, regression): def _handle_expected_threads(activity): if activity.repsrc.kind != 'lore': return - for regression in regzbot.RegressionBasic.get_expected_by_subject(activity.summary): + for regression in regzbot.RegressionBasic.get_expected_by_subject( + activity.summary + ): for actimon in regzbot.RegActivityMonitor.get_by_reptrd(activity.reptrd): if actimon.regid == regression.regid: # already monitored, nothing to do return cmd_stack = RbCmdStackNew(activity, regression) - cmd_stack._add_command('relate', "%s %s [implicit, subject is expected]" % - (activity.web_url, activity.summary)) + cmd_stack._add_command( + 'relate', + '%s %s [implicit, subject is 
expected]' + % (activity.web_url, activity.summary), + ) cmd_stack.process_commands() def _handle_msgs_linking_regressions(activity): @@ -424,7 +481,11 @@ def _already_monitored(activity, regression): return False message_wo_quotes = re.sub(r'^>.*\n?', '', activity.message, flags=re.MULTILINE) - for match in re.finditer(r'^(\#regzbot |Link: |Closes: |.*)?(\n)?((http://|https://)\S*)', message_wo_quotes, re.MULTILINE | re.IGNORECASE): + for match in re.finditer( + r'^(\#regzbot |Link: |Closes: |.*)?(\n)?((http://|https://)\S*)', + message_wo_quotes, + re.MULTILINE | re.IGNORECASE, + ): linktag = False url = False @@ -453,24 +514,31 @@ def _already_monitored(activity, regression): if linktag is True: cmd_stack = RbCmdStackNew(activity, regression) - cmd_stack._add_command('relate', "%s %s [implicit due to Link/Closes tag]" % - (activity.web_url, activity.summary)) + cmd_stack._add_command( + 'relate', + '%s %s [implicit due to Link/Closes tag]' + % (activity.web_url, activity.summary), + ) cmd_stack.process_commands() elif url: cmd_stack = RbCmdStackNew(activity, regression) - cmd_stack._add_command('note', "%s %s [implicit due to link]" % (url, activity.summary)) + cmd_stack._add_command( + 'note', '%s %s [implicit due to link]' % (url, activity.summary) + ) cmd_stack.process_commands() def _handle_msgs_mentioning_culprits(activity): open_regressions = {} - for match in re.finditer('^(Fixes: )([0-9,a-e]{12})', activity.message, re.MULTILINE): + for match in re.finditer( + '^(Fixes: )([0-9a-f]{12})', activity.message, re.MULTILINE + ): # only fill this now, as we only need it if we found a Fixes: tag if len(open_regressions) == 0: for regression in regzbot.RegressionBasic.get_all(only_unsolved=True): if '..'
not in regression.introduced: open_regressions[regression.regid] = regression.introduced[0:12] - if not match.group(2) in open_regressions.values(): + if match.group(2) not in open_regressions.values(): continue for regid in open_regressions.keys(): if not open_regressions[regid] == match.group(2): @@ -481,9 +549,16 @@ def _handle_msgs_mentioning_culprits(activity): continue # no activity, only a history entry, as it might be about different bug in the same commit - regzbot.RegHistory.event(regid, activity.gmtime, activity.reptrd.id, activity.summary, - activity.realname, repsrcid=activity.repsrc.id, - regzbotcmd='note: "%s" contains a \'Fixes:\' tag for the culprit of this regression' % activity.summary) + regzbot.RegHistory.event( + regid, + activity.gmtime, + activity.reptrd.id, + activity.summary, + activity.realname, + repsrcid=activity.repsrc.id, + regzbotcmd='note: "%s" contains a \'Fixes:\' tag for the culprit of this regression' + % activity.summary, + ) if 'until' in regzbot._TESTING and activity.created_at >= regzbot._TESTING['until']: logger.debug('[rbcmd] skip processing %s', activity.web_url) @@ -506,12 +581,13 @@ def _handle_msgs_mentioning_culprits(activity): raise RegressionCreatedException -if __name__ == "__main__": +if __name__ == '__main__': __TESTDATA = [] # __TESTDATA.append("#regzbot introduced foo") # __TESTDATA.append("#regzbot introduced foo\n#regzbot title bar") __TESTDATA.append( - "#regzbot introduced\nfoo bar \nand more for and bar; and foobar, too;\n#regzbot ignore; #regzbot title foo;\n#regzbot title: baz;") + '#regzbot introduced\nfoo bar \nand more for and bar; and foobar, too;\n#regzbot ignore; #regzbot title foo;\n#regzbot title: baz;' + ) for i in __TESTDATA: print('#########') print('"""\n%s """' % i) diff --git a/regzbot/_repsources/_bugzilla.py b/regzbot/_repsources/_bugzilla.py index 5e8e587..bb2a26e 100644 --- a/regzbot/_repsources/_bugzilla.py +++ b/regzbot/_repsources/_bugzilla.py @@ -14,17 +14,19 @@ import 
regzbot._repsources._trackers from regzbot import PatchKind -if __name__ != "__main__": +if __name__ != '__main__': import regzbot + logger = regzbot.logger else: import logging + logger = logging if False: # if True: logger.basicConfig(level=logging.DEBUG) - logging.getLogger("bugzilla").setLevel(logging.WARNING) - logging.getLogger("urllib3").setLevel(logging.WARNING) + logging.getLogger('bugzilla').setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.WARNING) _CACHE_INSTANCES = {} @@ -43,7 +45,9 @@ def __init__(self, bz_issue, *, comment=None, status_change=None): self._pybz_comment = comment self._creator = self._pybz_comment['creator'] self._patchkind = None - self.created_at = datetime.datetime.fromisoformat(self._pybz_comment['creation_time']) + self.created_at = datetime.datetime.fromisoformat( + self._pybz_comment['creation_time'] + ) self.id = self._pybz_comment['count'] self.message = self._pybz_comment['text'] # username is available here, but is a email address we should not expose due to typical privacy policies @@ -83,19 +87,24 @@ def is_patch_in_attachment(): bz_project = self.bz_issue.bz_project attachment = bz_project.attachment( - self._pybz_comment['attachment_id'], exclude_fields='data') - attachment_details = attachment['attachments'][str(self._pybz_comment['attachment_id'])] + self._pybz_comment['attachment_id'], exclude_fields='data' + ) + attachment_details = attachment['attachments'][ + str(self._pybz_comment['attachment_id']) + ] if attachment_details['is_patch'] is not True: return False if attachment_details['content_type'] != 'text/plain': return False # now get the attachment - attachment = bz_project.attachment( - self._pybz_comment['attachment_id']) - attachment_details = attachment['attachments'][str(self._pybz_comment['attachment_id'])] + attachment = bz_project.attachment(self._pybz_comment['attachment_id']) + attachment_details = attachment['attachments'][ + str(self._pybz_comment['attachment_id']) + ] 
attachment_details['decoded_data'] = base64.b64decode( - attachment_details['data']).decode('utf-8') + attachment_details['data'] + ).decode('utf-8') self._pybz_comment['attachment'] = attachment_details return True @@ -105,7 +114,9 @@ def is_patch_in_attachment(): else: self._summary = '%s: new comment (#%s)' % (self.summary_prefix, self.id) if is_patch_in_attachment(): - self._patchkind = PatchKind.getby_content(self._pybz_comment['attachment']['decoded_data']) + self._patchkind = PatchKind.getby_content( + self._pybz_comment['attachment']['decoded_data'] + ) self._summary = '%s with patch' % self._summary else: self._patchkind = 0 @@ -114,7 +125,7 @@ def is_patch_in_attachment(): # mock class to stay in line with what _gitlab.py and _github.py do, as with # bugzilla it makes no sense to differentiate between a instance and a project -class BzInstance(): +class BzInstance: def __init__(self, url, token): logger.debug('[bugzilla] %s: connecting', url.removeprefix('https://')) self._pybz_bugzilla = bugzilla.Bugzilla(url, force_rest=True, api_key=token) @@ -126,16 +137,27 @@ def project(self): class BzIssue(regzbot._repsources._trackers._issue): - INCLUDE_FIELDS = ['attachment_id', 'creator', 'creation_time', 'id', 'status', 'summary'] + INCLUDE_FIELDS = [ + 'attachment_id', + 'creator', + 'creation_time', + 'id', + 'status', + 'summary', + ] def __init__(self, bz_project, _pybz_bug): self.bz_project = bz_project self._pybz_bug = _pybz_bug self.id = _pybz_bug.id - self.created_at = datetime.datetime.fromisoformat(_pybz_bug.creation_time.replace("Z", "+00:00")) + self.created_at = datetime.datetime.fromisoformat( + _pybz_bug.creation_time.replace('Z', '+00:00') + ) self.message = '' - self.realname = self.bz_project.realname(_pybz_bug.creator, realname=_pybz_bug.creator_detail['real_name']) + self.realname = self.bz_project.realname( + _pybz_bug.creator, realname=_pybz_bug.creator_detail['real_name'] + ) self.state = _pybz_bug.status self.summary = _pybz_bug.summary 
self.web_url = '%s/show_bug.cgi?id=%s' % (bz_project.web_url, _pybz_bug.id) @@ -169,7 +191,7 @@ def activities(self, *, since=None, until=None): yield activity -class BzProject(): +class BzProject: _usercache = {} def __init__(self, bz_bugzilla, pybz_bugzilla): @@ -181,13 +203,22 @@ def attachment(self, attachment_ids, include_fields=None, exclude_fields=None): msg_suffix = '' if exclude_fields and 'data' in exclude_fields: msg_suffix = ' (without data)' - logger.debug("[bugzilla] %s: retrieving attachment-id '%s%s'", self.web_url[8:], attachment_ids, msg_suffix) - return self._pybz_bugzilla.get_attachments(None, attachment_ids, include_fields, exclude_fields) + logger.debug( + "[bugzilla] %s: retrieving attachment-id '%s%s'", + self.web_url[8:], + attachment_ids, + msg_suffix, + ) + return self._pybz_bugzilla.get_attachments( + None, attachment_ids, include_fields, exclude_fields + ) def issue(self, id): - logger.debug("[bugzilla] %s: retrieving metadata for issue '%s'", self.web_url[8:], id) + logger.debug( + "[bugzilla] %s: retrieving metadata for issue '%s'", self.web_url[8:], id + ) query = self._pybz_bugzilla.build_query() - query["include_fields"] = BzIssue.INCLUDE_FIELDS + query['include_fields'] = BzIssue.INCLUDE_FIELDS query['bug_id'] = id for result in self._pybz_bugzilla.query(query): return BzIssue(self, result) @@ -195,7 +226,11 @@ def issue(self, id): def realname(self, creator, *, realname=None): if creator not in self._usercache: if realname is None: - logger.debug('[bugzilla] %s: retrieving details for creator %s', self.web_url[8:], creator) + logger.debug( + '[bugzilla] %s: retrieving details for creator %s', + self.web_url[8:], + creator, + ) realname = self._pybz_bugzilla.getuser(creator).real_name if not realname: # do what bugzilla does in case realname is unset: use first half of email address @@ -205,8 +240,12 @@ def realname(self, creator, *, realname=None): def search(self, pattern, since, *, until=None): if since: - 
logger.debug("[bugzilla] %s: searching for '%s' in comments updated after %s", - self.web_url[8:], pattern, since) + logger.debug( + "[bugzilla] %s: searching for '%s' in comments updated after %s", + self.web_url[8:], + pattern, + since, + ) else: logger.debug("[bugzilla] %s: searching for '%s'", self.web_url[8:], pattern) query = self._pybz_bugzilla.build_query() @@ -215,43 +254,57 @@ def search(self, pattern, since, *, until=None): # query["longdesc_type"] = 'casesubstring' # query["query_format"] = 'advanced' # hence approach things from a different angle: - query["f1"] = 'longdesc' - query["o1"] = 'casesubstring' - query["v1"] = pattern - query["query_format"] = 'advanced' - query["include_fields"] = BzIssue.INCLUDE_FIELDS - query["j_top"] = 'AND_G' - query["f2"] = 'longdesc' - query["o2"] = 'changedafter' - query["v2"] = since.strftime("%Y-%m-%d-%H:%M:%S") - query["f3"] = 'longdesc' - query["o3"] = 'changedbefore' + query['f1'] = 'longdesc' + query['o1'] = 'casesubstring' + query['v1'] = pattern + query['query_format'] = 'advanced' + query['include_fields'] = BzIssue.INCLUDE_FIELDS + query['j_top'] = 'AND_G' + query['f2'] = 'longdesc' + query['o2'] = 'changedafter' + query['v2'] = since.strftime('%Y-%m-%d-%H:%M:%S') + query['f3'] = 'longdesc' + query['o3'] = 'changedbefore' if until: - query["v3"] = until.strftime("%Y-%m-%d-%H:%M:%S") + query['v3'] = until.strftime('%Y-%m-%d-%H:%M:%S') else: - query["v3"] = 'Now' + query['v3'] = 'Now' for result in self._pybz_bugzilla.query(query): - if 'bugzilla-only-ids' in regzbot._TESTING and result.id not in regzbot._TESTING['bugzilla-only-ids']: + if ( + 'bugzilla-only-ids' in regzbot._TESTING + and result.id not in regzbot._TESTING['bugzilla-only-ids'] + ): continue yield BzPossibleSearchHit(BzIssue(self, result), pattern, since) def updated_issues(self, since, until=None): if until: - logger.debug("[bugzilla] %s: retrieving list of issues updated between '%s' and '%s'", - self.web_url[8:], since, until) + 
logger.debug( + "[bugzilla] %s: retrieving list of issues updated between '%s' and '%s'", + self.web_url[8:], + since, + until, + ) else: - logger.debug("[bugzilla] %s: retrieving list of issues updated since '%s'", self.web_url[8:], since) + logger.debug( + "[bugzilla] %s: retrieving list of issues updated since '%s'", + self.web_url[8:], + since, + ) query = self._pybz_bugzilla.build_query() - query["include_fields"] = BzIssue.INCLUDE_FIELDS - query["chfieldfrom"] = since.strftime("%Y-%m-%d-%H:%M:%S") + query['include_fields'] = BzIssue.INCLUDE_FIELDS + query['chfieldfrom'] = since.strftime('%Y-%m-%d-%H:%M:%S') if until: - query["chfieldto"] = until.strftime("%Y-%m-%d-%H:%M:%S") + query['chfieldto'] = until.strftime('%Y-%m-%d-%H:%M:%S') else: - query["chfieldto"] = 'Now' + query['chfieldto'] = 'Now' for result in self._pybz_bugzilla.query(query): - if 'bugzilla-only-ids' in regzbot._TESTING and result.id not in regzbot._TESTING['bugzilla-only-ids']: + if ( + 'bugzilla-only-ids' in regzbot._TESTING + and result.id not in regzbot._TESTING['bugzilla-only-ids'] + ): continue yield BzIssue(self, result) @@ -300,7 +353,9 @@ def supports_url(self, url_lowered, url_parsed): if url_lowered.startswith(self.serverurl): # there might be a comma or something else that might need to be removed: # https://lore.kernel.org/linux-wireless/170844096394.7.10031732457351764961.271076804@slmail.me/ - stripped = ''.join(filter(str.isdigit, url_parsed.query.removeprefix('id='))) + stripped = ''.join( + filter(str.isdigit, url_parsed.query.removeprefix('id=')) + ) if not stripped: return False return int(stripped) @@ -315,7 +370,7 @@ def thread(self, *, id=None, url=None, issue=None): if not id: id = self.supports_url(url) if not id: - logger.error("[bugzilla] cound not parse %s", url) + logger.error('[bugzilla] cound not parse %s', url) raise regzbot.RepDownloadError issue = self._bz_project.issue(id) return BzRepTrd(self, issue) @@ -354,23 +409,22 @@ def connect(instance_name, *, 
token=None): def __test(): # main issue used for testing (chosen without much thought): https://bugzilla.kernel.org/show_bug.cgi?id=217678 TESTDATA = { - 'project': "https://bugzilla.kernel.org", + 'project': 'https://bugzilla.kernel.org', 'issue': { 'total': 37, 'issue_id': 217678, - 'expected': ''' => {'created_at': '2023-07-17 17:44:27+00:00', 'message': '', 'realname': 'hq.dev+kernel', 'state': 'RESOLVED', 'summary': 'Unexplainable packet drop starting at v6.4', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678'}''' + 'expected': """ => {'created_at': '2023-07-17 17:44:27+00:00', 'message': '', 'realname': 'hq.dev+kernel', 'state': 'RESOLVED', 'summary': 'Unexplainable packet drop starting at v6.4', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678'}""", }, 'comments_recent': { 'since': datetime.datetime.fromisoformat('2023-10-17 04:39:50+00:00'), - 'expected': ''' => {'created_at': '2023-10-17 04:39:55+00:00', 'message': 'It is currently in next-queue. Since 6.6.-rc6 is already out, I hope it makes i…', 'realname': 'Tirthendu Sarkar', 'summary': 'bugzilla.kernel.org, issue 217678: new comment (#33)', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c33'}''' + 'expected': """ => {'created_at': '2023-10-17 04:39:55+00:00', 'message': 'It is currently in next-queue. 
Since 6.6.-rc6 is already out, I hope it makes i…', 'realname': 'Tirthendu Sarkar', 'summary': 'bugzilla.kernel.org, issue 217678: new comment (#33)', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c33'}""", }, 'commits_recent': { 'count': None, 'since': datetime.datetime.fromisoformat('2023-09-29 11:21:10+00:00'), - 'expected': ''' => {'created_at': '2023-09-29 11:21:20+00:00', 'message': 'Created attachment 305161 Patch with temp fix and debug prints Hi, Thanks for…', 'realname': 'Tirthendu Sarkar', 'summary': 'bugzilla.kernel.org, issue 217678: new comment (#27) with patch', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c27'}''', - 'patchkind': 3 + 'expected': """ => {'created_at': '2023-09-29 11:21:20+00:00', 'message': 'Created attachment 305161 Patch with temp fix and debug prints Hi, Thanks for…', 'realname': 'Tirthendu Sarkar', 'summary': 'bugzilla.kernel.org, issue 217678: new comment (#27) with patch', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c27'}""", + 'patchkind': 3, }, - 'search_since': { 'pattern': 'd42b1c47570eb2ed818dc3fe94b2678124af109d', 'date': datetime.datetime.fromisoformat('2023-07-08 00:00:00+00:00'), @@ -380,9 +434,9 @@ def __test(): 'pattern': 'd42b1c47570eb2ed818dc3fe94b2678124af109d', 'total': 2, 'since': datetime.datetime.fromisoformat('2023-07-18 03:40:27+00:00'), - 'expected': ''' => {'created_at': '2023-07-18 03:40:27+00:00', 'message': '(In reply to hq.dev+kernel from comment #4) > Created attachment 304648 [detail…', 'realname': 'Bagas Sanjaya', 'summary': 'bugzilla.kernel.org, issue 217678: new comment (#7)', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c7'}''' + 'expected': """ => {'created_at': '2023-07-18 03:40:27+00:00', 'message': '(In reply to hq.dev+kernel from comment #4) > Created attachment 304648 [detail…', 'realname': 'Bagas Sanjaya', 'summary': 'bugzilla.kernel.org, issue 217678: new 
comment (#7)', 'username': '', 'web_url': 'https://bugzilla.kernel.org/show_bug.cgi?id=217678#c7'}""", }, - 'search_days_updated': 3 + 'search_days_updated': 3, } def _testing_check_result(kind, value, expected): @@ -393,9 +447,12 @@ def _testing_check_result(kind, value, expected): print(" %s (unknown, apparently '%s')" % (kind, value)) return else: - print('\n%s: mismatch; expected vs retrieved view:\n%s\n%s' % (kind, expected, value)) + print( + '\n%s: mismatch; expected vs retrieved view:\n%s\n%s' + % (kind, expected, value) + ) if len(sys.argv) < 3 or sys.argv[2] != '--warn': - print(" Aborting.") + print(' Aborting.') sys.exit(1) # = setup = @@ -412,66 +469,104 @@ def _testing_check_result(kind, value, expected): project = instance.project() # = go = - print("Checking basic issue:", flush=True, end='') + print('Checking basic issue:', flush=True, end='') issue = project.issue(TESTDATA['issue']['issue_id']) _testing_check_result('data', str(issue), TESTDATA['issue']['expected']) - _testing_check_result('total', len(list(issue.activities())), - TESTDATA['issue']['total']) - print("; succeeded.") + _testing_check_result( + 'total', len(list(issue.activities())), TESTDATA['issue']['total'] + ) + print('; succeeded.') - print("Checking a comment:", flush=True, end='') + print('Checking a comment:', flush=True, end='') for comment in issue.activities(since=TESTDATA['comments_recent']['since']): - _testing_check_result('firsthit', str(comment), TESTDATA['comments_recent']['expected']) + _testing_check_result( + 'firsthit', str(comment), TESTDATA['comments_recent']['expected'] + ) break - print("; succeeded.") + print('; succeeded.') - print("Checking a commit:", flush=True, end='') + print('Checking a commit:', flush=True, end='') for commit in issue.activities(since=TESTDATA['commits_recent']['since']): - _testing_check_result('firsthit, ', str(commit), TESTDATA['commits_recent']['expected']) - _testing_check_result('patchkind of firsthit', commit.patchkind, 
TESTDATA['commits_recent']['patchkind']) + _testing_check_result( + 'firsthit, ', str(commit), TESTDATA['commits_recent']['expected'] + ) + _testing_check_result( + 'patchkind of firsthit', + commit.patchkind, + TESTDATA['commits_recent']['patchkind'], + ) break - print("; succeeded.") + print('; succeeded.') if 'search_since' in TESTDATA: - print("Checking search:", flush=True, end='') + print('Checking search:', flush=True, end='') results_search_broad = [] - for result in project.search(TESTDATA['search_since']['pattern'], datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z')): + for result in project.search( + TESTDATA['search_since']['pattern'], + datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z'), + ): for hit in result._hits(): results_search_broad.append(hit) results_search_narrow = [] - for result in project.search(TESTDATA['search_since']['pattern'], TESTDATA['search_since']['date']): + for result in project.search( + TESTDATA['search_since']['pattern'], TESTDATA['search_since']['date'] + ): for hit in result._hits(): results_search_narrow.append(hit) - _testing_check_result('total', len(results_search_broad), TESTDATA['search_since']['total']) - _testing_check_result('difference', len(results_search_broad) - len(results_search_narrow), 1) - print("; succeeded.") + _testing_check_result( + 'total', len(results_search_broad), TESTDATA['search_since']['total'] + ) + _testing_check_result( + 'difference', len(results_search_broad) - len(results_search_narrow), 1 + ) + print('; succeeded.') if 'search_comment' in TESTDATA: - print("Checking search (pattern in comment):", flush=True, end='') + print('Checking search (pattern in comment):', flush=True, end='') results_search_comments = [] - for result in project.search(TESTDATA['search_comment']['pattern'], since=TESTDATA['search_comment']['since']): + for result in project.search( + TESTDATA['search_comment']['pattern'], + since=TESTDATA['search_comment']['since'], + ): for hit in result._hits(): 
results_search_comments.append(hit) - _testing_check_result('firsthit', str(results_search_comments[0]), TESTDATA['search_comment']['expected']) - _testing_check_result('total', len(results_search_comments), TESTDATA['search_comment']['total']) - print("; succeeded.") + _testing_check_result( + 'firsthit', + str(results_search_comments[0]), + TESTDATA['search_comment']['expected'], + ) + _testing_check_result( + 'total', len(results_search_comments), TESTDATA['search_comment']['total'] + ) + print('; succeeded.') if 'search_issue' in TESTDATA: - print("Checking search (pattern in issue):", flush=True, end='') + print('Checking search (pattern in issue):', flush=True, end='') results_search_issue = [] - for result in project.search(TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since']): + for result in project.search( + TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since'] + ): for hit in result._hits(): results_search_issue.append(hit) - _testing_check_result('firsthit', str(results_search_issue[0]), TESTDATA['search_issue']['expected']) - _testing_check_result('total', len(results_search_issue), TESTDATA['search_issue']['total']) - print("; succeeded.") - - print('All issues updated between %s and 7 days ago:' % TESTDATA['search_days_updated']) + _testing_check_result( + 'firsthit', + str(results_search_issue[0]), + TESTDATA['search_issue']['expected'], + ) + _testing_check_result( + 'total', len(results_search_issue), TESTDATA['search_issue']['total'] + ) + print('; succeeded.') + + print( + 'All issues updated between %s and 7 days ago:' + % TESTDATA['search_days_updated'] + ) until = datetime.datetime.now() - datetime.timedelta(days=7) since = until - datetime.timedelta(days=TESTDATA['search_days_updated']) for issue in project.updated_issues(since, until=until): print(issue.web_url, issue.summary[0:80]) -if __name__ == "__main__": +if __name__ == '__main__': __test() diff --git a/regzbot/_repsources/_github.py 
b/regzbot/_repsources/_github.py index 70258ac..07068a8 100644 --- a/regzbot/_repsources/_github.py +++ b/regzbot/_repsources/_github.py @@ -13,17 +13,19 @@ from regzbot import PatchKind import regzbot._repsources._trackers -if __name__ != "__main__": +if __name__ != '__main__': import regzbot + logger = regzbot.logger else: import logging + logger = logging if False: # if True: logger.basicConfig(level=logging.DEBUG) - logging.getLogger("urllib3").setLevel(logging.WARNING) - logging.getLogger("github").setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.WARNING) + logging.getLogger('github').setLevel(logging.WARNING) _CACHE_INSTANCES = {} _CACHE_PROJECTS = {} @@ -61,14 +63,17 @@ def __init__(self, gh_issue, ghpy_event, *, comment_number=None): self.summary = 'A commit referenced this issue' # there must be a better way to access this, but I failed to find one :/ self.web_url = ghpy_event.commit_url.replace( - 'api.github.com/repos/', 'github.com/').replace('/commits/', '/commit/') + 'api.github.com/repos/', 'github.com/' + ).replace('/commits/', '/commit/') elif ghpy_event.event == 'closed': self.message = '' self.summary = 'Status is now: closed' # there must be a better way to access this, but I failed to find one :/ self.web_url = gh_issue.web_url else: - logger.critical('[github] GhActivity called with an unknown event; aborting.') + logger.critical( + '[github] GhActivity called with an unknown event; aborting.' 
+ ) sys.exit(1) @cached_property @@ -80,7 +85,7 @@ def patchkind(self): return PatchKind.getby_commit_header(commit.commit.message) -class GhInstance(): +class GhInstance: def __init__(self, instance_name, token): if instance_name != 'github.com': raise NotImplementedError @@ -93,7 +98,9 @@ def project(self, project_name): logger.debug('[github] github.com: opening project %s', project_name) if len(_CACHE_PROJECTS) > 12: del _CACHE_PROJECTS[(next(iter(_CACHE_PROJECTS)))] - _CACHE_PROJECTS[project_name] = GhProject(self, self._ghpy_instance.get_repo(project_name)) + _CACHE_PROJECTS[project_name] = GhProject( + self, self._ghpy_instance.get_repo(project_name) + ) return _CACHE_PROJECTS[project_name] def search_issues(self, pattern): @@ -120,7 +127,7 @@ def __init__(self, gh_project, ghpy_issue): def _activities(self): activities = [] activities.append(GhActivity(self, None)) - logger.debug("[github] %s: retrieving events", self.web_url[8:]) + logger.debug('[github] %s: retrieving events', self.web_url[8:]) comment_count = 0 for event in self._ghpy_issue.get_timeline(): # ignore 'mentioned' and 'subscribed'; also 'cross-referenced' for @@ -147,12 +154,16 @@ def activities(self, *, since=None, until=None): def comments(self, since): # pygithub get_events for issues only allows to retrieve all events; to reduce the network load thus first # check only the latest comments, as that is possible with pygithub; - logger.debug("[github] %s: retrieving comments submitted since %s", self.web_url[8:], since) + logger.debug( + '[github] %s: retrieving comments submitted since %s', + self.web_url[8:], + since, + ) for comment in self._ghpy_issue.comments(since=since): yield comment -class GhProject(): +class GhProject: def __init__(self, gh_instance, ghpy_project): self.gh_instance = gh_instance self._ghpy_project = ghpy_project @@ -182,14 +193,20 @@ def issue(self, *, id=None, url=None): def search(self, pattern, since): search_string = ['is:issue'] search_string.append('repo:%s' 
% self.name) - search_string.append('updated:>=%s' % (since.strftime("%Y-%m-%d"))) + search_string.append('updated:>=%s' % (since.strftime('%Y-%m-%d'))) search_string.append(pattern) for issue in self.gh_instance.search_issues(' '.join(search_string)): yield GhPossibleSearchHit(GhIssue(self, issue), pattern, since) def updated_issues(self, since): - logger.debug('[github] %s: retrieving issues updated since %s', self.web_url[8:], since, ) - for issue in self._ghpy_project.get_issues(state='all', sort='updated', since=since): + logger.debug( + '[github] %s: retrieving issues updated since %s', + self.web_url[8:], + since, + ) + for issue in self._ghpy_project.get_issues( + state='all', sort='updated', since=since + ): # skip merge requests if issue.pull_request: continue @@ -203,7 +220,9 @@ def __init__(self, gh_issue, pattern, since): super().__init__(gh_issue.id, pattern, since) def is_hit_in_submission(self): - if self.issue.created_at >= self._since and self._check_pattern(self.issue.message): + if self.issue.created_at >= self._since and self._check_pattern( + self.issue.message + ): return self.issue def matching_activities(self): @@ -246,12 +265,15 @@ def __init__(self, *args, **kwargs): def _gh_project(self): parsed_url = urllib.parse.urlparse(self.serverurl) instance_name = parsed_url.netloc - project_name = parsed_url.path.strip("/") + project_name = parsed_url.path.strip('/') instance = connect(instance_name) project = instance.project(project_name) if self.serverurl != project.web_url: - logger.error("[github] self.serverurl (%s) and project.web_url (%s) do not match" % (self.serverurl, project.web_url) ) + logger.error( + '[github] self.serverurl (%s) and project.web_url (%s) do not match' + % (self.serverurl, project.web_url) + ) raise AssertionError return project @@ -314,16 +336,16 @@ def __test(): 'issue': { 'total': 22, 'issue_id': 4455, - 'expected': ''' => {'created_at': '2023-07-05 07:10:18+00:00', 'message': 'Commit 
05cbb391aa8d2fd16c23bd43b9f1845e0a6dc333 introduced a regression. Som…', 'realname': 'Sam Edwards', 'state': 'closed', 'summary': '[BUG] [Regression] Intel hda-dai doesn't recover gracefully from underruns; aud…', 'username': 'CFSworks', 'web_url': 'https://github.com/thesofproject/linux/issues/4455'}''' + 'expected': """ => {'created_at': '2023-07-05 07:10:18+00:00', 'message': 'Commit 05cbb391aa8d2fd16c23bd43b9f1845e0a6dc333 introduced a regression. Som…', 'realname': 'Sam Edwards', 'state': 'closed', 'summary': '[BUG] [Regression] Intel hda-dai doesn't recover gracefully from underruns; aud…', 'username': 'CFSworks', 'web_url': 'https://github.com/thesofproject/linux/issues/4455'}""", }, 'comments_recent': { 'since': datetime.datetime.fromisoformat('2023-07-23T03:17:13.35Z'), - 'expected': ''' => {'created_at': '2023-07-24 15:45:04+00:00', 'message': '@CFSworks I have updated the PR now. Could you please help check if it applies …', 'realname': 'Ranjani Sridharan', 'summary': 'github.com/thesofproject/linux/issues/4455, issue 4455: new comment (#10)', 'username': 'ranj063', 'web_url': 'https://github.com/thesofproject/linux/issues/4455#issuecomment-1648167580'}''' + 'expected': """ => {'created_at': '2023-07-24 15:45:04+00:00', 'message': '@CFSworks I have updated the PR now. 
Could you please help check if it applies …', 'realname': 'Ranjani Sridharan', 'summary': 'github.com/thesofproject/linux/issues/4455, issue 4455: new comment (#10)', 'username': 'ranj063', 'web_url': 'https://github.com/thesofproject/linux/issues/4455#issuecomment-1648167580'}""", }, 'commits_recent': { 'since': datetime.datetime.fromisoformat('2023-07-24 20:10:17+00:00'), - 'expected': ''' => {'created_at': '2023-07-24 20:10:18+00:00', 'message': '', 'realname': 'Ranjani Sridharan', 'summary': 'A commit referenced this issue', 'username': 'ranj063', 'web_url': 'https://github.com/ranj063/linux/commit/3dfc905dbeb49cb5363762ad133ee4478e1b43c…'}''', - 'patchkind': 7 + 'expected': """ => {'created_at': '2023-07-24 20:10:18+00:00', 'message': '', 'realname': 'Ranjani Sridharan', 'summary': 'A commit referenced this issue', 'username': 'ranj063', 'web_url': 'https://github.com/ranj063/linux/commit/3dfc905dbeb49cb5363762ad133ee4478e1b43c…'}""", + 'patchkind': 7, }, 'search_since': { 'pattern': 'https://bugzilla.kernel.org/show_bug.cgi.*id=217673', @@ -334,15 +356,15 @@ def __test(): 'pattern': 'The comments in https://bugzilla.kernel.org/show_bug.cgi.*id=217673', 'total': 1, 'since': datetime.datetime.fromisoformat('2023-07-21T10:25:00.00Z'), - 'expected': ''' => {'created_at': '2023-07-21 10:28:54+00:00', 'message': 'I did a potentially duplicated new bug at https://github.com/thesofproject/linu…', 'realname': 'Kai Vehmanen', 'summary': 'github.com/thesofproject/linux/issues/4455, issue 4455: new comment (#7)', 'username': 'kv2019i', 'web_url': 'https://github.com/thesofproject/linux/issues/4455#issuecomment-1645363342'}''' + 'expected': """ => {'created_at': '2023-07-21 10:28:54+00:00', 'message': 'I did a potentially duplicated new bug at https://github.com/thesofproject/linu…', 'realname': 'Kai Vehmanen', 'summary': 'github.com/thesofproject/linux/issues/4455, issue 4455: new comment (#7)', 'username': 'kv2019i', 'web_url': 
'https://github.com/thesofproject/linux/issues/4455#issuecomment-1645363342'}""", }, 'search_issue': { 'pattern': 'Filing an issue to track https://bugzilla.kernel.org/show_bug.cgi.*id=217673', 'since': datetime.datetime.fromisoformat('2023-07-21T10:22:00.00Z'), 'total': 1, - 'expected': ''' => {'created_at': '2023-07-21 10:23:48+00:00', 'message': 'Filing an issue to track https://bugzilla.kernel.org/show_bug.cgi?id=217673 …', 'realname': 'Kai Vehmanen', 'summary': 'github.com/thesofproject/linux/issues/4482, issue 4482: submission', 'username': 'kv2019i', 'web_url': 'https://github.com/thesofproject/linux/issues/4482'}''' + 'expected': """ => {'created_at': '2023-07-21 10:23:48+00:00', 'message': 'Filing an issue to track https://bugzilla.kernel.org/show_bug.cgi?id=217673 …', 'realname': 'Kai Vehmanen', 'summary': 'github.com/thesofproject/linux/issues/4482, issue 4482: submission', 'username': 'kv2019i', 'web_url': 'https://github.com/thesofproject/linux/issues/4482'}""", }, - 'search_days_updated': 4 + 'search_days_updated': 4, } def _testing_check_result(kind, value, expected): @@ -353,9 +375,12 @@ def _testing_check_result(kind, value, expected): print(" %s (unknown, apparently '%s')" % (kind, value)) return else: - print('\n%s: mismatch; expected vs retrieved view:\n%s\n%s' % (kind, expected, value)) + print( + '\n%s: mismatch; expected vs retrieved view:\n%s\n%s' + % (kind, expected, value) + ) if len(sys.argv) < 3 or sys.argv[2] != '--warn': - print(" Aborting.") + print(' Aborting.') sys.exit(1) # = setup = @@ -369,70 +394,107 @@ def _testing_check_result(kind, value, expected): sys.exit(1) parsed_url = urllib.parse.urlparse(TESTDATA['project']) - name_project = parsed_url.path.strip("/") + name_project = parsed_url.path.strip('/') instance = connect('github.com', token=sys.argv[1]) project = instance.project(name_project) # = go = - print("Checking basic issue:", flush=True, end='') + print('Checking basic issue:', flush=True, end='') issue = 
project.issue(id=TESTDATA['issue']['issue_id']) _testing_check_result('data', str(issue), TESTDATA['issue']['expected']) - _testing_check_result('total', len(list(issue.activities())), - TESTDATA['issue']['total']) - print("; succeeded.") + _testing_check_result( + 'total', len(list(issue.activities())), TESTDATA['issue']['total'] + ) + print('; succeeded.') - print("Checking a comment:", flush=True, end='') + print('Checking a comment:', flush=True, end='') for comment in issue.activities(since=TESTDATA['comments_recent']['since']): - _testing_check_result('firsthit', str(comment), TESTDATA['comments_recent']['expected']) + _testing_check_result( + 'firsthit', str(comment), TESTDATA['comments_recent']['expected'] + ) break - print("; succeeded.") + print('; succeeded.') - print("Checking a commit:", flush=True, end='') + print('Checking a commit:', flush=True, end='') for commit in issue.activities(since=TESTDATA['commits_recent']['since']): - _testing_check_result('firsthit', str(commit), TESTDATA['commits_recent']['expected']) - _testing_check_result('patchkind of firsthit', commit.patchkind, TESTDATA['commits_recent']['patchkind']) + _testing_check_result( + 'firsthit', str(commit), TESTDATA['commits_recent']['expected'] + ) + _testing_check_result( + 'patchkind of firsthit', + commit.patchkind, + TESTDATA['commits_recent']['patchkind'], + ) break - print("; succeeded.") + print('; succeeded.') if 'search_since' in TESTDATA: - print("Checking search:", flush=True, end='') + print('Checking search:', flush=True, end='') results_search_broad = [] - for result in project.search(TESTDATA['search_since']['pattern'], datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z')): + for result in project.search( + TESTDATA['search_since']['pattern'], + datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z'), + ): for hit in result._hits(): results_search_broad.append(hit) results_search_narrow = [] - for result in project.search(TESTDATA['search_since']['pattern'], 
TESTDATA['search_since']['date']): + for result in project.search( + TESTDATA['search_since']['pattern'], TESTDATA['search_since']['date'] + ): for hit in result._hits(): results_search_narrow.append(hit) - _testing_check_result('total', len(results_search_broad), TESTDATA['search_since']['total']) - _testing_check_result('difference', len(results_search_broad) - len(results_search_narrow), 1) - print("; succeeded.") + _testing_check_result( + 'total', len(results_search_broad), TESTDATA['search_since']['total'] + ) + _testing_check_result( + 'difference', len(results_search_broad) - len(results_search_narrow), 1 + ) + print('; succeeded.') if 'search_comment' in TESTDATA: - print("Checking search (pattern in comment):", flush=True, end='') + print('Checking search (pattern in comment):', flush=True, end='') results_search_comments = [] - for result in project.search(TESTDATA['search_comment']['pattern'], since=TESTDATA['search_comment']['since']): + for result in project.search( + TESTDATA['search_comment']['pattern'], + since=TESTDATA['search_comment']['since'], + ): for hit in result._hits(): results_search_comments.append(hit) - _testing_check_result('firsthit', str(results_search_comments[0]), TESTDATA['search_comment']['expected']) - _testing_check_result('total', len(results_search_comments), TESTDATA['search_comment']['total']) - print("; succeeded.") + _testing_check_result( + 'firsthit', + str(results_search_comments[0]), + TESTDATA['search_comment']['expected'], + ) + _testing_check_result( + 'total', len(results_search_comments), TESTDATA['search_comment']['total'] + ) + print('; succeeded.') if 'search_issue' in TESTDATA: - print("Checking search (pattern in issue):", flush=True, end='') + print('Checking search (pattern in issue):', flush=True, end='') results_search_issue = [] - for result in project.search(TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since']): + for result in project.search( + 
TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since'] + ): for hit in result._hits(): results_search_issue.append(hit) - _testing_check_result('firsthit', str(results_search_issue[0]), TESTDATA['search_issue']['expected']) - _testing_check_result('total', len(results_search_issue), TESTDATA['search_issue']['total']) - print("; succeeded.") + _testing_check_result( + 'firsthit', + str(results_search_issue[0]), + TESTDATA['search_issue']['expected'], + ) + _testing_check_result( + 'total', len(results_search_issue), TESTDATA['search_issue']['total'] + ) + print('; succeeded.') print('All issues updated in the past %s days:' % TESTDATA['search_days_updated']) - since = datetime.datetime.now() - datetime.timedelta(days=TESTDATA['search_days_updated']) + since = datetime.datetime.now() - datetime.timedelta( + days=TESTDATA['search_days_updated'] + ) for issue in project.updated_issues(since): print(issue.web_url, issue.summary[0:80]) -if __name__ == "__main__": +if __name__ == '__main__': __test() diff --git a/regzbot/_repsources/_gitlab.py b/regzbot/_repsources/_gitlab.py index 9881241..2065d06 100644 --- a/regzbot/_repsources/_gitlab.py +++ b/regzbot/_repsources/_gitlab.py @@ -13,23 +13,27 @@ from regzbot import PatchKind import regzbot._repsources._trackers -if __name__ != "__main__": +if __name__ != '__main__': import regzbot + logger = regzbot.logger else: import logging + logger = logging if False: # if True: logger.basicConfig(level=logging.DEBUG) - logging.getLogger("urllib3").setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.WARNING) _CACHE_INSTANCES = {} _CACHE_PROJECTS = {} class GlActivity(regzbot._repsources._trackers._activity): - def __init__(self, gl_issue, *, comment=None, comment_number=None, commit=None, event=None): + def __init__( + self, gl_issue, *, comment=None, comment_number=None, commit=None, event=None + ): self.id = None self.patchkind = 0 summary_prefix = '%s, issue %s' % 
(gl_issue.gl_project.longname, gl_issue.id) @@ -48,24 +52,32 @@ def __init__(self, gl_issue, *, comment=None, comment_number=None, commit=None, self.realname = comment.author['name'] if commit: self.patchkind = PatchKind.getby_commit_header(commit.message) - self.summary = '%s: gitlab noticed a commit referencing this issue' % summary_prefix + self.summary = ( + '%s: gitlab noticed a commit referencing this issue' + % summary_prefix + ) else: - self.summary = '%s: new comment (#%s)' % (summary_prefix, comment_number) + self.summary = '%s: new comment (#%s)' % ( + summary_prefix, + comment_number, + ) self.username = comment.author['username'] self.web_url = '%s#note_%s' % (gl_issue.web_url, comment.id) elif event: self.created_at = datetime.datetime.fromisoformat(event.created_at) self.message = '' self.realname = event.user['name'] - self.summary = "%s: state changed to: %s" % (summary_prefix, event.state) + self.summary = '%s: state changed to: %s' % (summary_prefix, event.state) self.username = event.user['username'] self.web_url = gl_issue.web_url else: - logger.critical('[gitlab] GlActivity called with something unknown; aborting.') + logger.critical( + '[gitlab] GlActivity called with something unknown; aborting.' 
+ ) sys.exit(1) -class GlInstance(): +class GlInstance: def __init__(self, netloc, token): logger.debug('[gitlab] %s: connecting', netloc) self.web_url = 'https://%s' % netloc @@ -77,7 +89,9 @@ def project(self, project_name): logger.debug('[gitlab] %s: opening project %s', self.web_url, project_name) if len(_CACHE_PROJECTS) > 12: del _CACHE_PROJECTS[(next(iter(_CACHE_PROJECTS)))] - _CACHE_PROJECTS[project_name] = GlProject(self, self._glpy_instance.projects.get(project_name)) + _CACHE_PROJECTS[project_name] = GlProject( + self, self._glpy_instance.projects.get(project_name) + ) return _CACHE_PROJECTS[project_name] @@ -101,7 +115,7 @@ def _get_commit(comment): # ohh boy, there must be a better way to do this, but I looked hard and did not find one :-/ if type(comment.body) is set and comment.body[0] == 'mentioned in commit ': commit_def = comment.body[1] - elif comment.body.startswith("mentioned in commit "): + elif comment.body.startswith('mentioned in commit '): commit_def = comment.body[20:] else: return None @@ -109,7 +123,10 @@ def _get_commit(comment): if '@' in commit_def: projectname, hexsha = commit_def.split('@') if '/' not in projectname: - projectname = '%s/%s' % (self.gl_project.namespace_path, projectname) + projectname = '%s/%s' % ( + self.gl_project.namespace_path, + projectname, + ) gl_instance = self.gl_project.gl_instance project = gl_instance.project(projectname) else: @@ -119,7 +136,11 @@ def _get_commit(comment): try: return project.commit(hexsha) except gitlab.exceptions.GitlabGetError: - logger.debug('[gitlab] %s: ignoring commit %s, download failed', self.web_url[8:], hexsha) + logger.debug( + '[gitlab] %s: ignoring commit %s, download failed', + self.web_url[8:], + hexsha, + ) return None # walk comments (and thus commits) first, then events; @@ -136,8 +157,14 @@ def _get_commit(comment): continue if not commit: comment_counter += 1 - activities.append(GlActivity(self, comment=comment, comment_number=comment_counter, commit=commit)) - for 
event in self._glpy_issue.resourcestateevents.list(sort='asc', iterator=True): + activities.append( + GlActivity( + self, comment=comment, comment_number=comment_counter, commit=commit + ) + ) + for event in self._glpy_issue.resourcestateevents.list( + sort='asc', iterator=True + ): activities.append(GlActivity(self, event=event)) # sort @@ -153,7 +180,7 @@ def activities(self, *, since=None, until=None): yield activity -class GlProject(): +class GlProject: def __init__(self, gl_instance, glpy_project): self.gl_instance = gl_instance self._glpy_project = glpy_project @@ -183,25 +210,57 @@ def search(self, pattern, since): additional_msg = '' if since: additional_msg = ' submitted after %s' % since - logger.debug("[gitlab] %s: searching for '%s' in issues%s", self.web_url[8:], pattern, additional_msg) - for searchresult in self._glpy_project.search(gitlab.const.SearchScope.ISSUES, pattern, order_by='updated_at', sort='asc', iterator=True): + logger.debug( + "[gitlab] %s: searching for '%s' in issues%s", + self.web_url[8:], + pattern, + additional_msg, + ) + for searchresult in self._glpy_project.search( + gitlab.const.SearchScope.ISSUES, + pattern, + order_by='updated_at', + sort='asc', + iterator=True, + ): if datetime.datetime.fromisoformat(searchresult['created_at']) < since: continue - yield GlPossibleSearchHit(self, searchresult['iid'], pattern, since, is_hit_in_submission=True) - logger.debug("[gitlab] %s: searching for '%s' in comments%s", self.web_url[8:], pattern, additional_msg) - for searchresult in self._glpy_project.search(gitlab.const.SearchScope.PROJECT_NOTES, pattern, order_by='updated_at', sort='asc', iterator=True): + yield GlPossibleSearchHit( + self, searchresult['iid'], pattern, since, is_hit_in_submission=True + ) + logger.debug( + "[gitlab] %s: searching for '%s' in comments%s", + self.web_url[8:], + pattern, + additional_msg, + ) + for searchresult in self._glpy_project.search( + gitlab.const.SearchScope.PROJECT_NOTES, + pattern, + 
order_by='updated_at', + sort='asc', + iterator=True, + ): if datetime.datetime.fromisoformat(searchresult['created_at']) < since: continue - yield GlPossibleSearchHit(self, searchresult['noteable_iid'], pattern, since) + yield GlPossibleSearchHit( + self, searchresult['noteable_iid'], pattern, since + ) def updated_issues(self, since): - logger.debug('[gitlab] %s: retrieving issues updated since %s', self.web_url[8:], since) - for issue in self._glpy_project.issues.list(iterator=True, order_by='updated_at', updated_after=since): + logger.debug( + '[gitlab] %s: retrieving issues updated since %s', self.web_url[8:], since + ) + for issue in self._glpy_project.issues.list( + iterator=True, order_by='updated_at', updated_after=since + ): yield GlIssue(self, issue) class GlPossibleSearchHit(regzbot._repsources._trackers._possible_search_result): - def __init__(self, gl_project, issue_id, pattern, since, *, is_hit_in_submission=False): + def __init__( + self, gl_project, issue_id, pattern, since, *, is_hit_in_submission=False + ): self._gl_project = gl_project self._issue = None self._hit_in_submission = is_hit_in_submission @@ -242,7 +301,7 @@ def __init__(self, *args, **kwargs): def _gl_project(self): parsed_url = urllib.parse.urlparse(self.serverurl) instance_name = parsed_url.netloc - project_name = parsed_url.path.strip("/") + project_name = parsed_url.path.strip('/') instance = connect(instance_name) project = instance.project(project_name) @@ -307,16 +366,16 @@ def __test(): 'issue': { 'total': 16, 'issue_id': 8357, - 'expected': ''' => {'created_at': '2023-04-11 16:17:04.368000+00:00', 'message': 'I'm working on a "hatch/jinlon" Chromebook which is a Cometlake-U device, and h…', 'realname': 'Ross Zwisler', 'state': 'closed', 'summary': 'CML-U: external 5120x2160 monitor can't play video', 'username': 'zwisler', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357'}''' + 'expected': """ => {'created_at': '2023-04-11 16:17:04.368000+00:00', 
'message': 'I'm working on a "hatch/jinlon" Chromebook which is a Cometlake-U device, and h…', 'realname': 'Ross Zwisler', 'state': 'closed', 'summary': 'CML-U: external 5120x2160 monitor can't play video', 'username': 'zwisler', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357'}""", }, 'comments_recent': { 'since': datetime.datetime.fromisoformat('2023-04-18T16:37:00.000Z'), - 'expected': ''' => {'created_at': '2023-04-18 16:37:48.523000+00:00', 'message': '[0001-drm-i915-Check-pipe-source-size-when-using-skl-scale.patch](/uploads/d3b7…', 'realname': 'Ville Syrjälä', 'summary': 'drm/intel, issue 8357: new comment (#4)', 'username': 'vsyrjala', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357#note_1873234'}''' + 'expected': """ => {'created_at': '2023-04-18 16:37:48.523000+00:00', 'message': '[0001-drm-i915-Check-pipe-source-size-when-using-skl-scale.patch](/uploads/d3b7…', 'realname': 'Ville Syrjälä', 'summary': 'drm/intel, issue 8357: new comment (#4)', 'username': 'vsyrjala', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357#note_1873234'}""", }, 'commits_recent': { 'since': datetime.datetime.fromisoformat('2023-05-06T00:00:00.000Z'), - 'expected': ''' => {'created_at': '2023-05-17 19:20:40.224000+00:00', 'message': 'mentioned in commit superm1/linux@74a03d3c8d895a7d137bb4be8e40cae886f5d973', 'realname': 'Ville Syrjälä', 'summary': 'drm/intel, issue 8357: gitlab noticed a commit referencing this issue', 'username': 'vsyrjala', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357#note_1912677'}''', - 'patchkind': 7 + 'expected': """ => {'created_at': '2023-05-17 19:20:40.224000+00:00', 'message': 'mentioned in commit superm1/linux@74a03d3c8d895a7d137bb4be8e40cae886f5d973', 'realname': 'Ville Syrjälä', 'summary': 'drm/intel, issue 8357: gitlab noticed a commit referencing this issue', 'username': 'vsyrjala', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/8357#note_1912677'}""", + 
'patchkind': 7, }, 'search_since': { 'pattern': '805f04d42a6b5f4187935b43c9c39ae03ccfa761', @@ -327,15 +386,15 @@ def __test(): 'pattern': '805f04d42a6b5f4187935b43c9c39ae03ccfa761', 'total': 1, 'since': datetime.datetime.fromisoformat('2022-08-27 00:00:01+00:00'), - 'expected': ''' => {'created_at': '2022-08-27 13:26:12+00:00', 'message': 'After taking the twelve ehm 15 step program :D $ git bisect log - bad: [f2906a…', 'realname': 'JackCasual', 'summary': 'drm/intel, issue 6652: new comment (#6)', 'username': 'JackCasual', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/6652#note_1526397'}''' + 'expected': """ => {'created_at': '2022-08-27 13:26:12+00:00', 'message': 'After taking the twelve ehm 15 step program :D $ git bisect log - bad: [f2906a…', 'realname': 'JackCasual', 'summary': 'drm/intel, issue 6652: new comment (#6)', 'username': 'JackCasual', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/6652#note_1526397'}""", }, 'search_issue': { 'pattern': '805f04d42a6b5f4187935b43c9c39ae03ccfa761', 'since': datetime.datetime.fromisoformat('2022-08-26 00:00:01+00:00'), 'total': 2, - 'expected': ''' => {'created_at': '2022-08-26 04:24:15.380000+00:00', 'message': 'I have a new Framework Laptop with an i7-1280P and Xe graphics, running Debian …', 'realname': 'Brian Tarricone', 'summary': 'drm/intel, issue 6679: submission', 'username': 'kelnos', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/6679'}''' + 'expected': """ => {'created_at': '2022-08-26 04:24:15.380000+00:00', 'message': 'I have a new Framework Laptop with an i7-1280P and Xe graphics, running Debian …', 'realname': 'Brian Tarricone', 'summary': 'drm/intel, issue 6679: submission', 'username': 'kelnos', 'web_url': 'https://gitlab.freedesktop.org/drm/intel/-/issues/6679'}""", }, - 'search_days_updated': 1 + 'search_days_updated': 1, } def _testing_check_result(kind, value, expected): @@ -346,9 +405,12 @@ def _testing_check_result(kind, value, expected): print(" %s 
(unknown, apparently '%s')" % (kind, value)) return else: - print('\n%s: mismatch; expected vs retrieved view:\n%s\n%s' % (kind, expected, value)) + print( + '\n%s: mismatch; expected vs retrieved view:\n%s\n%s' + % (kind, expected, value) + ) if len(sys.argv) < 3 or sys.argv[2] != '--warn': - print(" Aborting.") + print(' Aborting.') sys.exit(1) # = setup = @@ -363,70 +425,107 @@ def _testing_check_result(kind, value, expected): parsed_url = urllib.parse.urlparse(TESTDATA['project']) name_instance = parsed_url.netloc - name_project = parsed_url.path.strip("/") + name_project = parsed_url.path.strip('/') instance = connect(name_instance, token=sys.argv[1]) project = instance.project(name_project) # = go = - print("Checking basic issue:", flush=True, end='') + print('Checking basic issue:', flush=True, end='') issue = project.issue(id=TESTDATA['issue']['issue_id']) _testing_check_result('data', str(issue), TESTDATA['issue']['expected']) - _testing_check_result('total', len(list(issue.activities())), - TESTDATA['issue']['total']) - print("; succeeded.") + _testing_check_result( + 'total', len(list(issue.activities())), TESTDATA['issue']['total'] + ) + print('; succeeded.') - print("Checking a comment:", flush=True, end='') + print('Checking a comment:', flush=True, end='') for comment in issue.activities(since=TESTDATA['comments_recent']['since']): - _testing_check_result('firsthit', str(comment), TESTDATA['comments_recent']['expected']) + _testing_check_result( + 'firsthit', str(comment), TESTDATA['comments_recent']['expected'] + ) break - print("; succeeded.") + print('; succeeded.') - print("Checking a commit:", flush=True, end='') + print('Checking a commit:', flush=True, end='') for commit in issue.activities(since=TESTDATA['commits_recent']['since']): - _testing_check_result('firsthit, ', str(commit), TESTDATA['commits_recent']['expected']) - _testing_check_result('patchkind of firsthit', commit.patchkind, TESTDATA['commits_recent']['patchkind']) + 
_testing_check_result( + 'firsthit, ', str(commit), TESTDATA['commits_recent']['expected'] + ) + _testing_check_result( + 'patchkind of firsthit', + commit.patchkind, + TESTDATA['commits_recent']['patchkind'], + ) break - print("; succeeded.") + print('; succeeded.') if 'search_since' in TESTDATA: - print("Checking search:", flush=True, end='') + print('Checking search:', flush=True, end='') results_search_broad = [] - for result in project.search(TESTDATA['search_since']['pattern'], datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z')): + for result in project.search( + TESTDATA['search_since']['pattern'], + datetime.datetime.fromisoformat('2020-01-01T00:00:00.00Z'), + ): for hit in result._hits(): results_search_broad.append(hit) results_search_narrow = [] - for result in project.search(TESTDATA['search_since']['pattern'], TESTDATA['search_since']['date']): + for result in project.search( + TESTDATA['search_since']['pattern'], TESTDATA['search_since']['date'] + ): for hit in result._hits(): results_search_narrow.append(hit) - _testing_check_result('total', len(results_search_broad), TESTDATA['search_since']['total']) - _testing_check_result('difference', len(results_search_broad) - len(results_search_narrow), 1) - print("; succeeded.") + _testing_check_result( + 'total', len(results_search_broad), TESTDATA['search_since']['total'] + ) + _testing_check_result( + 'difference', len(results_search_broad) - len(results_search_narrow), 1 + ) + print('; succeeded.') if 'search_comment' in TESTDATA: - print("Checking search (pattern in comment):", flush=True, end='') + print('Checking search (pattern in comment):', flush=True, end='') results_search_comments = [] - for result in project.search(TESTDATA['search_comment']['pattern'], since=TESTDATA['search_comment']['since']): + for result in project.search( + TESTDATA['search_comment']['pattern'], + since=TESTDATA['search_comment']['since'], + ): for hit in result._hits(): results_search_comments.append(hit) - 
_testing_check_result('firsthit', str(results_search_comments[0]), TESTDATA['search_comment']['expected']) - _testing_check_result('total', len(results_search_comments), TESTDATA['search_comment']['total']) - print("; succeeded.") + _testing_check_result( + 'firsthit', + str(results_search_comments[0]), + TESTDATA['search_comment']['expected'], + ) + _testing_check_result( + 'total', len(results_search_comments), TESTDATA['search_comment']['total'] + ) + print('; succeeded.') if 'search_issue' in TESTDATA: - print("Checking search (pattern in issue):", flush=True, end='') + print('Checking search (pattern in issue):', flush=True, end='') results_search_issue = [] - for result in project.search(TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since']): + for result in project.search( + TESTDATA['search_issue']['pattern'], since=TESTDATA['search_issue']['since'] + ): for hit in result._hits(): results_search_issue.append(hit) - _testing_check_result('firsthit', str(results_search_issue[0]), TESTDATA['search_issue']['expected']) - _testing_check_result('total', len(results_search_issue), TESTDATA['search_issue']['total']) - print("; succeeded.") + _testing_check_result( + 'firsthit', + str(results_search_issue[0]), + TESTDATA['search_issue']['expected'], + ) + _testing_check_result( + 'total', len(results_search_issue), TESTDATA['search_issue']['total'] + ) + print('; succeeded.') print('All issues updated in the past %s days:' % TESTDATA['search_days_updated']) - since = datetime.datetime.now() - datetime.timedelta(days=TESTDATA['search_days_updated']) + since = datetime.datetime.now() - datetime.timedelta( + days=TESTDATA['search_days_updated'] + ) for issue in project.updated_issues(since): print(issue.web_url, issue.summary[0:80]) -if __name__ == "__main__": +if __name__ == '__main__': __test() diff --git a/regzbot/_repsources/_lore.py b/regzbot/_repsources/_lore.py index 04443fa..248646f 100644 --- a/regzbot/_repsources/_lore.py +++ 
b/regzbot/_repsources/_lore.py @@ -20,11 +20,13 @@ from regzbot import ReportThread from functools import cached_property -if __name__ != "__main__": +if __name__ != '__main__': import regzbot + logger = regzbot.logger else: import logging + logger = logging # if False: if True: @@ -34,7 +36,7 @@ _NNTP_CONNECTION = None -class LoreNntp(): +class LoreNntp: # without this, occasionally [as on 20210831] errors like "nntplib.NNTPDataError: line too long" occur; not sure, # might be a bug in the public-inbox code behind lore nntplib._MAXLINE = 65536 @@ -59,7 +61,9 @@ def _article(self, id, group): self.__init_connection(forced_reconnect=True) self._group(group) _, article = self._nntp_connection.article(id) - return email.message_from_bytes(b'\n'.join(article.lines), policy=email.policy.default) + return email.message_from_bytes( + b'\n'.join(article.lines), policy=email.policy.default + ) def _group(self, groupname): splitted = groupname.split('/', maxsplit=4) @@ -77,35 +81,43 @@ def _over(self, id_first, id_last): yield id, over -class LoreHttps(): +class LoreHttps: @staticmethod def download_thread(msgid, *, repsrc=None): if regzbot.is_running_citesting('offline'): import os + found_something = False for directory in regzbot._TESTING['emaildirs']: - filename = os.path.join(directory, "%s.regzbot" % msgid) + filename = os.path.join(directory, '%s.regzbot' % msgid) if not os.path.isfile(filename): continue if not found_something: found_something = True for mboxmsg in mailbox.mbox(filename): - yield email.message_from_bytes(mboxmsg.as_bytes(), policy=email.policy.default) + yield email.message_from_bytes( + mboxmsg.as_bytes(), policy=email.policy.default + ) if not found_something: raise regzbot.RepDownloadError else: with tempfile.NamedTemporaryFile() as tmpfile: url = 'https://lore.kernel.org/all/%s/t.mbox.gz' % msgid try: - logger.debug("[lore] downloading %s", url) + logger.debug('[lore] downloading %s', url) with urllib.request.urlopen(url) as response: with 
gzip.open(response) as uncompressed: shutil.copyfileobj(uncompressed, tmpfile) except urllib.error.HTTPError as err: - logger.critical('[lore] failed to download thread from %s: %s', url, err) + logger.critical( + '[lore] failed to download thread from %s: %s', url, err + ) raise regzbot.RepDownloadError for message in mailbox.mbox(tmpfile.name): - yield email.message_from_bytes(message.as_bytes(), policy=email.policy.default) + yield email.message_from_bytes( + message.as_bytes(), policy=email.policy.default + ) + # unused as of now # @@ -133,7 +145,7 @@ def download_thread(msgid, *, repsrc=None): # return email.message_from_string(tmpfile.read().decode('utf-8', errors='ignore'), policy=email.policy.default) -class LoActivity(): +class LoActivity: def __init__(self, lo_thread, msg): self.lo_thread = lo_thread self._msg = msg @@ -199,23 +211,39 @@ def recipients(self): # https://lore.kernel.org/all/20211005053239.3E8DEC4338F@smtp.codeaurora.org/raw # https://lore.kernel.org/all/20210925074531.10446-1-tomm.merciai@gmail.com/raw # related: https://bugs.python.org/issue39100 - logger.warning('Ignoring "%s" in %s due to and exception: "AttributeError: %s"', - field, self.validate_msgid(self._msg['message-id']), err) + logger.warning( + 'Ignoring "%s" in %s due to and exception: "AttributeError: %s"', + field, + self.validate_msgid(self._msg['message-id']), + err, + ) except ValueError as err: # Workaround for https://lore.kernel.org/all/1634261360.fed2opbgxw.astroid@bobo.none/raw # -> "ValueError: invalid arguments; address parts cannot contain CR or LF" - logger.warning('Ignoring "%s" in %s due to and exception: "ValueError: %s"', - field, self.validate_msgid(self._msg['message-id']), err) + logger.warning( + 'Ignoring "%s" in %s due to and exception: "ValueError: %s"', + field, + self.validate_msgid(self._msg['message-id']), + err, + ) except IndexError as err: # workaround for the "=?utf-8?q?=2C?=linux-arm-msm@vger.kernel.org" in # 
https://lore.kernel.org/linux-pci/166983076821.2517843.6476270112700027226.robh@kernel.org/raw - logger.warning('Ignoring "field" in %s due to an exception: "HeaderParseError: %s"', - field, self.validate_msgid(self._msg['message-id']), err) + logger.warning( + 'Ignoring "field" in %s due to an exception: "HeaderParseError: %s"', + field, + self.validate_msgid(self._msg['message-id']), + err, + ) except TypeError as err: # workaround for the ".@3429e2599065" in # https://lore.kernel.org/all/202312271450.C9YmLJn2-lkp@intel.com/ - logger.warning('Ignoring "field" in %s due to an exception: "TypeError: %s"', - field, self.validate_msgid(self._msg['message-id']), err) + logger.warning( + 'Ignoring "field" in %s due to an exception: "TypeError: %s"', + field, + self.validate_msgid(self._msg['message-id']), + err, + ) return recipients @cached_property @@ -230,7 +258,9 @@ def patchkind(self): mocked_msg = email.message.EmailMessage() mocked_msg.set_content(attachment.get_content()) if 'subject' in mocked_msg: - newpatchkind = PatchKind.getby_content(mocked_msg.get_content(), subject=mocked_msg['subject']) + newpatchkind = PatchKind.getby_content( + mocked_msg.get_content(), subject=mocked_msg['subject'] + ) else: newpatchkind = PatchKind.getby_content(mocked_msg.get_content()) if newpatchkind > patchkind: @@ -262,7 +292,18 @@ def username(self): return self._username def __str__(self): - return _describe(self, ('created_at', 'message', 'realname', 'patchkind', 'summary', 'username', 'web_url')) + return _describe( + self, + ( + 'created_at', + 'message', + 'realname', + 'patchkind', + 'summary', + 'username', + 'web_url', + ), + ) def _headerparse_from(self): self._realname, self._username = email.utils.parseaddr(self._msg['From']) @@ -282,26 +323,28 @@ def _headerparse_inreplyto(self): @staticmethod def validate_msgid(msgid): # this gets rid of everything after > (some email clients insert something there...) 
- msgid = msgid.split(">", 1) + msgid = msgid.split('>', 1) return msgid[0].strip(' <>') @staticmethod def _validate_subject(subject): - return subject.replace("\n", "").strip() + return subject.replace('\n', '').strip() @staticmethod def _subject_tagless(subject): return re.sub(r'^ *\[regression\] *', '', subject, flags=re.IGNORECASE) -class LoreThread(): +class LoreThread: def __init__(self, *, msgid=None, msg=None): if msgid and not msg: self._id = urllib.parse.unquote(msgid) self._init_activity = {} elif msg and not msgid: loact = LoActivity(self, msg) - self._init_activity = {loact.id: loact, } + self._init_activity = { + loact.id: loact, + } self._id = loact.id else: raise RuntimeError @@ -385,7 +428,9 @@ def __init__(self, reptrd, lo_activity): if reptrd.id == lo_activity.id: self.reptrd = reptrd else: - self.reptrd = LoRepTrd(self.repsrc, self.lo_thread, lo_activity=self.lo_activity) + self.reptrd = LoRepTrd( + self.repsrc, self.lo_thread, lo_activity=self.lo_activity + ) self.id = None super().__init__() @@ -393,12 +438,14 @@ def __init__(self, reptrd, lo_activity): class LoRepSrc(ReportSource): def supports_url(self, url_lowered, url_parsed): - if url_parsed.netloc in ('lore.kernel.org', 'lkml.kernel.org') and (self.name == 'lore_all' or regzbot.is_running_citesting('offline')): + if url_parsed.netloc in ('lore.kernel.org', 'lkml.kernel.org') and ( + self.name == 'lore_all' or regzbot.is_running_citesting('offline') + ): path_split = url_parsed.path.split('/', maxsplit=3) if len(path_split) < 3: raise regzbot.RepDownloadError if not path_split[2]: - logger.error("[lore] cound not parse %s", url_parsed.geturl()) + logger.error('[lore] cound not parse %s', url_parsed.geturl()) raise regzbot.RepDownloadError return path_split[2] @@ -407,7 +454,7 @@ def thread(self, *, id=None, url=None): url id = self.supports_url(url.lower()) if not id: - logger.error("[lore] cound not parse %s", url) + logger.error('[lore] cound not parse %s', url) raise 
regzbot.RepDownloadError lo_thread = LoreThread(msgid=id) return LoRepTrd(self, lo_thread) @@ -429,15 +476,22 @@ def update(self): if regzbot.is_running_citesting('offline'): import pathlib import os - filenames = sorted(pathlib.Path(self.serverurl).iterdir(), key=os.path.getmtime) + + filenames = sorted( + pathlib.Path(self.serverurl).iterdir(), key=os.path.getmtime + ) for file in filenames: if os.path.islink(file): continue for mboxmsg in mailbox.mbox(file): - msg = email.message_from_bytes(mboxmsg.as_bytes(), policy=email.policy.default) + msg = email.message_from_bytes( + mboxmsg.as_bytes(), policy=email.policy.default + ) lo_thread = LoreThread(msg=msg) lo_retrd = LoRepTrd(self, lo_thread) - if regzbot.RecordProcessedMsgids.check_presence(lo_retrd.id, lo_retrd.gmtime): + if regzbot.RecordProcessedMsgids.check_presence( + lo_retrd.id, lo_retrd.gmtime + ): continue lo_retrd.process_single() else: @@ -450,7 +504,9 @@ def update(self): if not self.lastchked: self.set_lastchked(id_first) logger.info( - '[lore] seeing %s for the first time, starting to monitor it from now on', self.serverurl) + '[lore] seeing %s for the first time, starting to monitor it from now on', + self.serverurl, + ) self.set_lastchked(id_last) return elif self.lastchked == id_last: @@ -462,16 +518,27 @@ def update(self): msgid = LoActivity.validate_msgid(over['message-id']) gmtime = email.utils.mktime_tz(email.utils.parsedate_tz(over['date'])) if regzbot.RecordProcessedMsgids.check_presence(msgid, gmtime): - logger.debug('[lore] skipping "%s", we already encountered it it', msgid) + logger.debug( + '[lore] skipping "%s", we already encountered it it', msgid + ) continue msg = lorenntp._article(id, self.serverurl) - if 'subject' in msg and msg['subject'].startswith(regzbot.REPORT_SUBJECT_PREFIX): - logger.debug("[lore] skipping mail %s, as it's a report we send", msgid) + if 'subject' in msg and msg['subject'].startswith( + regzbot.REPORT_SUBJECT_PREFIX + ): + logger.debug( + "[lore] skipping 
mail %s, as it's a report we send", msgid + ) continue if 'from' in msg: - if 'bugzilla-daemon@kernel.org' in msg['from'] or 'bugbot@kernel.org' in msg['from']: - logger.debug("[lore] skipping mail %s, as it's a bugzilla mail", msgid) + if ( + 'bugzilla-daemon@kernel.org' in msg['from'] + or 'bugbot@kernel.org' in msg['from'] + ): + logger.debug( + "[lore] skipping mail %s, as it's a bugzilla mail", msgid + ) continue lo_thread = LoreThread(msg=msg) lo_retrd = LoRepTrd(self, lo_thread) @@ -531,12 +598,18 @@ def update(self, since, until, *, actimon=None, triggering_repact=None): # handle this here and don't feed the msgs through the regular parsing code, as they might already have been # processed earlier try: - for activity in self._lo_thread.activities(msgid=self.id, since=since, until=until): + for activity in self._lo_thread.activities( + msgid=self.id, since=since, until=until + ): # add the activity to the list of processed ids, as we might not have seen it yet; but nevertheless # process it again, as it might have been irrelevant earlier, but that might have changed - regzbot.RecordProcessedMsgids.check_presence(activity.id, gmtime=activity.gmtime) + regzbot.RecordProcessedMsgids.check_presence( + activity.id, gmtime=activity.gmtime + ) repact = LoRepAct(self, activity) - regzbot._rbcmd.process_activity(repact, actimon=actimon, triggering_repact=triggering_repact) + regzbot._rbcmd.process_activity( + repact, actimon=actimon, triggering_repact=triggering_repact + ) except regzbot._rbcmd.RegressionCreatedException: # the handled activity contained a #regzbot introduced that created a regression for this issue; during that diff --git a/regzbot/_repsources/_trackers.py b/regzbot/_repsources/_trackers.py index f1e2a07..7c38bb5 100644 --- a/regzbot/_repsources/_trackers.py +++ b/regzbot/_repsources/_trackers.py @@ -11,21 +11,35 @@ import regzbot -class _activity(): +class _activity: def __str__(self): - return _describe(self, ('created_at', 'message', 'realname', 
'summary', 'username', 'web_url')) + return _describe( + self, + ('created_at', 'message', 'realname', 'summary', 'username', 'web_url'), + ) -class _issue(): +class _issue: def __str__(self): - return _describe(self, ('created_at', 'message', 'realname', 'state', 'summary', 'username', 'web_url')) + return _describe( + self, + ( + 'created_at', + 'message', + 'realname', + 'state', + 'summary', + 'username', + 'web_url', + ), + ) @classmethod def activities(cls, *, since=None): raise NotImplementedError -class _possible_search_result(): +class _possible_search_result: def __init__(self, issue_id, pattern, since): self.id = issue_id self.issue_id = issue_id @@ -33,7 +47,7 @@ def __init__(self, issue_id, pattern, since): self._since = since def __str__(self): - return _describe(self, ('id', )) + return _describe(self, ('id',)) def _check_pattern(self, body): return bool(re.search(self._pattern, body)) @@ -56,7 +70,9 @@ class _reptrd(regzbot.ReportThread): def update(self, since, until, *, actimon=None, triggering_repact=None): try: for activity in self.activities(since=since, until=until): - regzbot._rbcmd.process_activity(activity, actimon=actimon, triggering_repact=triggering_repact) + regzbot._rbcmd.process_activity( + activity, actimon=actimon, triggering_repact=triggering_repact + ) except regzbot._rbcmd.RegressionCreatedException: # the handled activity contained a #regzbot introduced that created a regression for this issue; during that # process all activities (both older and younger) for it will be added by calling this method again, so @@ -77,7 +93,9 @@ def update(self): check_last = check_started - datetime.timedelta(days=14) if self.lastchked and self.mininterval: - earliest_check = regzbot.timendate_gmtime_to_dt(self.lastchked + self.mininterval) + earliest_check = regzbot.timendate_gmtime_to_dt( + self.lastchked + self.mininterval + ) if earliest_check > check_started: return diff --git a/regzbot/commandl.py b/regzbot/commandl.py index ecd380b..1c9fcb2 
100644 --- a/regzbot/commandl.py +++ b/regzbot/commandl.py @@ -6,7 +6,6 @@ import argparse -import glob import logging import os import tempfile @@ -85,12 +84,19 @@ def cmd(): ) # basics - parser.add_argument('--version', action='version', - version=regzbot.__VERSION__) - parser.add_argument('--debug', action='store_true', default=False, - help='Enable debugging info in output') - parser.add_argument('--quiet', action='store_true', default=False, - help='Only print critical information') + parser.add_argument('--version', action='version', version=regzbot.__VERSION__) + parser.add_argument( + '--debug', + action='store_true', + default=False, + help='Enable debugging info in output', + ) + parser.add_argument( + '--quiet', + action='store_true', + default=False, + help='Only print critical information', + ) # subcommands subparsers = parser.add_subparsers(help='sub-command help', dest='subcmd') @@ -109,7 +115,9 @@ def cmd(): # recheck sparser_recheck = subparsers.add_parser('recheck', help='Recheck messages') - sparser_recheck.add_argument(dest='msgids_to_check', help='msgids to recheck', nargs='+') + sparser_recheck.add_argument( + dest='msgids_to_check', help='msgids to recheck', nargs='+' + ) sparser_recheck.set_defaults(func=cmd_recheck) # status @@ -120,10 +128,18 @@ def cmd(): if get_testresults_datadir(): sparser_test = subparsers.add_parser('test', help='run tests') sparser_test.add_argument( - '--tmpdir', dest='tmpdir', default=None, help='Directory for creating repos and mails for testing') + '--tmpdir', + dest='tmpdir', + default=None, + help='Directory for creating repos and mails for testing', + ) for mode in regzbot.testing.SUPPORTED_TESTMODES.keys(): sparser_test.add_argument( - '--%s' % mode, action='store_true', default=False, help='Run only %s tests' % mode) + '--%s' % mode, + action='store_true', + default=False, + help='Run only %s tests' % mode, + ) sparser_test.set_defaults(func=cmd_test) # parse diff --git a/regzbot/export_csv.py 
b/regzbot/export_csv.py index 85125cd..6e7f785 100644 --- a/regzbot/export_csv.py +++ b/regzbot/export_csv.py @@ -6,6 +6,7 @@ import regzbot + logger = regzbot.logger @@ -14,12 +15,15 @@ def __init__(self, *args): super().__init__(*args) def csv(self): - if self.repsrcid \ - and self.entry \ - and regzbot.RegActivityMonitor.ismonitored( - self.entry, self.regid, self.repsrcid): - return "%s, %s [monitored]" % (self.subject, self.link) - return "%s, %s, %s, %s" % (self.subject, self.link, self.author, self.gmtime) + if ( + self.repsrcid + and self.entry + and regzbot.RegActivityMonitor.ismonitored( + self.entry, self.regid, self.repsrcid + ) + ): + return '%s, %s [monitored]' % (self.subject, self.link) + return '%s, %s, %s, %s' % (self.subject, self.link, self.author, self.gmtime) class RegHistoryCSV(regzbot.RegHistory): @@ -27,7 +31,13 @@ def __init__(self, *args): super().__init__(*args) def csv(self): - return "%s, %s, %s, %s, %s" % (self.subject, self.gmtime, self.author, self.url(), self.regzbotcmd) + return '%s, %s, %s, %s, %s' % ( + self.subject, + self.gmtime, + self.author, + self.url(), + self.regzbotcmd, + ) class RegActivityEventCSV(regzbot.RegActivityEvent): @@ -35,7 +45,13 @@ def __init__(self, *args): super().__init__(*args) def csv(self): - return "%s, %s, %s, %s, PatchKind(%s)" % (self.subject, self.author, self.url(), self.gmtime, self.patchkind.name) + return '%s, %s, %s, %s, PatchKind(%s)' % ( + self.subject, + self.author, + self.url(), + self.gmtime, + self.patchkind.name, + ) class RegressionFullCSV(regzbot.RegressionFull): @@ -67,15 +83,30 @@ def add_basics(self, compiled): if len(flags) == 0: flags.append('no flags') - compiled.append("REGRESSION: %s, %s (%s), %s, %s, %s, %s: %s" % - (self.subject, self._introduced_short, self._introduced_presentable, - self._introduced_url, self.treename, self._branchname, self.versionline, ', '.join(flags))) + compiled.append( + 'REGRESSION: %s, %s (%s), %s, %s, %s, %s: %s' + % ( + self.subject, + 
self._introduced_short, + self._introduced_presentable, + self._introduced_url, + self.treename, + self._branchname, + self.versionline, + ', '.join(flags), + ) + ) reportlist = list() for regression in self, *self._dupes: report = regression._actim_report - content = ("%s, %s, %s, %s, %s" % (report.gmtime, report.subject, report.authorname, - report.authormail, regzbot.ReportSource.get_by_id(report.repsrcid).url(report.entry))) + content = '%s, %s, %s, %s, %s' % ( + report.gmtime, + report.subject, + report.authorname, + report.authormail, + regzbot.ReportSource.get_by_id(report.repsrcid).url(report.entry), + ) if report == self._actim_report: reportlist.insert(0, 'INITIAL_REPORT: %s' % content) else: @@ -86,13 +117,22 @@ def add_basics(self, compiled): def add_solved(self, compiled): if self.solved_duplicateof: - duplicatetext = (" [duplicate of %s]" % self.solved_duplicateof) + duplicatetext = ' [duplicate of %s]' % self.solved_duplicateof else: duplicatetext = '' if self.solved_reason or self.solved_duplicateof: - compiled.append("SOLVED: %s, %s, %s, %s, %s%s" % - (self.solved_reason, self.solved_gmtime, self._solved_entry_presentable, self.solved_url, self.solved_subject, duplicatetext)) + compiled.append( + 'SOLVED: %s, %s, %s, %s, %s%s' + % ( + self.solved_reason, + self.solved_gmtime, + self._solved_entry_presentable, + self.solved_url, + self.solved_subject, + duplicatetext, + ) + ) return compiled def add_links(self, compiled): @@ -112,7 +152,7 @@ def add_history(self, compiled): def add_latest(self, compiled): if self._actievents: - compiled.append("LATEST: " + self._actievents[-1].csv()) + compiled.append('LATEST: ' + self._actievents[-1].csv()) return compiled def dump(self): @@ -124,8 +164,17 @@ def __init__(self, *args): super().__init__(*args) def dump(self): - return "UNHANDLED: %s, %s, %s, %s, %s, %s, %s, %s, %s\n" % (self.unhanid, self.link, self.note, self.gmtime, self.regid, - self.subject, self.solved_gmtime, self.solved_link, 
self.solved_subject) + return 'UNHANDLED: %s, %s, %s, %s, %s, %s, %s, %s, %s\n' % ( + self.unhanid, + self.link, + self.note, + self.gmtime, + self.regid, + self.subject, + self.solved_gmtime, + self.solved_link, + self.solved_subject, + ) def dumpall_csv(order='regid'): diff --git a/regzbot/export_mail.py b/regzbot/export_mail.py index b5a6300..0db5f29 100644 --- a/regzbot/export_mail.py +++ b/regzbot/export_mail.py @@ -6,7 +6,6 @@ from collections import Counter import datetime -import re from email.message import EmailMessage import email.utils import tempfile @@ -25,18 +24,25 @@ def __init__(self, *args): def mailreport(self): if self.author: monitored = '' - if self.repsrcid \ - and self.entry \ - and regzbot.RegActivityMonitor.ismonitored( - self.entry, self.regid, self.repsrcid): + if ( + self.repsrcid + and self.entry + and regzbot.RegActivityMonitor.ismonitored( + self.entry, self.regid, self.repsrcid + ) + ): monitored = '; thread monitored.' - authored = "\n %s days ago, by %s%s" % (regzbot.days_delta(self.gmtime), self.author, monitored) + authored = '\n %s days ago, by %s%s' % ( + regzbot.days_delta(self.gmtime), + self.author, + monitored, + ) else: authored = '' if self.subject == self.link: - return('* %s%s' % (self.subject, authored)) - return('* %s\n %s%s' % (self.subject, self.link, authored)) + return '* %s%s' % (self.subject, authored) + return '* %s\n %s%s' % (self.subject, self.link, authored) class RegressionMailReport(regzbot.RegressionFull): @@ -47,26 +53,38 @@ def __init__(self, *args): def compile(self, lastreport_gmtime): if lastreport_gmtime < self.gmtime_filed: - subject = "[ *NEW* ] %s" % self.subject + subject = '[ *NEW* ] %s' % self.subject else: subject = self.subject report = list() report.append(subject) - report.append('-'*len(subject)) - report.append('https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/' % - (self._actim_report.repsrc.generic_name, self._actim_report.repsrc.entryid)) - 
report.append(regzbot.ReportSource.get_by_id(self._actim_report.repsrcid).url(self._actim_report.entry)) + report.append('-' * len(subject)) + report.append( + 'https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/' + % ( + self._actim_report.repsrc.generic_name, + self._actim_report.repsrc.entryid, + ) + ) + report.append( + regzbot.ReportSource.get_by_id(self._actim_report.repsrcid).url( + self._actim_report.entry + ) + ) for regression in self._dupes: - report.append(regzbot.ReportSource.get_by_id( - regression._actim_report.repsrcid).url(regression._actim_report.entry)) + report.append( + regzbot.ReportSource.get_by_id(regression._actim_report.repsrcid).url( + regression._actim_report.entry + ) + ) statusline = [] actireports = list() for regression in self, *self._dupes: actireports.append(regression._actim_report) - statusline.append("\nBy ") + statusline.append('\nBy ') for actireport in actireports: if actireport.authorname: statusline.append(actireport.authorname) @@ -81,18 +99,20 @@ def compile(self, lastreport_gmtime): else: statusline.append(', ') - statusline.append("; ") + statusline.append('; ') statusline.append(str(regzbot.days_delta(self.gmtime))) - statusline.append(" days ago; ") + statusline.append(' days ago; ') statusline.append(str(len(self._actievents))) - statusline.append(" activities") + statusline.append(' activities') if len(self._actievents) > 0: - statusline.append(", latest ") + statusline.append(', latest ') statusline.append(str(regzbot.days_delta(self._actievents[-1].gmtime))) - statusline.append(" days ago") + statusline.append(' days ago') if self.poked: - statusline.append('; poked %s days ago' % regzbot.days_delta(self.poked.gmtime)) + statusline.append( + '; poked %s days ago' % regzbot.days_delta(self.poked.gmtime) + ) statusline.append('.') report.append(''.join(statusline)) @@ -135,9 +155,12 @@ def add_latestpatch(self, report): continue if patchcount == 1: - report.append("\nOne patch associated with this 
regression:") + report.append('\nOne patch associated with this regression:') else: - report.append("\n%s patch postings are associated with this regression, the latest is this:" % patchcount) + report.append( + '\n%s patch postings are associated with this regression, the latest is this:' + % patchcount + ) # avoid mentioning a patch twice for link in self._links: @@ -147,9 +170,12 @@ def add_latestpatch(self, report): self._links.remove(link) return report - report.append("* %s" % actievent.subject) - report.append(" %s" % actievent.url()) - report.append(" %s days ago, by %s" % (regzbot.days_delta(actievent.gmtime), actievent.author)) + report.append('* %s' % actievent.subject) + report.append(' %s' % actievent.url()) + report.append( + ' %s days ago, by %s' + % (regzbot.days_delta(actievent.gmtime), actievent.author) + ) break @@ -167,12 +193,18 @@ def add_involved(self, report, lastreport_gmtime): involved = '' prefix = '' for name, count in counted.most_common(): - involved += "%s%s (%s)" % (prefix, name, count) + involved += '%s%s (%s)' % (prefix, name, count) if prefix == '': prefix = ', ' wrapped = [''] - wrapped.extend(textwrap.wrap("Recent activities from: %s" % involved, width=72, subsequent_indent=' ')) + wrapped.extend( + textwrap.wrap( + 'Recent activities from: %s' % involved, + width=72, + subsequent_indent=' ', + ) + ) report.append('\n'.join(wrapped)) return report @@ -186,11 +218,22 @@ def add_links(self, report): return report def mailreport(self, lastreport_gmtime): - return('\n'.join(self.compile(lastreport_gmtime))) - - -class RegExportMailReport(): - def __init__(self, entry, gmtime_report, gmtime_filed, gmtime_activity, treename, versionline, backburner, identified, reporttext): + return '\n'.join(self.compile(lastreport_gmtime)) + + +class RegExportMailReport: + def __init__( + self, + entry, + gmtime_report, + gmtime_filed, + gmtime_activity, + treename, + versionline, + backburner, + identified, + reporttext, + ): self.entry = entry 
self.gmtime_report = gmtime_report self.gmtime_filed = gmtime_filed @@ -204,8 +247,14 @@ def __init__(self, entry, gmtime_report, gmtime_filed, gmtime_activity, treename @classmethod def __create_mail(cls, content, treename): msg = EmailMessage() - msg['To'] = 'LKML , Linus Torvalds , Linux regressions mailing list ' - msg['Subject'] = '%s for %s [%s]' % (regzbot.REPORT_SUBJECT_PREFIX, treename, datetime.date.today()) + msg['To'] = ( + 'LKML , Linus Torvalds , Linux regressions mailing list ' + ) + msg['Subject'] = '%s for %s [%s]' % ( + regzbot.REPORT_SUBJECT_PREFIX, + treename, + datetime.date.today(), + ) msg['Date'] = email.utils.localtime() msg['Message-ID'] = email.utils.make_msgid(domain='leemhuis.info') msg.set_content(content, cte='quoted-printable') @@ -216,7 +265,7 @@ def pagecreate(cls, categories, treename, lastreport_msgid): def repintro(report, number_issues, treename): intro = list() - print("Enter/Paste your intro for %s and hit Ctrl-D to save it." % treename) + print('Enter/Paste your intro for %s and hit Ctrl-D to save it.' % treename) while True: try: line = input() @@ -226,52 +275,79 @@ def repintro(report, number_issues, treename): if report: intro.append('\n---\n') - intro.append("Hi, this is regzbot, the Linux kernel regression tracking bot.") - intro.append("\nCurrently I'm aware of %s regressions in linux-%s. Find the" % (number_issues, treename)) - intro.append("current status below and the latest on the web:") - intro.append("\nhttps://linux-regtracking.leemhuis.info/regzbot/%s/" % treename) - intro.append("\nBye bye, hope to see you soon for the next report.") - intro.append(" Regzbot (on behalf of Thorsten Leemhuis)") - intro.append("\n") + intro.append( + 'Hi, this is regzbot, the Linux kernel regression tracking bot.' + ) + intro.append( + "\nCurrently I'm aware of %s regressions in linux-%s. 
Find the" + % (number_issues, treename) + ) + intro.append('current status below and the latest on the web:') + intro.append( + '\nhttps://linux-regtracking.leemhuis.info/regzbot/%s/' % treename + ) + intro.append('\nBye bye, hope to see you soon for the next report.') + intro.append(' Regzbot (on behalf of Thorsten Leemhuis)') + intro.append('\n') report.insert(0, '\n'.join(intro)) return report def repsectionheader(report, headline): - report.append('='*len(headline)) + report.append('=' * len(headline)) report.append(headline) - report.append('='*len(headline)) + report.append('=' * len(headline)) report.append('') return report def repfooter(report, lastreport_msgid): intro = "All regressions marked '[ *NEW* ]' were added since the previous report" if not lastreport_msgid: - report.append("%s." % intro) + report.append('%s.' % intro) else: - report.append("%s," % intro) - report.append("which can be found here:") - report.append("https://lore.kernel.org/r/%s\n" % lastreport_msgid) + report.append('%s,' % intro) + report.append('which can be found here:') + report.append('https://lore.kernel.org/r/%s\n' % lastreport_msgid) intro = None - report.append("Thanks for your attention, have a nice day!") - report.append("\n Regzbot, your hard working Linux kernel regression tracking robot") - report.append("\n\nP.S.: Wanna know more about regzbot or how to use it to track regressions") - report.append("for your subsystem? 
Then check out the getting started guide or the") - report.append("reference documentation:") - report.append("\nhttps://gitlab.com/knurd42/regzbot/-/blob/main/docs/getting_started.md") - report.append("https://gitlab.com/knurd42/regzbot/-/blob/main/docs/reference.md") - report.append("\nThe short version: if you see a regression report you want to see") - report.append("tracked, just send a reply to the report where you Cc") - report.append("regressions@lists.linux.dev with a line like this:") - report.append("\n#regzbot introduced: v5.13..v5.14-rc1") - report.append("\nIf you want to fix a tracked regression, just do what is expected") + report.append('Thanks for your attention, have a nice day!') + report.append( + '\n Regzbot, your hard working Linux kernel regression tracking robot' + ) + report.append( + '\n\nP.S.: Wanna know more about regzbot or how to use it to track regressions' + ) + report.append( + 'for your subsystem? Then check out the getting started guide or the' + ) + report.append('reference documentation:') + report.append( + '\nhttps://gitlab.com/knurd42/regzbot/-/blob/main/docs/getting_started.md' + ) + report.append( + 'https://gitlab.com/knurd42/regzbot/-/blob/main/docs/reference.md' + ) + report.append( + '\nThe short version: if you see a regression report you want to see' + ) + report.append('tracked, just send a reply to the report where you Cc') + report.append('regressions@lists.linux.dev with a line like this:') + report.append('\n#regzbot introduced: v5.13..v5.14-rc1') + report.append( + '\nIf you want to fix a tracked regression, just do what is expected' + ) report.append("anyway: add a 'Link:' tag with the url to the report, e.g.:") - report.append("\nLink: https://lore.kernel.org/all/30th.anniversary.repost@klaava.Helsinki.FI/") + report.append( + '\nLink: https://lore.kernel.org/all/30th.anniversary.repost@klaava.Helsinki.FI/' + ) return report number_issues = 0 report = list() - if treename == 'resolved' or treename == 
'unassociated' or treename == 'dormant': + if ( + treename == 'resolved' + or treename == 'unassociated' + or treename == 'dormant' + ): # no reports for those return report elif treename == 'next' or treename == 'stable': @@ -300,11 +376,11 @@ def repfooter(report, lastreport_msgid): report.append('') # add footer and header - report = repsectionheader(report, "End of report") + report = repsectionheader(report, 'End of report') report = repfooter(report, lastreport_msgid) report = repintro(report, number_issues, treename) - return ('\n'.join(report)) + return '\n'.join(report) @classmethod def categorize(cls, regressionlist, lastreport_gmtime): @@ -314,12 +390,14 @@ def categorize(cls, regressionlist, lastreport_gmtime): if regzbot.LATEST_VERSIONS['indevelopment'] == False: indevelopment_descriptive = '%s-post' % regzbot.LATEST_VERSIONS['latest'] else: - indevelopment_descriptive = '%s-rc' % regzbot.LATEST_VERSIONS['indevelopment'] + indevelopment_descriptive = ( + '%s-rc' % regzbot.LATEST_VERSIONS['indevelopment'] + ) categories = { 'next': { 'identified': { - 'desc': "culprit identified", + 'desc': 'culprit identified', 'entries': list(), }, 'default': { @@ -333,27 +411,39 @@ def categorize(cls, regressionlist, lastreport_gmtime): }, 'mainline': { 'identified_indevelopment': { - 'desc': "current cycle (%s.. aka %s), culprit identified" % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), + 'desc': 'current cycle (%s.. aka %s), culprit identified' + % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), 'entries': list(), }, 'unidentified_indevelopment': { - 'desc': "current cycle (%s.. aka %s), unknown culprit" % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), + 'desc': 'current cycle (%s.. 
aka %s), unknown culprit' + % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), 'entries': list(), }, 'identified_latest': { - 'desc': "previous cycle (%s..%s), culprit identified, with activity in the past three months" % (regzbot.LATEST_VERSIONS['previous'], regzbot.LATEST_VERSIONS['latest']), + 'desc': 'previous cycle (%s..%s), culprit identified, with activity in the past three months' + % ( + regzbot.LATEST_VERSIONS['previous'], + regzbot.LATEST_VERSIONS['latest'], + ), 'entries': list(), }, 'identified_old': { - 'desc': "older cycles (..%s), culprit identified, with activity in the past three months" % regzbot.LATEST_VERSIONS['previous'], + 'desc': 'older cycles (..%s), culprit identified, with activity in the past three months' + % regzbot.LATEST_VERSIONS['previous'], 'entries': list(), }, 'unidentified_latest': { - 'desc': "previous cycle (%s..%s), unknown culprit, with activity in the past three weeks" % (regzbot.LATEST_VERSIONS['previous'], regzbot.LATEST_VERSIONS['latest']), + 'desc': 'previous cycle (%s..%s), unknown culprit, with activity in the past three weeks' + % ( + regzbot.LATEST_VERSIONS['previous'], + regzbot.LATEST_VERSIONS['latest'], + ), 'entries': list(), }, 'unidentified_old': { - 'desc': 'older cycles (..%s), unknown culprit, with activity in the past three weeks' % regzbot.LATEST_VERSIONS['previous'], + 'desc': 'older cycles (..%s), unknown culprit, with activity in the past three weeks' + % regzbot.LATEST_VERSIONS['previous'], 'entries': list(), }, 'default': { @@ -367,7 +457,7 @@ def categorize(cls, regressionlist, lastreport_gmtime): }, 'stable': { 'identified': { - 'desc': "culprit identified", + 'desc': 'culprit identified', 'entries': list(), }, 'default': { @@ -396,8 +486,12 @@ def categorize(cls, regressionlist, lastreport_gmtime): } for regression in regressionlist: - filed_days = (datetime.datetime.now(datetime.timezone.utc) - - datetime.datetime.fromtimestamp(regression.gmtime_filed, datetime.timezone.utc)).days 
+ filed_days = ( + datetime.datetime.now(datetime.timezone.utc) + - datetime.datetime.fromtimestamp( + regression.gmtime_filed, datetime.timezone.utc + ) + ).days last_activity_days = regzbot.days_delta(regression.gmtime_activity) if regression.backburner: @@ -407,7 +501,9 @@ def categorize(cls, regressionlist, lastreport_gmtime): elif regression.treename == 'next' or regression.treename == 'stable': # only create reports for mainline for now continue - categories[regression.treename]['backburner']['entries'].append(regression) + categories[regression.treename]['backburner']['entries'].append( + regression + ) elif last_activity_days > 90: continue elif regression.treename == 'next' or regression.treename == 'stable': @@ -416,30 +512,48 @@ def categorize(cls, regressionlist, lastreport_gmtime): continue # if regression.identified: - categories[regression.treename]['identified']['entries'].append(regression) + categories[regression.treename]['identified']['entries'].append( + regression + ) else: - categories[regression.treename]['default']['entries'].append(regression) + categories[regression.treename]['default']['entries'].append( + regression + ) elif regression.treename == 'mainline': if regression.versionline == 'indevelopment': if regression.identified: - categories[regression.treename]['identified_indevelopment']['entries'].append(regression) + categories[regression.treename]['identified_indevelopment'][ + 'entries' + ].append(regression) else: - categories[regression.treename]['unidentified_indevelopment']['entries'].append(regression) + categories[regression.treename]['unidentified_indevelopment'][ + 'entries' + ].append(regression) # # for now only create reports for regression introduced in the current cycle elif True: continue # elif regression.versionline == 'latest' and regression.identified: - categories[regression.treename]['identified_latest']['entries'].append(regression) + categories[regression.treename]['identified_latest'][ + 'entries' + 
].append(regression) elif regression.versionline == 'latest' and last_activity_days < 21: - categories[regression.treename]['unidentified_latest']['entries'].append(regression) + categories[regression.treename]['unidentified_latest'][ + 'entries' + ].append(regression) elif regression.identified: - categories[regression.treename]['identified_old']['entries'].append(regression) + categories[regression.treename]['identified_old']['entries'].append( + regression + ) elif last_activity_days < 21: - categories[regression.treename]['unidentified_old']['entries'].append(regression) + categories[regression.treename]['unidentified_old'][ + 'entries' + ].append(regression) else: - categories[regression.treename]['default']['entries'].append(regression) + categories[regression.treename]['default']['entries'].append( + regression + ) else: categories['unassociated']['default']['entries'].append(regression) @@ -447,14 +561,16 @@ def categorize(cls, regressionlist, lastreport_gmtime): @classmethod def compile(cls): - logger.debug("[reportmail] generating") + logger.debug('[reportmail] generating') lastreport_msgid = regzbot.RegzbotState.get('lastreport_mainline_msgid') lastreport_gmtime = regzbot.RegzbotState.get('lastreport_mainline_gmtime') if lastreport_gmtime: lastreport_gmtime = int(lastreport_gmtime) else: - lastreport_gmtime = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + lastreport_gmtime = int( + datetime.datetime.now(datetime.timezone.utc).timestamp() + ) logger.debug('[reportmail] lastreport was %s' % lastreport_gmtime) @@ -472,9 +588,19 @@ def compile(cls): last_activity = regression._actievents[-1].gmtime else: last_activity = regression._histevents[-1].gmtime - regressionslist.append(cls(regression._actim_report.entry, regression.gmtime, regression.gmtime_filed, - last_activity, regression.treename, regression.versionline, - regression.backburner, regression.identified, regression.mailreport(lastreport_gmtime))) + regressionslist.append( + cls( + 
regression._actim_report.entry, + regression.gmtime, + regression.gmtime_filed, + last_activity, + regression.treename, + regression.versionline, + regression.backburner, + regression.identified, + regression.mailreport(lastreport_gmtime), + ) + ) regressionslist.sort(key=lambda x: x.gmtime_activity, reverse=True) categories = cls.categorize(regressionslist, lastreport_gmtime) @@ -482,31 +608,40 @@ def compile(cls): report_gmtime = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) with tempfile.TemporaryDirectory() as tmpdirname: for counter, treename in enumerate(categories.keys()): - report = cls.pagecreate(categories[treename], treename, lastreport_msgid) + report = cls.pagecreate( + categories[treename], treename, lastreport_msgid + ) if not report: logger.info('Nothing to report for %s' % treename) continue - filename = os.path.join(tmpdirname, "%s-regzbotreport-%s" % (counter, treename)) + filename = os.path.join( + tmpdirname, '%s-regzbotreport-%s' % (counter, treename) + ) msg = cls.__create_mail(report, treename) lastreport_msgid = msg['Message-ID'].strip('<>') - print('#'*120) + print('#' * 120) print('\n%s\n' % filename) - print('#'*120) + print('#' * 120) print(report) with open(filename, 'w') as out: gen = email.generator.Generator(out) gen.flatten(msg) - print('#'*120) + print('#' * 120) - print("Review the reports in %s and sent them using \"git send-email --from='Regzbot (on behalf of Thorsten Leemhuis) ' --suppress-cc=self --to '' %s/*\"" % (tmpdirname, tmpdirname)) - answer = input('Enter c to confirm you sent the report, anything else to abort: ') + print( + "Review the reports in %s and sent them using \"git send-email --from='Regzbot (on behalf of Thorsten Leemhuis) ' --suppress-cc=self --to '' %s/*\"" + % (tmpdirname, tmpdirname) + ) + answer = input( + 'Enter c to confirm you sent the report, anything else to abort: ' + ) if answer.lower() != 'c': return regzbot.RegzbotState.set('lastreport_mainline_gmtime', report_gmtime) 
regzbot.RegzbotState.set('lastreport_mainline_msgid', lastreport_msgid) lastreport_msgid = regzbot.RegzbotState.get('lastreport_mainline_msgid') - logger.debug("[report] generated") + logger.debug('[report] generated') diff --git a/regzbot/export_web.py b/regzbot/export_web.py index 81ccd09..8f56e49 100644 --- a/regzbot/export_web.py +++ b/regzbot/export_web.py @@ -12,7 +12,6 @@ import yattag import regzbot -from regzbot import days_delta from regzbot import PatchKind logger = regzbot.logger @@ -33,16 +32,32 @@ def html(self, yattagdoc, regression): yattagdoc.text(self.subject) if self.author: - with yattagdoc.tag('div', style="padding-left: 3em;"): - yattagdoc.asis('' % self.gmtime) + with yattagdoc.tag('div', style='padding-left: 3em;'): + yattagdoc.asis( + '' + % self.gmtime + ) yattagdoc.text(', by %s' % self.author) - if self.repsrcid and self.entry and regzbot.RegActivityMonitor.ismonitored(self.entry, self.regid, self.repsrcid): - yattagdoc.text(" (monitored)") - if self._for_regression and not regression.regid == self._for_regression.regid: - yattagdoc.text(" [") - with yattagdoc.tag('a', href='../regression/%s/' % self._for_regression._actim_report.entry): - yattagdoc.text("via dup") - yattagdoc.text("]") + if ( + self.repsrcid + and self.entry + and regzbot.RegActivityMonitor.ismonitored( + self.entry, self.regid, self.repsrcid + ) + ): + yattagdoc.text(' (monitored)') + if ( + self._for_regression + and not regression.regid == self._for_regression.regid + ): + yattagdoc.text(' [') + with yattagdoc.tag( + 'a', + href='../regression/%s/' + % self._for_regression._actim_report.entry, + ): + yattagdoc.text('via dup') + yattagdoc.text(']') return yattagdoc @@ -59,22 +74,24 @@ def html(self, yattagdoc): else: regzbotcmd = self.regzbotcmd with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s" % regzbotcmd) + yattagdoc.text('%s' % regzbotcmd) else: with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s" % self.subject) + yattagdoc.text('%s' % 
self.subject) - with yattagdoc.tag('div', style="padding-left: 2em;"): - yattagdoc.asis('' % self.gmtime) + with yattagdoc.tag('div', style='padding-left: 2em;'): + yattagdoc.asis( + '' % self.gmtime + ) if self.author: - yattagdoc.text(", by %s" % self.author) + yattagdoc.text(', by %s' % self.author) return yattagdoc def html_event(self, yattagdoc): - with yattagdoc.tag('div', style="padding-left: 3em;"): - yattagdoc.text("History: ") + with yattagdoc.tag('div', style='padding-left: 3em;'): + yattagdoc.text('History: ') with yattagdoc.tag('i'): if self.regzbotcmd: @@ -83,14 +100,16 @@ def html_event(self, yattagdoc): else: regzbotcmd = self.regzbotcmd with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s; " % regzbotcmd) + yattagdoc.text('%s; ' % regzbotcmd) else: with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s; " % self.subject) + yattagdoc.text('%s; ' % self.subject) - yattagdoc.asis('' % self.gmtime) + yattagdoc.asis( + '' % self.gmtime + ) if self.author: - yattagdoc.text(", by %s" % self.author) + yattagdoc.text(', by %s' % self.author) return yattagdoc @@ -107,37 +126,51 @@ def set_for_regression(self, regression): def html(self, yattagdoc, regression): with yattagdoc.tag('i'): with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s" % self.subject) - with yattagdoc.tag('div', style="padding-left: 2em;"): - yattagdoc.asis('' % self.gmtime) - yattagdoc.text(", by %s" % self.author) + yattagdoc.text('%s' % self.subject) + with yattagdoc.tag('div', style='padding-left: 2em;'): + yattagdoc.asis( + '' % self.gmtime + ) + yattagdoc.text(', by %s' % self.author) if int(self.patchkind) > 0: - if (PatchKind.DIFF | PatchKind.SUBJECT | PatchKind.SIGNEDOFF) in self.patchkind: + if ( + PatchKind.DIFF | PatchKind.SUBJECT | PatchKind.SIGNEDOFF + ) in self.patchkind: yattagdoc.text('; contains a signed-off patch') elif (PatchKind.DIFF | PatchKind.SUBJECT) in self.patchkind: yattagdoc.text('; contains a proper patch') else: yattagdoc.text('; 
contains a simple patch') - if self._for_regression and not regression.regid == self._for_regression.regid: - yattagdoc.text(" [") + if ( + self._for_regression + and not regression.regid == self._for_regression.regid + ): + yattagdoc.text(' [') repsrc = self._for_regression._actim_report.repsrc - with yattagdoc.tag('a', href='../regression/%s/%s/' % (repsrc.generic_name, repsrc.entryid)): - yattagdoc.text("via dup") - yattagdoc.text("]") + with yattagdoc.tag( + 'a', + href='../regression/%s/%s/' % (repsrc.generic_name, repsrc.entryid), + ): + yattagdoc.text('via dup') + yattagdoc.text(']') return yattagdoc def html_event(self, yattagdoc): - with yattagdoc.tag('div', style="padding-left: 3em;"): - yattagdoc.text("Activity: ") + with yattagdoc.tag('div', style='padding-left: 3em;'): + yattagdoc.text('Activity: ') with yattagdoc.tag('i'): with yattagdoc.tag('a', href=self.url()): - yattagdoc.text("%s" % self.subject) - yattagdoc.text("; ") - yattagdoc.asis('' % self.gmtime) - yattagdoc.text(", by %s" % self.author) + yattagdoc.text('%s' % self.subject) + yattagdoc.text('; ') + yattagdoc.asis( + '' % self.gmtime + ) + yattagdoc.text(', by %s' % self.author) if int(self.patchkind) > 0: - if (PatchKind.DIFF | PatchKind.SUBJECT | PatchKind.SIGNEDOFF) in self.patchkind: + if ( + PatchKind.DIFF | PatchKind.SUBJECT | PatchKind.SIGNEDOFF + ) in self.patchkind: yattagdoc.text('; contains a signed-off patch') elif (PatchKind.DIFF | PatchKind.SUBJECT) in self.patchkind: yattagdoc.text('; contains a proper patch') @@ -157,11 +190,15 @@ def __init__(self, *args): def event_intro(self): html = yattag.Doc() - html.text("Regression ") + html.text('Regression ') report_repsrc = self._actim_report.repsrc - with html.tag('a', href='../regression/%s/%s/' % (report_repsrc.generic_name, report_repsrc.entryid)): + with html.tag( + 'a', + href='../regression/%s/%s/' + % (report_repsrc.generic_name, report_repsrc.entryid), + ): html.text(self.subject) - html.text(":") + html.text(':') return 
html def events(self, gmtime_offset, event_intro): @@ -176,7 +213,7 @@ def fresh(gmtime): yield { 'gmtime': actievent.gmtime, 'htmlevent_intro': event_intro, - 'htmlevent_content': actievent.html_event(yattag.Doc()) + 'htmlevent_content': actievent.html_event(yattag.Doc()), } for histevent in reversed(self._histevents): @@ -185,26 +222,26 @@ def fresh(gmtime): yield { 'gmtime': histevent.gmtime, 'htmlevent_intro': event_intro, - 'htmlevent_content': histevent.html_event(yattag.Doc()) + 'htmlevent_content': histevent.html_event(yattag.Doc()), } def html(self): def cell1(yattagdoc): - with yattagdoc.tag('div', style="padding-left: 1em;"): + with yattagdoc.tag('div', style='padding-left: 1em;'): with yattagdoc.tag('li'): if self._introduced_url: with yattagdoc.tag('a', href=self._introduced_url): yattagdoc.text(self._introduced_short) if self._introduced_presentable: with yattagdoc.tag('div'): - yattagdoc.text("(%s)" % - self._introduced_presentable) + yattagdoc.text('(%s)' % self._introduced_presentable) else: yattagdoc.text(self._introduced_short) if self._introduced_presentable: with yattagdoc.tag('div'): - yattagdoc.text("(within %s)" % - self._introduced_presentable) + yattagdoc.text( + '(within %s)' % self._introduced_presentable + ) def cell2(yattagdoc): def add_introduced(yattagdoc): @@ -212,18 +249,24 @@ def add_introduced(yattagdoc): def __resolution(): if self.solved_reason == 'fixed': - return('resolution') + return 'resolution' elif self.solved_reason == 'to_be_fixed': - return('fix incoming') - elif self.solved_reason == 'resolved' or self.solved_reason == 'invalid': - return('marked resolved') + return 'fix incoming' + elif ( + self.solved_reason == 'resolved' or self.solved_reason == 'invalid' + ): + return 'marked resolved' elif self.solved_reason == 'inconclusive': - return('marked inconclusive') + return 'marked inconclusive' elif self.solved_reason is not None: - return('%s' % self.solved_reason) - - with yattagdoc_line.tag('details', 
id='regression-details', style="padding-left: 1em;"): - with yattagdoc_line.tag('summary', style="list-style-position: outside;"): + return '%s' % self.solved_reason + + with yattagdoc_line.tag( + 'details', id='regression-details', style='padding-left: 1em;' + ): + with yattagdoc_line.tag( + 'summary', style='list-style-position: outside;' + ): # hide dupes that where just used for forward a bug and have no relevant activity: regids_just_forwarded = [] @@ -241,7 +284,7 @@ def __resolution(): actireports = list() for regression in self, *self._dupes: - if not regression.regid in regids_just_forwarded: + if regression.regid not in regids_just_forwarded: actireports.append(regression._actim_report) actireports_sorted = sorted(actireports, key=lambda x: x.gmtime) actireports = None @@ -269,7 +312,12 @@ def __resolution(): yattagdoc.text(' by ') len_actireports = len(actireports_sorted) for actireport in actireports_sorted: - with yattagdoc.tag('a', href=regzbot.ReportSource.get_by_id(actireport.repsrcid).url(actireport.entry)): + with yattagdoc.tag( + 'a', + href=regzbot.ReportSource.get_by_id( + actireport.repsrcid + ).url(actireport.entry), + ): authorname = actireport.authorname if not authorname: authorname = 'Unknown' @@ -294,26 +342,38 @@ def __resolution(): latest_event = self._histevents[-1] report_repsrc = actireport.repsrc - with yattagdoc.tag('a', href='../regression/%s/%s/' % (report_repsrc.generic_name, report_repsrc.entryid)): + with yattagdoc.tag( + 'a', + href='../regression/%s/%s/' + % (report_repsrc.generic_name, report_repsrc.entryid), + ): yattagdoc.text('activity') yattagdoc.text(': ') if earliest_event is latest_event: - yattagdoc.asis('' % - earliest_event.gmtime) + yattagdoc.asis( + '' + % earliest_event.gmtime + ) else: with yattagdoc.tag('a', href=earliest_event.url()): - yattagdoc.asis('' % - earliest_event.gmtime) + yattagdoc.asis( + '' + % earliest_event.gmtime + ) yattagdoc.text(' & ') with yattagdoc.tag('a', href=latest_event.url()): - 
yattagdoc.asis('' % - latest_event.gmtime) + yattagdoc.asis( + '' + % latest_event.gmtime + ) if self.poked: yattagdoc.text('; poked ') with yattagdoc.tag('a', href=self.poked.url()): - yattagdoc.asis('' % - self.poked.gmtime) + yattagdoc.asis( + '' + % self.poked.gmtime + ) yattagdoc.text('.') @@ -325,7 +385,7 @@ def __resolution(): else: yattagdoc.text(', ') with yattagdoc.tag('a', href=link.link): - yattagdoc.text("[%s]" % counter) + yattagdoc.text('[%s]' % counter) if self.solved_reason or self.solved_duplicateof: if self.solved_reason: @@ -335,7 +395,9 @@ def __resolution(): else: yattagdoc.text(', ') - with yattagdoc.tag('mark', style='background-color: #D0D0D0;'): + with yattagdoc.tag( + 'mark', style='background-color: #D0D0D0;' + ): yattagdoc.text('[') if self.solved_url is None: yattagdoc.text(__resolution()) @@ -350,11 +412,24 @@ def __resolution(): else: yattagdoc.text(', ') - regression_duplicateof = self.get_by_regid(self.solved_duplicateof) - __dup_report_repsrc = regression_duplicateof._actim_report.repsrc - with yattagdoc.tag('a', href='https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/' % (__dup_report_repsrc.generic_name, __dup_report_repsrc.entryid)): - with yattagdoc.tag('mark', style='background-color: #D0D0D0;'): - yattagdoc.text("[is a duplicate]") + regression_duplicateof = self.get_by_regid( + self.solved_duplicateof + ) + __dup_report_repsrc = ( + regression_duplicateof._actim_report.repsrc + ) + with yattagdoc.tag( + 'a', + href='https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/' + % ( + __dup_report_repsrc.generic_name, + __dup_report_repsrc.entryid, + ), + ): + with yattagdoc.tag( + 'mark', style='background-color: #D0D0D0;' + ): + yattagdoc.text('[is a duplicate]') else: for actievent in reversed(actievents_sorted): if int(actievent.patchkind) == 0: @@ -369,7 +444,11 @@ def __resolution(): yattagdoc.text('[') with yattagdoc.tag('a', href=actievent.url()): yattagdoc.text('patch') - if (PatchKind.DIFF | 
PatchKind.SUBJECT | PatchKind.SIGNEDOFF) in actievent.patchkind: + if ( + PatchKind.DIFF + | PatchKind.SUBJECT + | PatchKind.SIGNEDOFF + ) in actievent.patchkind: yattagdoc.text(' (SOB)') yattagdoc.text(']') break @@ -382,10 +461,12 @@ def __resolution(): yattagdoc.text('On back burner: ') with yattagdoc.tag('i'): with yattagdoc.tag('a', href=self.backburner.report_url()): - yattagdoc.text("%s" % self.backburner.subject) - with yattagdoc.tag('div', style="padding-left: 1em;"): - yattagdoc.asis('' % - self.backburner.gmtime) + yattagdoc.text('%s' % self.backburner.subject) + with yattagdoc.tag('div', style='padding-left: 1em;'): + yattagdoc.asis( + '' + % self.backburner.gmtime + ) yattagdoc.text(', by %s' % self.backburner.author) for counter, link in enumerate(links_sorted, start=1): @@ -402,29 +483,39 @@ def __resolution(): def solved_explanation(yattagdoc): with yattagdoc.tag('i'): - if self.solved_reason == 'fixed' or self.solved_reason == 'to_be_fixed': + if ( + self.solved_reason == 'fixed' + or self.solved_reason == 'to_be_fixed' + ): if self.solved_entry: - yattagdoc.text('%s' % - self.solved_entry[:12]) + yattagdoc.text('%s' % self.solved_entry[:12]) if self.solved_subject and self.solved_entry: - yattagdoc.text(' ("%s")' % - self.solved_subject) + yattagdoc.text(' ("%s")' % self.solved_subject) elif self.solved_subject: - yattagdoc.text('%s' % - self.solved_subject) + yattagdoc.text('%s' % self.solved_subject) elif self.solved_subject: yattagdoc.text(self.solved_subject) + if self.solved_url is None: solved_explanation(yattagdoc) else: with yattagdoc.tag('a', href=self.solved_url): solved_explanation(yattagdoc) - with yattagdoc.tag('div', style="padding-left: 3em;"): - yattagdoc.asis('' % - self.solved_gmtime) - if self.solved_entry and self._solved_entry_presentable and not self._solved_entry_presentable == self.solved_entry[:12]: - yattagdoc.text(' in %s' % self._solved_entry_presentable) + with yattagdoc.tag('div', style='padding-left: 3em;'): + 
yattagdoc.asis( + '' + % self.solved_gmtime + ) + if ( + self.solved_entry + and self._solved_entry_presentable + and not self._solved_entry_presentable + == self.solved_entry[:12] + ): + yattagdoc.text( + ' in %s' % self._solved_entry_presentable + ) else: latest_shown = False earlier_patches = 0 @@ -435,14 +526,22 @@ def solved_explanation(yattagdoc): latest_shown = True yattagdoc.text('Latest patch: ') with yattagdoc.tag('a', href=actievent.url()): - yattagdoc.text("%s" % actievent.subject) - with yattagdoc.tag('div', style="padding-left: 3em;"): - yattagdoc.asis('' % - actievent.gmtime) - yattagdoc.text(", by %s; " % actievent.author) - if (PatchKind.DIFF | PatchKind.SUBJECT | PatchKind.SIGNEDOFF) in actievent.patchkind: + yattagdoc.text('%s' % actievent.subject) + with yattagdoc.tag('div', style='padding-left: 3em;'): + yattagdoc.asis( + '' + % actievent.gmtime + ) + yattagdoc.text(', by %s; ' % actievent.author) + if ( + PatchKind.DIFF + | PatchKind.SUBJECT + | PatchKind.SIGNEDOFF + ) in actievent.patchkind: yattagdoc.text('signed-off-by present') - elif (PatchKind.DIFF | PatchKind.SUBJECT) in actievent.patchkind: + elif ( + PatchKind.DIFF | PatchKind.SUBJECT + ) in actievent.patchkind: yattagdoc.text('proper patch') else: yattagdoc.text('simple patch') @@ -455,25 +554,33 @@ def solved_explanation(yattagdoc): earlier_patches += 1 with yattagdoc.tag('a', href=actievent.url()): - yattagdoc.text("%s" % earlier_patches) + yattagdoc.text('%s' % earlier_patches) with yattagdoc_line.tag('p'): if len(actievents_sorted) == 0: pass elif len(actievents_sorted) > 5: - yattagdoc.text("Latest five known activities:") + yattagdoc.text('Latest five known activities:') else: - yattagdoc.text("All known activities:") - with yattagdoc_line.tag('ul', style='padding-left: 5px; margin-top: -1em;'): + yattagdoc.text('All known activities:') + with yattagdoc_line.tag( + 'ul', style='padding-left: 5px; margin-top: -1em;' + ): for actievent in reversed(actievents_sorted[-5:]): - with 
yattagdoc.tag('li', style="list-style-position: inside;"): + with yattagdoc.tag( + 'li', style='list-style-position: inside;' + ): actievent.html(yattagdoc, self) with yattagdoc_line.tag('p'): - yattagdoc.text("Regzbot command history:") - with yattagdoc_line.tag('ul', style='padding-left: 5px; margin-top: -1em;'): + yattagdoc.text('Regzbot command history:') + with yattagdoc_line.tag( + 'ul', style='padding-left: 5px; margin-top: -1em;' + ): for histevent in reversed(self._histevents): - with yattagdoc.tag('li', style="list-style-position: inside;"): + with yattagdoc.tag( + 'li', style='list-style-position: inside;' + ): histevent.html(yattagdoc) if self.solved_reason: @@ -481,14 +588,22 @@ def solved_explanation(yattagdoc): with yattagdoc.tag('p'): yattagdoc.text( - "When fixing, add this to the commit message to make regzbot notice patch postings and commits to resolve the issue:") - with yattagdoc_line.tag('ul', style='padding-left: 1em; margin-top: -1em; font-style: italic; list-style-type: none;'): + 'When fixing, add this to the commit message to make regzbot notice patch postings and commits to resolve the issue:' + ) + with yattagdoc_line.tag( + 'ul', + style='padding-left: 1em; margin-top: -1em; font-style: italic; list-style-type: none;', + ): with yattagdoc.tag('li'): # use self._introduced_url here, as that will avoid ranges and commits we could not find if self._introduced_url: - commitsummary = regzbot.GitTree.commit_summary(self.introduced) - yattagdoc.text('Fixes: %s ("%s")' % ( - self.introduced[0:12], commitsummary)) + commitsummary = regzbot.GitTree.commit_summary( + self.introduced + ) + yattagdoc.text( + 'Fixes: %s ("%s")' + % (self.introduced[0:12], commitsummary) + ) reports = [] for regression in self, *self._dupes: @@ -496,7 +611,8 @@ def solved_explanation(yattagdoc): continue actireport = regression._actim_report reportedlink = regzbot.ReportSource.get_by_id( - actireport.repsrcid).url(actireport.entry, redirector=True) + 
actireport.repsrcid + ).url(actireport.entry, redirector=True) if not actireport.authorname: # there are a few old database entry where authorname and authormail are missing # just ignore them @@ -504,9 +620,14 @@ def solved_explanation(yattagdoc): elif actireport.authorname == 'Unknown': reportedby = '' elif not actireport.authormail: - reportedby = "Reported-by: %s" % (actireport.authorname, ) + reportedby = 'Reported-by: %s' % ( + actireport.authorname, + ) else: - reportedby = "Reported-by: %s <%s>" % (actireport.authorname, actireport.authormail) + reportedby = 'Reported-by: %s <%s>' % ( + actireport.authorname, + actireport.authormail, + ) reports.append((reportedby, reportedlink)) reports.sort(key=lambda x: x[0]) @@ -523,7 +644,7 @@ def solved_explanation(yattagdoc): yattagdoc.text(report[1]) yattagdoc_line = yattag.Doc() - with yattagdoc_line.tag('td', style="width: 200px;"): + with yattagdoc_line.tag('td', style='width: 200px;'): cell1(yattagdoc_line) with yattagdoc_line.tag('td'): cell2(yattagdoc_line) @@ -552,17 +673,30 @@ def cell2(yattagdoc): yattagdoc.text(self.note) # put everything together - with yattagdoc.tag('tr', style="vertical-align:top;"): + with yattagdoc.tag('tr', style='vertical-align:top;'): with yattagdoc.tag('td'): cell1(yattagdoc) with yattagdoc.tag('td'): cell2(yattagdoc) -class RegExportWeb(): +class RegExportWeb: eventslist = list() - def __init__(self, repsrc, gmtime_report, gmtime_filed, gmtime_activity, gmtime_solved, treename, versionline, solved_reason, backburner, identified, htmlsnippet): + def __init__( + self, + repsrc, + gmtime_report, + gmtime_filed, + gmtime_activity, + gmtime_solved, + treename, + versionline, + solved_reason, + backburner, + identified, + htmlsnippet, + ): self.repsrc = repsrc self.gmtime_report = gmtime_report self.gmtime_filed = gmtime_filed @@ -577,15 +711,18 @@ def __init__(self, repsrc, gmtime_report, gmtime_filed, gmtime_activity, gmtime_ @staticmethod def outpage_header(yattagdoc, htmlpages, 
pagename, relpath=''): - yattagdoc.asis('' % relpath) + yattagdoc.asis( + '' + % relpath + ) with yattagdoc.tag('h1'): yattagdoc.text('Linux kernel regression status') with yattagdoc.tag('h2'): description = None for htmlpage in htmlpages: # make it obvious that stable is about longterm, too - if htmlpage == "stable": - description = "stable/longterm" + if htmlpage == 'stable': + description = 'stable/longterm' else: description = htmlpage @@ -593,27 +730,38 @@ def outpage_header(yattagdoc, htmlpages, pagename, relpath=''): # entries are also show on the previous pages if htmlpage == 'new' or htmlpage == 'all' or htmlpage == 'resolved': yattagdoc.text('•') - yattagdoc.asis(" ") + yattagdoc.asis(' ') # print if htmlpage == pagename: - yattagdoc.text("[%s]" % description) + yattagdoc.text('[%s]' % description) else: with yattagdoc.tag('a', href='../%s%s/' % (relpath, htmlpage)): - yattagdoc.text("[%s]" % description) + yattagdoc.text('[%s]' % description) # seperate entries by space, unless we are at the end if not htmlpage == htmlpage[-1]: - yattagdoc.asis(" ") + yattagdoc.asis(' ') @staticmethod - def outpage_table_span(yattagdoc, description, tablecolumns, horizontal_rule=False, strong=False, heading=False): + def outpage_table_span( + yattagdoc, + description, + tablecolumns, + horizontal_rule=False, + strong=False, + heading=False, + ): with yattagdoc.tag('tr'): if heading: - htmltag = "tr" + htmltag = 'tr' else: - htmltag = "td" - with yattagdoc.tag(htmltag, colspan=tablecolumns, style="text-align: left; padding-bottom: 1em;"): + htmltag = 'td' + with yattagdoc.tag( + htmltag, + colspan=tablecolumns, + style='text-align: left; padding-bottom: 1em;', + ): # with yattagdoc.tag(htmltag, style="text-align: left; padding-bottom: 1em;"): if horizontal_rule: yattagdoc.asis('
') @@ -626,47 +774,58 @@ def outpage_table_span(yattagdoc, description, tablecolumns, horizontal_rule=Fal @staticmethod def outpage_table_header_unhandled(yattagdoc): - with yattagdoc.tag('tr', style="vertical-align:top;"): - with yattagdoc.tag('th', align='left', style="width: 10px;"): - yattagdoc.text("id") + with yattagdoc.tag('tr', style='vertical-align:top;'): + with yattagdoc.tag('th', align='left', style='width: 10px;'): + yattagdoc.text('id') with yattagdoc.tag('th', align='left'): - yattagdoc.text("place") + yattagdoc.text('place') @staticmethod def outpage_footer(yattagdoc, count): yattagdoc.asis('
') with yattagdoc.tag('p', style='text-align: center'): - yattagdoc.text("[compiled by ") + yattagdoc.text('[compiled by ') with yattagdoc.tag('a', href='https://linux-regtracking.leemhuis.info'): - yattagdoc.text("regzbot") + yattagdoc.text('regzbot') currenttime = datetime.datetime.now(datetime.timezone.utc) - yattagdoc.text(" on %s (UTC). " % - currenttime.strftime("%Y-%m-%d %H:%M:%S")) - - yattagdoc.text("Wanna know more about regzbot? Then check out its ") - with yattagdoc.tag('a', href='https://gitlab.com/knurd42/regzbot/-/blob/main/docs/getting_started.md'): - yattagdoc.text("getting started guide") - yattagdoc.text(" or its ") - with yattagdoc.tag('a', href='https://gitlab.com/knurd42/regzbot/-/blob/main/docs/reference.md'): - yattagdoc.text("reference documentation") - yattagdoc.text(".]") + yattagdoc.text(' on %s (UTC). ' % currenttime.strftime('%Y-%m-%d %H:%M:%S')) + + yattagdoc.text('Wanna know more about regzbot? Then check out its ') + with yattagdoc.tag( + 'a', + href='https://gitlab.com/knurd42/regzbot/-/blob/main/docs/getting_started.md', + ): + yattagdoc.text('getting started guide') + yattagdoc.text(' or its ') + with yattagdoc.tag( + 'a', + href='https://gitlab.com/knurd42/regzbot/-/blob/main/docs/reference.md', + ): + yattagdoc.text('reference documentation') + yattagdoc.text('.]') if count > 0: with yattagdoc.tag('p', style='text-align: center'): - yattagdoc.text("[recently ") + yattagdoc.text('[recently ') with yattagdoc.tag('a', href='../unhandled/'): if count == 1: yattagdoc.text( - "%s event occurred that regzbot was unable to handle" % count) + '%s event occurred that regzbot was unable to handle' + % count + ) else: yattagdoc.text( - "%s events occurred that regzbot was unable to handle" % count) - yattagdoc.text("]") + '%s events occurred that regzbot was unable to handle' + % count + ) + yattagdoc.text(']') @staticmethod def outpage_head(yattagdoc): - yattagdoc.asis('') + yattagdoc.asis( + '' + ) return yattagdoc @staticmethod @@ 
-688,17 +847,21 @@ def create_individual_page(cls, htmlpages, unhandled_count, regression): with yattagdoc.tag('body'): yattagdoc.asis('') cls.outpage_header(yattagdoc, htmlpages, None) - with yattagdoc.tag('table', style="width:100%;"): - with yattagdoc.tag('tr', style="vertical-align:top;"): - yattagdoc.asis( - regression.htmlsnippet.getvalue()) - with yattagdoc.tag('td', style="width: 100px;"): + with yattagdoc.tag('table', style='width:100%;'): + with yattagdoc.tag('tr', style='vertical-align:top;'): + yattagdoc.asis(regression.htmlsnippet.getvalue()) + with yattagdoc.tag('td', style='width: 100px;'): yattagdoc.text(regression.treename) - yattagdoc.asis("") + yattagdoc.asis( + "" + ) cls.outpage_footer(yattagdoc, unhandled_count) - cls.outpage_write('regression/%s/%s' % - (regression.repsrc.generic_name, regression.repsrc.entryid), yattagdoc) + cls.outpage_write( + 'regression/%s/%s' + % (regression.repsrc.generic_name, regression.repsrc.entryid), + yattagdoc, + ) @classmethod def createpage_compilation(cls, htmlpages, unhandled_count, categories, pagename): @@ -707,23 +870,31 @@ def createpage_compilation(cls, htmlpages, unhandled_count, categories, pagename cls.outpage_head(yattagdoc) with yattagdoc.tag('body'): cls.outpage_header(yattagdoc, htmlpages, pagename) - with yattagdoc.tag('table', style="width:100%;"): + with yattagdoc.tag('table', style='width:100%;'): for category in categories.keys(): # print section header cls.outpage_table_span( - yattagdoc, categories[category]['desc'], tablecolumns, horizontal_rule=True, strong=True, ) + yattagdoc, + categories[category]['desc'], + tablecolumns, + horizontal_rule=True, + strong=True, + ) # check if the list for this section is empty if not categories[category]['entries']: - cls.outpage_table_span(yattagdoc, "none known by regzbot", tablecolumns) + cls.outpage_table_span( + yattagdoc, 'none known by regzbot', tablecolumns + ) # add html for regressionweb in categories[category]['entries']: - with 
yattagdoc.tag('tr', style="vertical-align:top;"): - yattagdoc.asis( - regressionweb.htmlsnippet.getvalue()) - if (pagename == 'all' - or pagename == 'resolved' - or pagename == 'inconclusive'): - with yattagdoc.tag('td', style="width: 100px;"): + with yattagdoc.tag('tr', style='vertical-align:top;'): + yattagdoc.asis(regressionweb.htmlsnippet.getvalue()) + if ( + pagename == 'all' + or pagename == 'resolved' + or pagename == 'inconclusive' + ): + with yattagdoc.tag('td', style='width: 100px;'): yattagdoc.text(regressionweb.treename) cls.outpage_footer(yattagdoc, unhandled_count) @@ -754,8 +925,10 @@ def create_events(cls, directory, unhandled_count, htmlpages, eventslist): @staticmethod def create_scriptfile_reldate(): - with open(os.path.join(regzbot.WEBPAGEDIR, 'relativetime.js'), 'w') as outputfile: - outputfile.write('''const timeAgoCurrentDate = new Date() + with open( + os.path.join(regzbot.WEBPAGEDIR, 'relativetime.js'), 'w' + ) as outputfile: + outputfile.write("""const timeAgoCurrentDate = new Date() const timeAgoFormatter = new Intl.RelativeTimeFormat("en", { numeric: 'always' @@ -783,7 +956,7 @@ def create_scriptfile_reldate(): } } // timeAgo("2022-10-05T08:00:00Z") -''') +""") @classmethod def create_unhandled(cls, directory, htmlpages): @@ -802,9 +975,9 @@ def create_unhandled(cls, directory, htmlpages): unhandled_events += 1 if unhandled_events == 0: - yattagdoc.text("No unhandled events known as of now") + yattagdoc.text('No unhandled events known as of now') else: - with yattagdoc.tag('table', style="width:100%;"): + with yattagdoc.tag('table', style='width:100%;'): cls.outpage_table_header_unhandled(yattagdoc) yattagdoc.asis(unhandled_html.getvalue()) @@ -821,12 +994,14 @@ def categorize(cls, regressionlist): if regzbot.LATEST_VERSIONS['indevelopment'] == False: indevelopment_descriptive = '%s-post' % regzbot.LATEST_VERSIONS['latest'] else: - indevelopment_descriptive = '%s-rc' % regzbot.LATEST_VERSIONS['indevelopment'] + indevelopment_descriptive 
= ( + '%s-rc' % regzbot.LATEST_VERSIONS['indevelopment'] + ) categories = { 'next': { 'identified': { - 'desc': "culprit identified", + 'desc': 'culprit identified', 'entries': list(), }, 'default': { @@ -840,27 +1015,39 @@ def categorize(cls, regressionlist): }, 'mainline': { 'identified_indevelopment': { - 'desc': "current cycle (%s.. aka %s), culprit identified" % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), + 'desc': 'current cycle (%s.. aka %s), culprit identified' + % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), 'entries': list(), }, 'unidentified_indevelopment': { - 'desc': "current cycle (%s.. aka %s), unknown culprit" % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), + 'desc': 'current cycle (%s.. aka %s), unknown culprit' + % (regzbot.LATEST_VERSIONS['latest'], indevelopment_descriptive), 'entries': list(), }, 'identified_latest': { - 'desc': "previous cycle (%s..%s), culprit identified, with activity in the past three months" % (regzbot.LATEST_VERSIONS['previous'], regzbot.LATEST_VERSIONS['latest']), + 'desc': 'previous cycle (%s..%s), culprit identified, with activity in the past three months' + % ( + regzbot.LATEST_VERSIONS['previous'], + regzbot.LATEST_VERSIONS['latest'], + ), 'entries': list(), }, 'identified_old': { - 'desc': "older cycles (..%s), culprit identified, with activity in the past three months" % regzbot.LATEST_VERSIONS['previous'], + 'desc': 'older cycles (..%s), culprit identified, with activity in the past three months' + % regzbot.LATEST_VERSIONS['previous'], 'entries': list(), }, 'unidentified_latest': { - 'desc': "previous cycle (%s..%s), unknown culprit, with activity in the past three weeks" % (regzbot.LATEST_VERSIONS['previous'], regzbot.LATEST_VERSIONS['latest']), + 'desc': 'previous cycle (%s..%s), unknown culprit, with activity in the past three weeks' + % ( + regzbot.LATEST_VERSIONS['previous'], + regzbot.LATEST_VERSIONS['latest'], + ), 'entries': list(), }, 
'unidentified_old': { - 'desc': 'older cycles (..%s), unknown culprit, with activity in the past three weeks' % regzbot.LATEST_VERSIONS['previous'], + 'desc': 'older cycles (..%s), unknown culprit, with activity in the past three weeks' + % regzbot.LATEST_VERSIONS['previous'], 'entries': list(), }, 'default': { @@ -874,7 +1061,7 @@ def categorize(cls, regressionlist): }, 'stable': { 'identified': { - 'desc': "culprit identified", + 'desc': 'culprit identified', 'entries': list(), }, 'default': { @@ -906,32 +1093,53 @@ def categorize(cls, regressionlist): categories['inconclusive']['default']['entries'].append(regression) elif regression.gmtime_solved: categories['resolved']['default']['entries'].append(regression) - elif regression.backburner and \ - last_activity_days < 180: # things on backburner are allowed to get a little older - categories[regression.treename]['backburner']['entries'].append(regression) + elif ( + regression.backburner and last_activity_days < 180 + ): # things on backburner are allowed to get a little older + categories[regression.treename]['backburner']['entries'].append( + regression + ) elif last_activity_days > 90: categories['inconclusive']['default']['entries'].append(regression) elif regression.treename == 'next' or regression.treename == 'stable': if regression.identified: - categories[regression.treename]['identified']['entries'].append(regression) + categories[regression.treename]['identified']['entries'].append( + regression + ) else: - categories[regression.treename]['default']['entries'].append(regression) + categories[regression.treename]['default']['entries'].append( + regression + ) elif regression.treename == 'mainline': if regression.versionline == 'indevelopment': if regression.identified: - categories[regression.treename]['identified_indevelopment']['entries'].append(regression) + categories[regression.treename]['identified_indevelopment'][ + 'entries' + ].append(regression) else: - 
categories[regression.treename]['unidentified_indevelopment']['entries'].append(regression) + categories[regression.treename]['unidentified_indevelopment'][ + 'entries' + ].append(regression) elif regression.versionline == 'latest' and regression.identified: - categories[regression.treename]['identified_latest']['entries'].append(regression) + categories[regression.treename]['identified_latest'][ + 'entries' + ].append(regression) elif regression.versionline == 'latest' and last_activity_days < 21: - categories[regression.treename]['unidentified_latest']['entries'].append(regression) + categories[regression.treename]['unidentified_latest'][ + 'entries' + ].append(regression) elif regression.identified: - categories[regression.treename]['identified_old']['entries'].append(regression) + categories[regression.treename]['identified_old']['entries'].append( + regression + ) elif last_activity_days < 21: - categories[regression.treename]['unidentified_old']['entries'].append(regression) + categories[regression.treename]['unidentified_old'][ + 'entries' + ].append(regression) else: - categories[regression.treename]['default']['entries'].append(regression) + categories[regression.treename]['default']['entries'].append( + regression + ) else: # this should not happen, but in case it does due to later code changes: categories['mainline']['default']['entries'].append(regression) @@ -944,46 +1152,63 @@ def regression_to_json(cls, regression): for h in regression._histevents: regzbot_commands.append( { - "command": h.regzbotcmd, - "author": h.author, - "url": h.url(), - "timestamp": datetime.datetime.fromtimestamp(h.gmtime, datetime.timezone.utc).isoformat(), + 'command': h.regzbotcmd, + 'author': h.author, + 'url': h.url(), + 'timestamp': datetime.datetime.fromtimestamp( + h.gmtime, datetime.timezone.utc + ).isoformat(), } ) if regression.solved_gmtime: - solved_tstamp = datetime.datetime.fromtimestamp(regression.solved_gmtime, datetime.timezone.utc).isoformat() + 
solved_tstamp = datetime.datetime.fromtimestamp( + regression.solved_gmtime, datetime.timezone.utc + ).isoformat() else: solved_tstamp = None solved = { - "reason": regression.solved_reason, - "commit": regression.solved_entry, - "subject": regression.solved_subject, - "url": regression.solved_url, - "git_readable": regression._solved_entry_presentable, - "timestamp": solved_tstamp, + 'reason': regression.solved_reason, + 'commit': regression.solved_entry, + 'subject': regression.solved_subject, + 'url': regression.solved_url, + 'git_readable': regression._solved_entry_presentable, + 'timestamp': solved_tstamp, } return { - "id": regression.regid, - "introduced": regression.introduced, - "subject": regression.subject, - "tree": regression.treename, - "timestamp": datetime.datetime.fromtimestamp(regression.gmtime_filed, datetime.timezone.utc).isoformat(), - "url_regzbot": "https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/" % (regression._actim_report.repsrc.generic_name, regression._actim_report.repsrc.entryid), - "identified": regression.identified, - "introduced_url": regression._introduced_url, - "regzbot_commands": regzbot_commands, - "solved": solved, + 'id': regression.regid, + 'introduced': regression.introduced, + 'subject': regression.subject, + 'tree': regression.treename, + 'timestamp': datetime.datetime.fromtimestamp( + regression.gmtime_filed, datetime.timezone.utc + ).isoformat(), + 'url_regzbot': 'https://linux-regtracking.leemhuis.info/regzbot/regression/%s/%s/' + % ( + regression._actim_report.repsrc.generic_name, + regression._actim_report.repsrc.entryid, + ), + 'identified': regression.identified, + 'introduced_url': regression._introduced_url, + 'regzbot_commands': regzbot_commands, + 'solved': solved, } @classmethod def compile(cls): - logger.debug("[webpages] generating") + logger.debug('[webpages] generating') # these are the pages we are going to create - htmlpages = ('next', 'mainline', 'stable', - 'new', 'all', 'resolved', 
'inconclusive') + htmlpages = ( + 'next', + 'mainline', + 'stable', + 'new', + 'all', + 'resolved', + 'inconclusive', + ) # handle this page first, as we need something from it anyway unhandled_count = cls.create_unhandled(regzbot.WEBPAGEDIR, htmlpages) @@ -991,9 +1216,11 @@ def compile(cls): # gather everything we need regressionslist = list() eventslist = list() - events_gmtime_offset = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - 604800 + events_gmtime_offset = ( + int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - 604800 + ) if regzbot.is_running_citesting('offline'): - events_gmtime_offset = 604800*52*10 + events_gmtime_offset = 604800 * 52 * 10 json_data = list() for regression in RegressionWeb.get_all(): @@ -1003,16 +1230,34 @@ def compile(cls): eventslist.append(event) gmtime_solved = None - if regression.solved_reason == 'fixed' or regression.solved_reason == 'resolved' or regression.solved_reason == 'invalid' or regression.solved_reason == 'inconclusive' or regression.solved_reason == 'duplicateof' or regression.solved_duplicateof: + if ( + regression.solved_reason == 'fixed' + or regression.solved_reason == 'resolved' + or regression.solved_reason == 'invalid' + or regression.solved_reason == 'inconclusive' + or regression.solved_reason == 'duplicateof' + or regression.solved_duplicateof + ): gmtime_solved = regression.solved_gmtime if regression._actievents: last_activity = regression._actievents[-1].gmtime else: last_activity = regression._histevents[-1].gmtime - regressionslist.append(cls(regression._actim_report.repsrc, regression.gmtime, regression.gmtime_filed, - last_activity, gmtime_solved, regression.treename, - regression.versionline, regression.solved_reason, regression.backburner, regression.identified, - regression.html())) + regressionslist.append( + cls( + regression._actim_report.repsrc, + regression.gmtime, + regression.gmtime_filed, + last_activity, + gmtime_solved, + regression.treename, + 
regression.versionline, + regression.solved_reason, + regression.backburner, + regression.identified, + regression.html(), + ) + ) cls.create_scriptfile_reldate() @@ -1034,11 +1279,11 @@ def compile(cls): # create the page listing new regressions, sorted by date categories = { 'next': { - 'desc': "next", + 'desc': 'next', 'entries': list(), }, 'mainline': { - 'desc': "mainline", + 'desc': 'mainline', 'entries': list(), }, 'stable': { @@ -1049,8 +1294,12 @@ def compile(cls): for regression in regressionslist: if regression.gmtime_solved: continue - filed_days = (datetime.datetime.now(datetime.timezone.utc) - - datetime.datetime.fromtimestamp(regression.gmtime_filed, datetime.timezone.utc)).days + filed_days = ( + datetime.datetime.now(datetime.timezone.utc) + - datetime.datetime.fromtimestamp( + regression.gmtime_filed, datetime.timezone.utc + ) + ).days if filed_days < 7: categories[regression.treename]['entries'].append(regression) else: @@ -1065,18 +1314,26 @@ def compile(cls): regressionslist.sort(key=lambda x: x.gmtime_activity, reverse=True) categories = cls.categorize(regressionslist) for pagename in categories.keys(): - cls.createpage_compilation(htmlpages, unhandled_count, categories[pagename], pagename) + cls.createpage_compilation( + htmlpages, unhandled_count, categories[pagename], pagename + ) # create default with open(os.path.join(regzbot.WEBPAGEDIR, 'index.html'), 'w') as outputfile: - outputfile.write("") + outputfile.write( + "" + ) if not regzbot.is_running_citesting(): - publishscript = os.path.join(pathlib.Path.home(), '.local/share/regzbot/', 'pusblishwebsites.sh') + publishscript = os.path.join( + pathlib.Path.home(), '.local/share/regzbot/', 'pusblishwebsites.sh' + ) if os.path.exists(publishscript): os.system(publishscript) - with open(os.path.join(regzbot.WEBPAGEDIR, 'regressions.json'), 'w') as jsonfile: + with open( + os.path.join(regzbot.WEBPAGEDIR, 'regressions.json'), 'w' + ) as jsonfile: jsonfile.write(json.dumps(json_data)) - 
logger.debug("[webpages] generated") + logger.debug('[webpages] generated') diff --git a/regzbot/testing.py b/regzbot/testing.py index e1cbb1f..9cb2a43 100644 --- a/regzbot/testing.py +++ b/regzbot/testing.py @@ -22,7 +22,7 @@ SUPPORTED_TESTMODES = { 'offline': regzbot.testing_offline, 'online': regzbot.testing_online, - 'trackers': regzbot.testing_trackers + 'trackers': regzbot.testing_trackers, } logger = regzbot.logger @@ -30,14 +30,18 @@ def __get_resultfiles(path_testdata, path_tmpdir): if not os.path.isdir(path_testdata): - logger.critical("Directory for expexted results and template %s doesn't exist. Aborting.", - path_testdata) + logger.critical( + "Directory for expexted results and template %s doesn't exist. Aborting.", + path_testdata, + ) sys.exit(1) results_expected = {} results_generated = {} for mode in SUPPORTED_TESTMODES.keys(): - results_expected[mode] = os.path.join(path_testdata, 'expected/results-%s.csv' % mode) + results_expected[mode] = os.path.join( + path_testdata, 'expected/results-%s.csv' % mode + ) results_generated[mode] = os.path.join(path_tmpdir, 'testresults-%s.csv' % mode) return results_expected, results_generated @@ -46,9 +50,10 @@ def __get_resultfiles(path_testdata, path_tmpdir): def check_results(results_expected, results_generated): def ask_user(results_expected, results_generated): answer = input( - "Enter 'm' to call meld; enter 'a' or 'y' to accept differences; simply hit enter to move on.") + "Enter 'm' to call meld; enter 'a' or 'y' to accept differences; simply hit enter to move on." 
+ ) if answer.lower() == 'm': - os.system("meld %s %s" % (results_expected, results_generated)) + os.system('meld %s %s' % (results_expected, results_generated)) return False if answer.lower() == 'a' or answer.lower() == 'y': shutil.copyfile(results_generated, results_expected) @@ -56,7 +61,12 @@ def ask_user(results_expected, results_generated): with open(results_expected, 'r') as file_expected: with open(results_generated, 'r') as file_generated: - if regzbot.db_diff(file_expected, file_generated, "%s" % results_expected, "%s" % results_generated): + if regzbot.db_diff( + file_expected, + file_generated, + '%s' % results_expected, + '%s' % results_generated, + ): sys.stdout.write('#######\n') while not ask_user(results_expected, results_generated): pass @@ -64,14 +74,12 @@ def ask_user(results_expected, results_generated): def init(tmpdir): if len(glob.glob(os.path.join(tmpdir, '*'))) > 0: - logger.critical( - "aborting, the directory %s is not empty", tmpdir) + logger.critical('aborting, the directory %s is not empty', tmpdir) sys.exit(1) def run(testmodes, testdatapath, tmpdir): - results_expected, results_generated = __get_resultfiles( - testdatapath, tmpdir) + results_expected, results_generated = __get_resultfiles(testdatapath, tmpdir) for mode in SUPPORTED_TESTMODES.keys(): if testmodes[mode]: diff --git a/regzbot/testing_offline.py b/regzbot/testing_offline.py index 536f255..8a95f59 100644 --- a/regzbot/testing_offline.py +++ b/regzbot/testing_offline.py @@ -31,13 +31,14 @@ emaildirs = dict() MAIL_TEMPLATE = string.Template( - '''Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce commodo + """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce commodo justo ac mi ornare mollis id rutrum felis. ${tag} Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Fusce commodo -justo ac mi ornare mollis id rutrum felis.''') +justo ac mi ornare mollis id rutrum felis.""" +) class Emaildir: @@ -50,7 +51,17 @@ def __init__(self, recipient, path_tmpdirectory, name): self.directory = os.path.join(path_tmpdirectory, name) os.mkdir(self.directory) - def create_email(self, funcname, tag, *, cc=None, subject=None, messageid=None, replyto=None, references=None): + def create_email( + self, + funcname, + tag, + *, + cc=None, + subject=None, + messageid=None, + replyto=None, + references=None, + ): if messageid is None: messageid = '' % funcname if replyto: @@ -59,8 +70,7 @@ def create_email(self, funcname, tag, *, cc=None, subject=None, messageid=None, new_references = [] if references: for reference in references: - new_references.append( - '' % reference) + new_references.append('' % reference) new_references.append(replyto) references = new_references @@ -68,21 +78,22 @@ def create_email(self, funcname, tag, *, cc=None, subject=None, messageid=None, if subject: msg['Subject'] = subject else: - msg['Subject'] = "%s: Lorem ipsum dolor sit amet" % funcname + msg['Subject'] = '%s: Lorem ipsum dolor sit amet' % funcname msg.set_content(MAIL_TEMPLATE.substitute(tag=tag)) msg['From'] = 'Regzbot testingmail ' msg['To'] = self.recipient if cc: msg['Cc'] = cc msg['Date'] = email.utils.formatdate( - timeval=(self._startdate + (Emaildir._count * 86400))) + timeval=(self._startdate + (Emaildir._count * 86400)) + ) msg['Message-Id'] = messageid if replyto: msg['In-Reply-To'] = replyto msg['References'] = ' '.join(references) - #filename = os.path.join(self.directory, "%s.regzbot" % messageid.strip('<>')) + # filename = os.path.join(self.directory, "%s.regzbot" % messageid.strip('<>')) # with open(filename, 'w') as out: # gen = email.generator.Generator(out) # gen.flatten(msg) @@ -90,9 +101,11 @@ def create_email(self, funcname, tag, *, cc=None, subject=None, messageid=None, # if replyto: # os.symlink(os.path.join(self.directory, "%s.regzbot" % 
messageid.strip('<>')), filename) - filename = os.path.join(self.directory, "%s.regzbot" % messageid.strip('<>')) + filename = os.path.join(self.directory, '%s.regzbot' % messageid.strip('<>')) if replyto: - filename_replyto = os.path.join(self.directory, "%s.regzbot" % replyto.strip('<>')) + filename_replyto = os.path.join( + self.directory, '%s.regzbot' % replyto.strip('<>') + ) os.symlink(filename_replyto, filename) mbox = mailbox.mbox(filename) mbox.add(mailbox.mboxMessage(msg)) @@ -101,21 +114,24 @@ def create_email(self, funcname, tag, *, cc=None, subject=None, messageid=None, Emaildir._count += 1 def clear(self): - for emailtestingfile in pathlib.Path(self.directory).glob("*.regzbot"): + for emailtestingfile in pathlib.Path(self.directory).glob('*.regzbot'): emailtestingfile.unlink() def process(self): filenames = sorted(pathlib.Path(self.directory).iterdir(), key=os.path.getmtime) for file in filenames: regzbot.mailin.processmsg_file( - self.repsrc, os.path.join(self.directory, file)) + self.repsrc, os.path.join(self.directory, file) + ) def reset(self): Emaildir._count = 0 class TestingGitTree: - def __init__(self, path_testdata, path_tmprepos, reponame, startdate, branchname='master'): + def __init__( + self, path_testdata, path_tmprepos, reponame, startdate, branchname='master' + ): self._count = 0 self._branchname = branchname self._description = reponame + '_' + branchname @@ -138,7 +154,9 @@ def __init_repo(self, path_tmprepos, reponame, startdate): if os.path.isdir(repodir): if not os.path.isdir(os.path.join(repodir, '.git')): logger.critical( - "Directory %s exist, but does not contain .git/. Aborting." % repodir) + 'Directory %s exist, but does not contain .git/. Aborting.' 
+ % repodir + ) sys.exit(1) else: os.mkdir(repodir) @@ -147,8 +165,8 @@ def __init_repo(self, path_tmprepos, reponame, startdate): # make sure the global git config doesn't interfer with self.repo.config_writer() as gitcw: - gitcw.set_value("user", "name", "Regzbot Testing") - gitcw.set_value("user", "email", "nobody@example.com") + gitcw.set_value('user', 'name', 'Regzbot Testing') + gitcw.set_value('user', 'email', 'nobody@example.com') # is this a brand new repo? try: @@ -179,19 +197,26 @@ def __check_sha1sum(self, commit, commitnr): if commitnr >= len(self.hashes_known): self.__add_unknown_hash(str(self.repo.head.commit)) elif str(commit) != self.hashes_known[commitnr]: - logger.critical("Sha1 for the latest commit (%s) to %s doesn't match expected sha1 (%s)." % ( - commit, self._branchname, self.hashes_known[commitnr])) - logger.critical("Aborting") + logger.critical( + "Sha1 for the latest commit (%s) to %s doesn't match expected sha1 (%s)." + % (commit, self._branchname, self.hashes_known[commitnr]) + ) + logger.critical('Aborting') sys.exit(1) def __commit(self, commitmsg): if commitmsg is None: - commitmsg = "This is a %s commit for testing regzbot, the content doesn't matter." % self._description + commitmsg = ( + "This is a %s commit for testing regzbot, the content doesn't matter." 
+ % self._description + ) commitdate = datetime.datetime.fromtimestamp( - self._startdate + self._count, tz=datetime.timezone.utc) + self._startdate + self._count, tz=datetime.timezone.utc + ) self.repo.index.commit( - commitmsg, author_date=commitdate, commit_date=commitdate) + commitmsg, author_date=commitdate, commit_date=commitdate + ) self.__check_sha1sum(self.repo.head.commit, self._count) self._count += 1 @@ -204,11 +229,11 @@ def __add_unknown_hash(self, sha1sum): self.hashes_known.append(sha1sum) def __init_branch(self): - filename = os.path.join(self.repo.working_dir, - self._description + '-' + str(self._count)) - file = open(filename, "x") - file.write( - "This is a file for testing regzbot, the content doesn't matter.\n") + filename = os.path.join( + self.repo.working_dir, self._description + '-' + str(self._count) + ) + file = open(filename, 'x') + file.write("This is a file for testing regzbot, the content doesn't matter.\n") file.close() self.repo.index.add([filename]) self.__commit(None) @@ -227,23 +252,30 @@ def mv(self, commitmsg=None): # make sure our branch is checked out self.__checkout_branch() - fileold = os.path.join(self.repo.working_dir, - self._description + '-' + str(self._count - 1)) - filenew = os.path.join(self.repo.working_dir, - self._description + '-' + str(self._count)) + fileold = os.path.join( + self.repo.working_dir, self._description + '-' + str(self._count - 1) + ) + filenew = os.path.join( + self.repo.working_dir, self._description + '-' + str(self._count) + ) self.repo.git.mv(fileold, filenew) self.__commit(commitmsg) def process(self): self.__checkout_branch() - def tag(self, tag, message="This is a tag for testing regzbot, the content doesn't matter."): + def tag( + self, + tag, + message="This is a tag for testing regzbot, the content doesn't matter.", + ): # make sure our branch is checked out self.__checkout_branch() # self.repo.create_tag(tag, message=message) - commitdate = "%s" % datetime.datetime.fromtimestamp( - 
self._startdate + self._count, tz=datetime.timezone.utc) + commitdate = '%s' % datetime.datetime.fromtimestamp( + self._startdate + self._count, tz=datetime.timezone.utc + ) with self.repo.git.custom_environment(GIT_COMMITTER_DATE=commitdate): self.repo.create_tag(tag, message=message) @@ -271,23 +303,23 @@ def emaildirs_clear(): def populatetree_linux(gittree_testing): gittree_testing.mv() - gittree_testing.tag("v1.8") + gittree_testing.tag('v1.8') gittree_testing.mv() - gittree_testing.tag("v1.9-rc1") + gittree_testing.tag('v1.9-rc1') gittree_testing.mv() - gittree_testing.tag("v1.9-rc2") + gittree_testing.tag('v1.9-rc2') gittree_testing.mv() - gittree_testing.tag("v1.9") + gittree_testing.tag('v1.9') gittree_testing.mv() - gittree_testing.tag("v1.10-rc1") + gittree_testing.tag('v1.10-rc1') gittree_testing.mv() - gittree_testing.tag("v1.10-rc2") + gittree_testing.tag('v1.10-rc2') gittree_testing.mv() - gittree_testing.tag("v1.10") + gittree_testing.tag('v1.10') gittree_testing.mv() - gittree_testing.tag("v1.11-rc1") + gittree_testing.tag('v1.11-rc1') gittree_testing.mv() - gittree_testing.tag("v1.11-rc2") + gittree_testing.tag('v1.11-rc2') gittree_testing.mv() @@ -307,9 +339,9 @@ def gittree_testing_prep_linux_next(repo): def populatetree_linux_next(gittree_testing): gittree_testing.mv() - gittree_testing.tag("next-20190101") + gittree_testing.tag('next-20190101') gittree_testing.mv() - gittree_testing.tag("next-20190102") + gittree_testing.tag('next-20190102') def gittree_testing_prep_linux_stable(repo): @@ -318,67 +350,95 @@ def gittree_testing_prep_linux_stable(repo): repo.create_head('linux-rolling-stable') # these are the one we care about - repo.create_head('linux-1.8.y', commit="v1.8") - repo.create_head('linux-1.10.y', commit="v1.10") + repo.create_head('linux-1.8.y', commit='v1.8') + repo.create_head('linux-1.10.y', commit='v1.10') def populatetree_linux_stable18(gittree_testing): gittree_testing.mv() - gittree_testing.tag("v1.8.1") + 
gittree_testing.tag('v1.8.1') gittree_testing.mv() - gittree_testing.tag("v1.8.2") + gittree_testing.tag('v1.8.2') def populatetree_linux_stable110(gittree_testing): gittree_testing.mv() - gittree_testing.tag("v1.10.1") + gittree_testing.tag('v1.10.1') gittree_testing.mv() - gittree_testing.tag("v1.10.2") + gittree_testing.tag('v1.10.2') def init_repodirs(path_tmprepos, path_testdata): # prep path_upstream_tmprepos = os.path.join(path_tmprepos, 'upstream') path_downstream_tmprepos = os.path.join(path_tmprepos, 'downstream') - logger.debug("Creating git repos in %s and pulling them to %s", - path_upstream_tmprepos, path_downstream_tmprepos) + logger.debug( + 'Creating git repos in %s and pulling them to %s', + path_upstream_tmprepos, + path_downstream_tmprepos, + ) os.mkdir(path_tmprepos) os.mkdir(path_downstream_tmprepos) os.mkdir(path_upstream_tmprepos) # create linux-mainline repo - regzbot.GitTree.add('mainline', 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/', - 'cgit', 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/', 'master', 0) + regzbot.GitTree.add( + 'mainline', + 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/', + 'cgit', + 'https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/', + 'master', + 0, + ) gittrees_testing['mainline'] = TestingGitTree( - path_testdata, path_upstream_tmprepos, 'mainline', 1546300800) + path_testdata, path_upstream_tmprepos, 'mainline', 1546300800 + ) gittrees_testing['mainline'].repo.clone( - os.path.join(path_downstream_tmprepos, 'mainline')) + os.path.join(path_downstream_tmprepos, 'mainline') + ) update_gittrees() populatetree_linux(gittrees_testing['mainline']) # create linux-next repo - regzbot.GitTree.add('next', 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/', - 'cgit', 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/', 'master', -1) + regzbot.GitTree.add( + 'next', + 
'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/', + 'cgit', + 'https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/', + 'master', + -1, + ) gittree_testing_prep_linux_next( - gittrees_testing['mainline'].clone(path_upstream_tmprepos, 'next')) + gittrees_testing['mainline'].clone(path_upstream_tmprepos, 'next') + ) gittrees_testing['next'] = TestingGitTree( - path_testdata, path_upstream_tmprepos, 'next', 1577836800) - gittrees_testing['next'].repo.clone( - os.path.join(path_downstream_tmprepos, 'next')) + path_testdata, path_upstream_tmprepos, 'next', 1577836800 + ) + gittrees_testing['next'].repo.clone(os.path.join(path_downstream_tmprepos, 'next')) update_gittrees() populatetree_linux_next(gittrees_testing['next']) # create linux-stable repo with two branches gittree_testing_prep_linux_stable( - gittrees_testing['mainline'].clone(path_upstream_tmprepos, 'stable')) - regzbot.GitTree.add('stable', 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git', 'cgit', - 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/', r'linux-[0-9][0-9]*.[0-9][0-9]*\.y', 1) + gittrees_testing['mainline'].clone(path_upstream_tmprepos, 'stable') + ) + regzbot.GitTree.add( + 'stable', + 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git', + 'cgit', + 'https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/', + r'linux-[0-9][0-9]*.[0-9][0-9]*\.y', + 1, + ) gittrees_testing['linux-1.8.y'] = TestingGitTree( - path_testdata, path_upstream_tmprepos, 'stable', 1609459200, 'linux-1.8.y') + path_testdata, path_upstream_tmprepos, 'stable', 1609459200, 'linux-1.8.y' + ) gittrees_testing['linux-1.8.y'].repo.clone( - os.path.join(path_downstream_tmprepos, 'stable')) + os.path.join(path_downstream_tmprepos, 'stable') + ) gittrees_testing['linux-1.10.y'] = TestingGitTree( - path_testdata, path_upstream_tmprepos, 'stable', 1609459200, 'linux-1.10.y') + path_testdata, path_upstream_tmprepos, 
'stable', 1609459200, 'linux-1.10.y' + ) update_gittrees() populatetree_linux_stable110(gittrees_testing['linux-1.10.y']) populatetree_linux_stable18(gittrees_testing['linux-1.8.y']) @@ -386,23 +446,34 @@ def init_repodirs(path_tmprepos, path_testdata): def init_mailsdir(path_tmpmail): - logger.debug( - "Creating directory %s for holding emails files", path_tmpmail) + logger.debug('Creating directory %s for holding emails files', path_tmpmail) regzbot._TESTING['emaildirs'] = [] os.mkdir(path_tmpmail) emaildirs['primary'] = Emaildir('regressions@example.com', path_tmpmail, 'primary') regzbot._TESTING['emaildirs'].append(emaildirs['primary'].directory) - repsrcid = regzbot.ReportSource.add('Nonexistand primary mailinglist for regzbot testing', 2, - emaildirs['primary'].directory, - 'lore', 'https://lore.kernel.org/regressions/', identifiers='regressions@example.com') + repsrcid = regzbot.ReportSource.add( + 'Nonexistand primary mailinglist for regzbot testing', + 2, + emaildirs['primary'].directory, + 'lore', + 'https://lore.kernel.org/regressions/', + identifiers='regressions@example.com', + ) - emaildirs['secondary'] = Emaildir('linux-kernel@example.com', path_tmpmail, 'secondary') + emaildirs['secondary'] = Emaildir( + 'linux-kernel@example.com', path_tmpmail, 'secondary' + ) regzbot._TESTING['emaildirs'].append(emaildirs['secondary'].directory) - repsrcid = regzbot.ReportSource.add('Nonexistand secondary mailinglist for regzbot testing', 1, - emaildirs['secondary'].directory, - 'lore', 'https://lore.kernel.org/lkml/', identifiers='linux-kernel@example.com') + repsrcid = regzbot.ReportSource.add( + 'Nonexistand secondary mailinglist for regzbot testing', + 1, + emaildirs['secondary'].directory, + 'lore', + 'https://lore.kernel.org/lkml/', + identifiers='linux-kernel@example.com', + ) regzbot.ReportSource.add('generic', 99, '', 'generic', '') @@ -411,12 +482,16 @@ def init(tmpdir, testdatadir): regzbot.set_citesting('offline') _, databasedir, gittreesdir, _ = 
regzbot.basicressources_get_dirs( - tmpdir=tmpdir, databasedir=os.path.join(tmpdir, 'db-offlinetsts')) + tmpdir=tmpdir, databasedir=os.path.join(tmpdir, 'db-offlinetsts') + ) mailsdir = os.path.join(tmpdir, 'mails') regzbot.db_create(databasedir) - regzbot.basicressources_init(tmpdir=tmpdir, gittreesdir=os.path.join( - gittreesdir, 'downstream'), databasedir=os.path.join(tmpdir, 'db-offlinetsts')) + regzbot.basicressources_init( + tmpdir=tmpdir, + gittreesdir=os.path.join(gittreesdir, 'downstream'), + databasedir=os.path.join(tmpdir, 'db-offlinetsts'), + ) init_repodirs(gittreesdir, testdatadir) init_mailsdir(mailsdir) @@ -449,10 +524,10 @@ def run(resultfilename, tmpdir, testdatadir): innercount = 0 while '%s_%s_%s' % (testfuncprefix, outercount, innercount) in dir(this): # run test - callfunction = getattr(this, '%s_%s_%s' % - (testfuncprefix, outercount, innercount)) - instructions = callfunction( - 'test_%s_%s' % (outercount, innercount)) + callfunction = getattr( + this, '%s_%s_%s' % (testfuncprefix, outercount, innercount) + ) + instructions = callfunction('test_%s_%s' % (outercount, innercount)) # process created testdata if instructions: @@ -465,8 +540,7 @@ def run(resultfilename, tmpdir, testdatadir): update_gittrees() # write results - resultfile.write('[%s_%s_%s]\n' % - (testfuncprefix, outercount, innercount)) + resultfile.write('[%s_%s_%s]\n' % (testfuncprefix, outercount, innercount)) for data in regzbot.export_csv.dumpall_csv(): resultfile.write(data) resultfile.write('\n') @@ -489,216 +563,290 @@ def run(resultfilename, tmpdir, testdatadir): def offltest_0_0(funcname): logger.info('%s: create a mainline regression' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email(funcname, '#regzbot introduced: v1.8..v1.9-rc1') return ['mailchk'] def offltest_0_1(funcname): replyto = 'test_0_0' - logger.info('%s: specifying the culprit for the regression created in %s' % ( - 
funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[5], - replyto=replyto) + logger.info( + '%s: specifying the culprit for the regression created in %s' + % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[5], + replyto=replyto, + ) return ['mailchk'] def offltest_0_2(funcname): replyto = 'test_0_0' - logger.info('%s: update title for the regression created in %s' % - (funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot summary: test_0_0: updated title (set by %s)" % funcname, - replyto=replyto) + logger.info( + '%s: update title for the regression created in %s' % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot summary: test_0_0: updated title (set by %s)' % funcname, + replyto=replyto, + ) return ['mailchk'] def offltest_0_3(funcname): logger.info( - '%s: create a second mainline regression and mark it immediately as duplicate' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1") + '%s: create a second mainline regression and mark it immediately as duplicate' + % funcname + ) + emaildirs['primary'].create_email(funcname, '#regzbot introduced: v1.8..v1.9-rc1') replyto = funcname - emaildirs['primary'].create_email("%s_1" % funcname, "#regzbot duplicate: https://lore.kernel.org/regressions/regzbot-testing-test_0_0@example.com", - replyto=replyto) + emaildirs['primary'].create_email( + '%s_1' % funcname, + '#regzbot duplicate: https://lore.kernel.org/regressions/regzbot-testing-test_0_0@example.com', + replyto=replyto, + ) return ['mailchk'] def offltest_0_4(funcname): replyto = 'test_0_0' - logger.info('%s: mark regression created in %s as fixed with a non-existing commit which has a comment' % - (funcname, replyto)) + logger.info( + '%s: mark regression created in %s as fixed with 
a non-existing commit which has a comment' + % (funcname, replyto) + ) # - emaildirs['primary'].create_email(funcname, "#regzbot fixed-by: 4169881b9e0781b2286dc94e4cb731982c5371aa Testcomment to fixed-by", - replyto=replyto) + emaildirs['primary'].create_email( + funcname, + '#regzbot fixed-by: 4169881b9e0781b2286dc94e4cb731982c5371aa Testcomment to fixed-by', + replyto=replyto, + ) return ['mailchk'] def offltest_0_5(funcname): replyto = 'test_0_0' - logger.info('%s: mark regression created in %s as fixed with with a commit that is actually existing' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot fixed-by: %s" % gittrees_testing['mainline'].hashes_known[6], - replyto=replyto) + logger.info( + '%s: mark regression created in %s as fixed with with a commit that is actually existing' + % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot fixed-by: %s' % gittrees_testing['mainline'].hashes_known[6], + replyto=replyto, + ) return ['mailchk'] def offltest_0_6(funcname): replyto = funcname logger.info( - '%s: send a mail which serves as report for a regression created by a reply later using ^introduced' % funcname) + '%s: send a mail which serves as report for a regression created by a reply later using ^introduced' + % funcname + ) + emaildirs['primary'].create_email(funcname, 'Nothing to see here, move along') emaildirs['primary'].create_email( - funcname, "Nothing to see here, move along") - emaildirs['primary'].create_email("%s_1" % funcname, "#regzbot ^introduced: v1.8..v1.9-rc1", - replyto=replyto) + '%s_1' % funcname, '#regzbot ^introduced: v1.8..v1.9-rc1', replyto=replyto + ) return ['mailchk'] def offltest_0_7(funcname): replyto = 'test_0_6' - logger.info('%s: mark the regression created in %s as resolved' % - (funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot resolve: some reason", - replyto=replyto) + logger.info( + '%s: mark the regression created in %s as resolved' 
% (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, '#regzbot resolve: some reason', replyto=replyto + ) return ['mailchk'] def offltest_0_8(funcname): logger.info( - '%s: create a fourth mainline regression CCed to the secondary list' % funcname) + '%s: create a fourth mainline regression CCed to the secondary list' % funcname + ) emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1", cc=emaildirs['secondary'].recipient) + funcname, + '#regzbot introduced: v1.8..v1.9-rc1', + cc=emaildirs['secondary'].recipient, + ) return ['mailchk'] def offltest_0_9(funcname): logger.info( - "%s: send a mail which serves as report for a regression created by a reply later using 'introduced ^'" % funcname) + "%s: send a mail which serves as report for a regression created by a reply later using 'introduced ^'" + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here, move along") + '%s_%s' % (funcname, subcounter), 'Nothing to see here, move along' + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here either, move along", - replyto="%s_%s" % (funcname, subcounter - 1)) + '%s_%s' % (funcname, subcounter), + 'Nothing to see here either, move along', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1 /", - replyto="%s_%s" % (funcname, subcounter - 1), references=("%s_0" % funcname, )) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: v1.8..v1.9-rc1 /', + replyto='%s_%s' % (funcname, subcounter - 1), + references=('%s_0' % funcname,), + ) return ['mailchk'] def offltest_0_10(funcname): replyto = 'test_0_9_0' logger.info( - "%s: send a mail with a regzbot command, but is not added as an activity due to #regzbot ignore-activity" % funcname) + '%s: send a mail with a regzbot 
command, but is not added as an activity due to #regzbot ignore-activity' + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot title: updated title, set by %s_%s\n\n#regzbot ignore-activity" - % (funcname, subcounter), replyto=replyto) + '%s_%s' % (funcname, subcounter), + '#regzbot title: updated title, set by %s_%s\n\n#regzbot ignore-activity' + % (funcname, subcounter), + replyto=replyto, + ) return ['mailchk'] def offltest_0_11(funcname): replyto = 'test_0_9_0' - logger.info( - "%s: try 'regzbot poke'" % funcname) + logger.info("%s: try 'regzbot poke'" % funcname) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot poke", replyto=replyto) + '%s_%s' % (funcname, subcounter), '#regzbot poke', replyto=replyto + ) return ['mailchk'] def offltest_0_12(funcname): replyto = 'test_0_0' - logger.info( - "%s: try 'regzbot from'" % funcname) + logger.info("%s: try 'regzbot from'" % funcname) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot from Some N. Ice Person ", replyto=replyto) + '%s_%s' % (funcname, subcounter), + '#regzbot from Some N. 
Ice Person ', + replyto=replyto, + ) return ['mailchk'] def offltest_0_13(funcname): replyto = 'test_0_9_0' - logger.info( - "%s: try 'regzbot backburner'" % funcname) + logger.info("%s: try 'regzbot backburner'" % funcname) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot backburner Some reason", replyto=replyto) + '%s_%s' % (funcname, subcounter), + '#regzbot backburner Some reason', + replyto=replyto, + ) return ['mailchk'] def offltest_0_14(funcname): replyto = 'test_0_9_0' - logger.info( - "%s: try 'regzbot unbackburn'" % funcname) + logger.info("%s: try 'regzbot unbackburn'" % funcname) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot unbackburn", replyto=replyto) + '%s_%s' % (funcname, subcounter), '#regzbot unbackburn', replyto=replyto + ) return ['mailchk'] def offltest_0_15(funcname): - logger.info('%s: create four additional regressions and mark them as duplicate in various way and then fix one marked that is marked as duplicate and has a duplicate' % funcname) + logger.info( + '%s: create four additional regressions and mark them as duplicate in various way and then fix one marked that is marked as duplicate and has a duplicate' + % funcname + ) subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) subcounter = 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) subcounter = 2 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) subcounter = 
3 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto = '%s_%s' % (funcname, 1) dupof = '%s_%s' % (funcname, 0) subcounter = 4 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n" % dupof, - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' + % dupof, + replyto=replyto, + ) replyto = '%s_%s' % (funcname, 3) dupof = '%s_%s' % (funcname, 2) subcounter = 5 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n" % dupof, - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' + % dupof, + replyto=replyto, + ) replyto = '%s_%s' % (funcname, 2) dupof = '%s_%s' % (funcname, 0) subcounter = 6 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n" % dupof, - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' + % dupof, + replyto=replyto, + ) replyto = '%s_%s' % (funcname, 2) subcounter = 7 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot fixed-by: %s" % gittrees_testing['mainline'].hashes_known[6], - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot fixed-by: %s' % gittrees_testing['mainline'].hashes_known[6], + 
replyto=replyto, + ) return ['mailchk'] def offltest_0_16(funcname): - logger.info('%s: check if some attribut changes from an open regression progress downwards to duplicates' % funcname) + logger.info( + '%s: check if some attribut changes from an open regression progress downwards to duplicates' + % funcname + ) subcounter = 0 replyto = 'test_0_15_3' - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot title: new title, set via a duplicate\n", - replyto='%s' % replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot title: new title, set via a duplicate\n', + replyto='%s' % replyto, + ) return ['mailchk'] @@ -707,21 +855,27 @@ def offltest_0_17(funcname): logger.info('%s: create a regression and a duplicate from it' % funcname) subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto = '%s_%s' % (funcname, subcounter) subcounter += 1 - emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), "Hello hello") + emaildirs['secondary'].create_email('%s_%s' % (funcname, subcounter), 'Hello hello') second_replyto = '%s_%s' % (funcname, subcounter) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dup-of: https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n" % second_replyto, - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n' + % second_replyto, + replyto=replyto, + ) subcounter += 1 - emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), "Hello again", - replyto=second_replyto) + emaildirs['secondary'].create_email( + '%s_%s' % (funcname, subcounter), 'Hello again', replyto=second_replyto + ) return ['mailchk'] 
@@ -729,204 +883,278 @@ def offltest_0_17(funcname): def offltest_0_18(funcname): logger.info('%s: creating a mainline regression for an arbitarily url' % funcname) emaildirs['primary'].create_email( - funcname, "#regzbot use https://bugzilla.example.com/show_bug.cgi?id=215744\n#regzbot introduced: v1.8..v1.9-rc1") + funcname, + '#regzbot use https://bugzilla.example.com/show_bug.cgi?id=215744\n#regzbot introduced: v1.8..v1.9-rc1', + ) return ['mailchk'] def offltest_0_19(funcname): logger.info( - '%s: send a mail which serves as report for a regression created by a reply later using ^introduced' % funcname) + '%s: send a mail which serves as report for a regression created by a reply later using ^introduced' + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here, move along") - replyto = "%s_%s" % (funcname, subcounter) + '%s_%s' % (funcname, subcounter), 'Nothing to see here, move along' + ) + replyto = '%s_%s' % (funcname, subcounter) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1 ^", - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: v1.8..v1.9-rc1 ^', + replyto=replyto, + ) return ['mailchk'] def offltest_0_20(funcname): logger.info( - '%s: add another report to an existing regression (which creates a new regression entry for the other report and marks it as a duplicate)' % funcname) + '%s: add another report to an existing regression (which creates a new regression entry for the other report and marks it as a duplicate)' + % funcname + ) replyto = 'test_0_19' subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot duplicate: https://bugzilla.example.com/show_bug.cgi?id=215744", - replyto=replyto) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot duplicate: 
https://bugzilla.example.com/show_bug.cgi?id=215744', + replyto=replyto, + ) return ['mailchk'] def offltest_0_21(funcname): logger.info('%s: create a regression and mark it as inconclusive', funcname) emaildirs['primary'].create_email( - "%s" % funcname, "#regzbot introduced: v1.8..v1.9-rc1\n#regzbot inconclusive: some reason") + '%s' % funcname, + '#regzbot introduced: v1.8..v1.9-rc1\n#regzbot inconclusive: some reason', + ) return ['mailchk'] def offltest_0_22(funcname): logger.info( - "%s: send a mail which serves as report for a regression created by a reply later using 'regzbot use'" % funcname) + "%s: send a mail which serves as report for a regression created by a reply later using 'regzbot use'" + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here, move along") + '%s_%s' % (funcname, subcounter), 'Nothing to see here, move along' + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here either, move along", - replyto="%s_%s" % (funcname, subcounter - 1)) + '%s_%s' % (funcname, subcounter), + 'Nothing to see here either, move along', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['secondary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot use https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n#regzbot introduced: v1.8..v1.9-rc1" % 'test_0_22_0', + '%s_%s' % (funcname, subcounter), + '#regzbot use https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n#regzbot introduced: v1.8..v1.9-rc1' + % 'test_0_22_0', ) return ['mailchk'] def offltest_0_23(funcname): logger.info( - "%s: send a mail which serves as report for a regression created by a reply later using 'regzbot use'" % funcname) + "%s: send a mail which serves as report for a regression created by a reply later using 'regzbot use'" + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, 
subcounter), "Nothing to see here, move along") + '%s_%s' % (funcname, subcounter), 'Nothing to see here, move along' + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here either, move along", - replyto="%s_%s" % (funcname, subcounter - 1)) + '%s_%s' % (funcname, subcounter), + 'Nothing to see here either, move along', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot use https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n#regzbot introduced: v1.8..v1.9-rc1" % 'test_0_23_0', - replyto="%s_%s" % (funcname, subcounter - 1)) + '%s_%s' % (funcname, subcounter), + '#regzbot use https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n#regzbot introduced: v1.8..v1.9-rc1' + % 'test_0_23_0', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "Nothing to see here either, move along", - replyto="%s_%s" % (funcname, subcounter - 1)) + '%s_%s' % (funcname, subcounter), + 'Nothing to see here either, move along', + replyto='%s_%s' % (funcname, subcounter - 1), + ) return ['mailchk'] + # create a mainline regression def offltest_1_0(funcname): logger.info('%s: creating a mainline regression' % funcname) emaildirs['primary'].create_email( - funcname, '#regzbot introduced: v1.8..v1.9-rc1 ("foo: bar baz")') + funcname, '#regzbot introduced: v1.8..v1.9-rc1 ("foo: bar baz")' + ) return ['mailchk'] def offltest_1_1(funcname): replyto = 'test_1_0' - logger.info('%s: creating a git commit that links to the regression created in %s, which should mark is as fixed' % ( - funcname, replyto)) + logger.info( + '%s: creating a git commit that links to the regression created in %s, which should mark is as fixed' + % (funcname, replyto) + ) gittrees_testing['mainline'].mv( - 'Testcommit %s\n\nLink: 
https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n' % (funcname, replyto)) + 'Testcommit %s\n\nLink: https://lore.kernel.org/lkml/regzbot-testing-%s@example.com\n' + % (funcname, replyto) + ) return ['gitchk'] def offltest_1_2(funcname): logger.info( - '%s: create a mainline regression and mark it as "fixed-by" by a commit that has not reached the repos yet' % funcname) + '%s: create a mainline regression and mark it as "fixed-by" by a commit that has not reached the repos yet' + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) # create the commit here, but don't check the repo yet (see below) as we have the commitid at hand here gittrees_testing['mainline'].mv() subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot fix: %s" % - gittrees_testing['mainline'].hashes_known[-1], replyto="%s_%s" % (funcname, subcounter - 1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot fix: %s' % gittrees_testing['mainline'].hashes_known[-1], + replyto='%s_%s' % (funcname, subcounter - 1), + ) # the second False ensure that the tree is not check yet: return ['mailchk'] def offltest_1_3(funcname): - logger.info( - '%s: land the commit to fix the regression created in ' % funcname) + logger.info('%s: land the commit to fix the regression created in ' % funcname) # in truth: now check the commit created in the last function return ['gitchk'] def offltest_1_4(funcname): logger.info( - '%s: create a mainline regression that will be fixed by a commit that shows up in next' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1") + '%s: create a mainline regression that will be fixed by a commit that shows up in next' + % funcname + ) + emaildirs['primary'].create_email(funcname, '#regzbot 
introduced: v1.8..v1.9-rc1') gittrees_testing['next'].mv( - 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' % (funcname, funcname)) + 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' + % (funcname, funcname) + ) return ['mailchk', 'gitchk'] def offltest_1_5(funcname): logger.info( - '%s: create a mainline regression and have a commit refer to in in stable' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1") + '%s: create a mainline regression and have a commit refer to in in stable' + % funcname + ) + emaildirs['primary'].create_email(funcname, '#regzbot introduced: v1.8..v1.9-rc1') gittrees_testing['linux-1.8.y'].mv( - 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' % (funcname, funcname)) + 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n' + % (funcname, funcname) + ) return ['mailchk', 'gitchk'] def offltest_1_6(funcname): logger.info( - '%s: create a mainline commit which a "Fixes: %s" for a culprit of a regression introduced later' % (funcname, gittrees_testing['mainline'].hashes_known[-1][0:12])) + '%s: create a mainline commit which a "Fixes: %s" for a culprit of a regression introduced later' + % (funcname, gittrees_testing['mainline'].hashes_known[-1][0:12]) + ) gittrees_testing['mainline'].mv( - 'Testcommit %s\n\nFixes: %s ("Foo bar")\n' % (funcname, gittrees_testing['mainline'].hashes_known[-1][0:12])) + 'Testcommit %s\n\nFixes: %s ("Foo bar")\n' + % (funcname, gittrees_testing['mainline'].hashes_known[-1][0:12]) + ) return ['gitchk'] def offltest_1_7(funcname): logger.info( - '%s: create a mainline regression with a culprit that a commit mentions in a Fixed: tag' % funcname) + '%s: create a mainline regression with a culprit that a commit mentions in a Fixed: tag' + % funcname + ) emaildirs['primary'].create_email( - funcname, 
"#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[-2]) + funcname, + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-2], + ) return ['mailchk'] def offltest_1_8(funcname): logger.info( - '%s: create a mainline commit which a Fixed: for a culprit of a present regression' % funcname) + '%s: create a mainline commit which a Fixed: for a culprit of a present regression' + % funcname + ) gittrees_testing['mainline'].mv( - 'Testcommit %s\n\nFixes: %s ("Foo bar")\n' % (funcname, gittrees_testing['mainline'].hashes_known[-2][0:12])) + 'Testcommit %s\n\nFixes: %s ("Foo bar")\n' + % (funcname, gittrees_testing['mainline'].hashes_known[-2][0:12]) + ) return ['gitchk'] def offltest_1_9(funcname): - logger.info('%s: create a regression and a duplicate from it with a unsupported url, then fix with a commit specifying the latter' % funcname) + logger.info( + '%s: create a regression and a duplicate from it with a unsupported url, then fix with a commit specifying the latter' + % funcname + ) subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto = '%s_%s' % (funcname, subcounter) subcounter += 1 link = 'https://somewhere.over.the.rainbow.example.org/regzbot-testing@example.com' - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dup-of: %s\n" % link, - replyto=replyto) - gittrees_testing['mainline'].mv( - 'Testcommit %s\n\nLink: %s\n' % (funcname, link)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: %s\n' % link, + replyto=replyto, + ) + gittrees_testing['mainline'].mv('Testcommit %s\n\nLink: %s\n' % (funcname, link)) return ['mailchk', 'gitchk'] def offltest_1_10(funcname): - logger.info('%s: create a regression with a fix specified by a git summary that is not yet in the 
repo' % funcname) - testfix_subject = "This is a test 123456789" + logger.info( + '%s: create a regression with a fix specified by a git summary that is not yet in the repo' + % funcname + ) + testfix_subject = 'This is a test 123456789' subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot introduced: v1.8..v1.9-rc1\n#regzbot fix: %s" % testfix_subject) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: v1.8..v1.9-rc1\n#regzbot fix: %s' % testfix_subject, + ) subcounter += 1 - emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), 'foo', subject=testfix_subject) + emaildirs['secondary'].create_email( + '%s_%s' % (funcname, subcounter), 'foo', subject=testfix_subject + ) gittrees_testing['mainline'].mv(commitmsg=testfix_subject) @@ -934,12 +1162,19 @@ def offltest_1_10(funcname): def offltest_1_11(funcname): - logger.info('%s: create a regression with a fix specified by a git summary that already in the tree' % funcname) - testfix_subject = "This is a test 123456789" # that's the commit for the previous test + logger.info( + '%s: create a regression with a fix specified by a git summary that already in the tree' + % funcname + ) + testfix_subject = ( + 'This is a test 123456789' # that's the commit for the previous test + ) subcounter = 0 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot introduced: v1.8..v1.9-rc1\n#regzbot fix: %s" % testfix_subject) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: v1.8..v1.9-rc1\n#regzbot fix: %s' % testfix_subject, + ) return ['mailchk'] @@ -947,121 +1182,178 @@ def offltest_1_11(funcname): def offltest_1_12(funcname): subcounter = 0 logger.info( - '%s_%s: create a mainline regression and use Closes tag to resolve it' % (funcname, subcounter)) + '%s_%s: create a mainline regression and use Closes tag to resolve it' + % (funcname, subcounter) + ) 
emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) gittrees_testing['mainline'].mv( - 'Testcommit %s_%s\n\nCloses: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com\n' % (funcname, subcounter, funcname, subcounter)) + 'Testcommit %s_%s\n\nCloses: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com\n' + % (funcname, subcounter, funcname, subcounter) + ) return ['mailchk', 'gitchk'] def offltest_2_0(funcname): - logger.info( - '%s: creating a mainline regression and add a link to it ' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8..v1.9-rc1") + logger.info('%s: creating a mainline regression and add a link to it ' % funcname) + emaildirs['primary'].create_email(funcname, '#regzbot introduced: v1.8..v1.9-rc1') subcounter = 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot link https://www.kernel.org/releases.html Linktitle", - replyto=funcname) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot link https://www.kernel.org/releases.html Linktitle', + replyto=funcname, + ) return ['mailchk'] def offltest_2_1(funcname): replyto = 'test_2_0' - logger.info('%s: update the title of the link just added to the regression created in %s' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot link https://www.kernel.org/releases.html Updated linktitle", - replyto=replyto) + logger.info( + '%s: update the title of the link just added to the regression created in %s' + % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot link https://www.kernel.org/releases.html Updated linktitle', + replyto=replyto, + ) return ['mailchk'] def offltest_2_2(funcname): replyto = 'test_2_0' - logger.info('%s: remove the link to the regression created in %s' % - 
(funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot unlink https://www.kernel.org/releases.html", - replyto=replyto) + logger.info( + '%s: remove the link to the regression created in %s' % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot unlink https://www.kernel.org/releases.html', + replyto=replyto, + ) return ['mailchk'] def offltest_2_3(funcname): replyto = 'test_2_0' - logger.info('%s: refer to the regression created in %s on another mailing list' % ( - funcname, replyto)) - emaildirs['secondary'].create_email(funcname, "https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % replyto, - subject="%s: refer to this regression on another mainling list" % funcname) + logger.info( + '%s: refer to the regression created in %s on another mailing list' + % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + 'https://lore.kernel.org/regressions/regzbot-testing-%s@example.com' % replyto, + subject='%s: refer to this regression on another mainling list' % funcname, + ) return ['mailchk'] def offltest_2_4(funcname): replyto = 'test_2_0' referencedmail = 'test_2_3' - logger.info('%s: in the regression created by %s, start to monitor the thread created in %s' % ( - funcname, replyto, referencedmail)) - emaildirs['primary'].create_email(funcname, "#regzbot monitor https://lore.kernel.org/lkml/regzbot-testing-%s@example.com" % referencedmail, - replyto=replyto) + logger.info( + '%s: in the regression created by %s, start to monitor the thread created in %s' + % (funcname, replyto, referencedmail) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot monitor https://lore.kernel.org/lkml/regzbot-testing-%s@example.com' + % referencedmail, + replyto=replyto, + ) return ['mailchk'] def offltest_2_5(funcname): replyto = 'test_2_3' - logger.info('%s: add a reply to the thread %s that is now monitored' % - (funcname, replyto)) - 
emaildirs['secondary'].create_email(funcname, "Lorem ipsum dolor sit amet", - subject="%s: reply to the thread now monitored" % funcname, - replyto=replyto) + logger.info( + '%s: add a reply to the thread %s that is now monitored' % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + 'Lorem ipsum dolor sit amet', + subject='%s: reply to the thread now monitored' % funcname, + replyto=replyto, + ) return ['mailchk'] def offltest_2_6(funcname): replyto = 'test_2_3' - logger.info('%s: use a rezbot comment in the thread %s that is now monitored' % - (funcname, replyto)) - emaildirs['secondary'].create_email(funcname, "#regzbot title new title set via a monitored thread", - subject="%s: reply to the thread now monitored with a regzbot command" % funcname, - replyto=replyto) + logger.info( + '%s: use a rezbot comment in the thread %s that is now monitored' + % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + '#regzbot title new title set via a monitored thread', + subject='%s: reply to the thread now monitored with a regzbot command' + % funcname, + replyto=replyto, + ) return ['mailchk'] def offltest_2_7(funcname): replyto = 'test_2_0' referencedmail = 'test_2_3' - logger.info('%s: in the regression created by %s, stop monitoring the thread created in %s' % ( - funcname, replyto, referencedmail)) - emaildirs['primary'].create_email(funcname, "#regzbot unmonitor https://lore.kernel.org/lkml/regzbot-testing-%s@example.com" % referencedmail, - replyto=replyto) + logger.info( + '%s: in the regression created by %s, stop monitoring the thread created in %s' + % (funcname, replyto, referencedmail) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot unmonitor https://lore.kernel.org/lkml/regzbot-testing-%s@example.com' + % referencedmail, + replyto=replyto, + ) return ['mailchk'] def offltest_2_8(funcname): replyto = 'test_2_3' - logger.info('%s: add a reply to the thread %s that is now unmonitored' % ( - 
funcname, replyto)) - emaildirs['secondary'].create_email(funcname, "Lorem ipsum dolor sit amet", - subject="%s: reply to the thread now monitored" % funcname, - replyto=replyto) + logger.info( + '%s: add a reply to the thread %s that is now unmonitored' % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + 'Lorem ipsum dolor sit amet', + subject='%s: reply to the thread now monitored' % funcname, + replyto=replyto, + ) return ['mailchk'] def offltest_2_9(funcname): replyto = 'test_2_0' - logger.info('%s: on another mainling list, refer to the regression created in %s with a link tag (will be monitored)' % ( - funcname, replyto)) - emaildirs['secondary'].create_email(funcname, "Link: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % replyto, - subject="%s: refer to this regression on another mainling list" % funcname) + logger.info( + '%s: on another mainling list, refer to the regression created in %s with a link tag (will be monitored)' + % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + 'Link: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com' + % replyto, + subject='%s: refer to this regression on another mainling list' % funcname, + ) return ['mailchk'] def offltest_2_10(funcname): replyto = 'test_2_8' - logger.info('%s: on another mainling list, add a reply to the thread %s that should be monitored now' % ( - funcname, replyto)) - emaildirs['secondary'].create_email(funcname, "Lorem ipsum dolor sit amet", - subject="%s: reply to the thread now monitored" % funcname, - replyto=replyto) + logger.info( + '%s: on another mainling list, add a reply to the thread %s that should be monitored now' + % (funcname, replyto) + ) + emaildirs['secondary'].create_email( + funcname, + 'Lorem ipsum dolor sit amet', + subject='%s: reply to the thread now monitored' % funcname, + replyto=replyto, + ) return ['mailchk'] @@ -1069,29 +1361,37 @@ def offltest_2_11(funcname): # backmonitor was 
given up, this does nothing replyto = 'test_2_0' - logger.info('%s: on another mainling list, use #regzbotot ^backmonitor to get a the regression created in %s monitored' % ( - funcname, replyto)) + logger.info( + '%s: on another mainling list, use #regzbotot ^backmonitor to get a the regression created in %s monitored' + % (funcname, replyto) + ) subcounter = 0 - emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), "Lorem ipsum dolor sit amet", - subject="%s_%s: a patch to fix a regression missing a Link: tag" % (funcname, subcounter)) + emaildirs['secondary'].create_email( + '%s_%s' % (funcname, subcounter), + 'Lorem ipsum dolor sit amet', + subject='%s_%s: a patch to fix a regression missing a Link: tag' + % (funcname, subcounter), + ) subcounter += 1 # emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), "Link: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\n\n#regzbotot ^backmonitor https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % (replyto, replyto), # subject="%s_%s: get the previous mail monitored" % (funcname, subcounter), # replyto='%s_0' % funcname) - emaildirs['secondary'].create_email("%s_%s" % (funcname, subcounter), "Lorem ipsum dolor sit amet", - replyto='%s_0' % funcname) + emaildirs['secondary'].create_email( + '%s_%s' % (funcname, subcounter), + 'Lorem ipsum dolor sit amet', + replyto='%s_0' % funcname, + ) return ['mailchk'] def offltest_2_12(funcname): subcounter = 0 replyto = 'test_2_0' - logger.info( - "%s: a reply with a simple patch'" % funcname) + logger.info("%s: a reply with a simple patch'" % funcname) subcounter += 1 - content = '''something something + content = """something something diff --git a/drivers/net/wireless/ralink/rt2x00/rt2x00usb.c b/drivers/net/wireless/ralink/rt2x00/rt2x00usb.c index e4473a551241..57c947dad036 100644 @@ -1106,13 +1406,17 @@ def offltest_2_12(funcname): + !test_bit(DEVICE_STATE_STARTED, &rt2x00dev->flags)) return true; - return 
false;''' + return false;""" emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), content, subject="%s_%s: add a mail with a simple patch" % (funcname, subcounter), replyto=replyto) + '%s_%s' % (funcname, subcounter), + content, + subject='%s_%s: add a mail with a simple patch' % (funcname, subcounter), + replyto=replyto, + ) subcounter += 1 - content = '''something something + content = """something something diff --git a/drivers/net/wireless/ralink/rt2x00/rt2x00usb.c b/drivers/net/wireless/ralink/rt2x00/rt2x00usb.c index e4473a551241..57c947dad036 100644 @@ -1127,13 +1431,18 @@ def offltest_2_12(funcname): + !test_bit(DEVICE_STATE_STARTED, &rt2x00dev->flags)) return true; - return false;''' + return false;""" emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), content, subject="[PATCH v2] %s_%s: add a mail with a simple patch" % (funcname, subcounter), replyto=replyto) + '%s_%s' % (funcname, subcounter), + content, + subject='[PATCH v2] %s_%s: add a mail with a simple patch' + % (funcname, subcounter), + replyto=replyto, + ) subcounter += 1 - content = '''something something + content = """something something From be7736582945b56e88d385ddd4a05e13e4bc6784 Mon Sep 17 00:00:00 2001 From: Alexei Starovoitov @@ -1161,10 +1470,14 @@ def offltest_2_12(funcname): reg->type = PTR_TO_XDP_SOCK; } else if (map->map_type == BPF_MAP_TYPE_SOCKMAP || -- -2.30.2''' +2.30.2""" emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), content, subject="%s_%s: add a mail with a simple patch" % (funcname, subcounter), replyto=replyto) + '%s_%s' % (funcname, subcounter), + content, + subject='%s_%s: add a mail with a simple patch' % (funcname, subcounter), + replyto=replyto, + ) return ['mailchk'] @@ -1174,170 +1487,235 @@ def offltest_2_13(funcname): subcounter = 0 logger.info( - '%s_%s: set the introduced to a commit we know and mention it in another mail' % (funcname, subcounter)) + '%s_%s: set the introduced to a commit we know and 
mention it in another mail' + % (funcname, subcounter) + ) emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), "#regzbot introduced: %s" % - gittrees_testing['mainline'].hashes_known[-1], replyto=replyto) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-1], + replyto=replyto, + ) subcounter += 1 emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), 'Foobar\nFixes: %s ("foo bar baz")\nSigned-off-by: Someone' % - gittrees_testing['mainline'].hashes_known[-1][0:12]) + '%s_%s' % (funcname, subcounter), + 'Foobar\nFixes: %s ("foo bar baz")\nSigned-off-by: Someone' + % gittrees_testing['mainline'].hashes_known[-1][0:12], + ) return ['mailchk'] def offltest_2_14(funcname): - logger.info('%s: create two regression and link to them using Link on another mailinglist' % ( - funcname)) + logger.info( + '%s: create two regression and link to them using Link on another mailinglist' + % (funcname) + ) subcounter = 0 emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto_1 = '%s_%s' % (funcname, subcounter) subcounter += 1 emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto_2 = '%s_%s' % (funcname, subcounter) subcounter += 1 emaildirs['primary'].create_email( '%s_%s' % (funcname, subcounter), - "Link: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % ( - replyto_1, replyto_2), - subject="%s_%s: refer to this regression on another mainling list" % (funcname, subcounter)) + 'Link: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com\nLink: 
https://lore.kernel.org/regressions/regzbot-testing-%s@example.com' + % (replyto_1, replyto_2), + subject='%s_%s: refer to this regression on another mainling list' + % (funcname, subcounter), + ) return ['mailchk'] def offltest_2_15(funcname): - logger.info('%s: mark a regression that monitors some threads as a regression of another' % ( - funcname)) + logger.info( + '%s: mark a regression that monitors some threads as a regression of another' + % (funcname) + ) replyto = 'test_2_0' dupof = 'test_2_14_0' subcounter = 0 emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), "#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % dupof, - replyto=replyto) + '%s_%s' % (funcname, subcounter), + '#regzbot dup-of: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com' + % dupof, + replyto=replyto, + ) replyto_1 = '%s_%s' % (funcname, subcounter) return ['mailchk'] def offltest_2_16(funcname): - logger.info('%s: create a regression and refer to it on another list using a closes tag (will be monitored)' % ( - funcname)) + logger.info( + '%s: create a regression and refer to it on another list using a closes tag (will be monitored)' + % (funcname) + ) subcounter = 0 emaildirs['primary'].create_email( - '%s_%s' % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) replyto = '%s_%s' % (funcname, subcounter) subcounter += 1 - emaildirs['secondary'].create_email(funcname, "Closes: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com" % replyto, - subject="%s: refer to newly created regression on another mainling list" % funcname) + emaildirs['secondary'].create_email( + funcname, + 'Closes: https://lore.kernel.org/regressions/regzbot-testing-%s@example.com' + % replyto, + subject='%s: refer to newly created regression on another mainling list' + % funcname, + ) return ['mailchk'] def offltest_3_0(funcname): 
logger.info('%s: create a regression in next' % funcname) emaildirs['primary'].create_email( - funcname, "#regzbot introduced: next-20190101..next-20190102") + funcname, '#regzbot introduced: next-20190101..next-20190102' + ) return ['mailchk'] def offltest_3_1(funcname): replyto = 'test_3_0' - logger.info('%s: specify the culprit for the regression created in %s' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot introduced: %s" % gittrees_testing['next'].hashes_known[1], - replyto=replyto) + logger.info( + '%s: specify the culprit for the regression created in %s' % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot introduced: %s' % gittrees_testing['next'].hashes_known[1], + replyto=replyto, + ) return ['mailchk'] # mark regression as fixed by an existing commit def offltest_3_2(funcname): replyto = 'test_3_0' - logger.info('%s: mark regression created in %s as fixed by and exiting commit' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot fixed-by: %s" % gittrees_testing['next'].hashes_known[2], - replyto=replyto) + logger.info( + '%s: mark regression created in %s as fixed by and exiting commit' + % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot fixed-by: %s' % gittrees_testing['next'].hashes_known[2], + replyto=replyto, + ) return ['mailchk'] def offltest_3_3(funcname): logger.info('%s: create a regression in stable' % funcname) - emaildirs['primary'].create_email( - funcname, "#regzbot introduced: v1.8.1..v1.8.2") + emaildirs['primary'].create_email(funcname, '#regzbot introduced: v1.8.1..v1.8.2') return ['mailchk'] def offltest_3_4(funcname): replyto = 'test_3_3' - logger.info('%s: specify the culprit for the regression created in %s' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot introduced: %s" % gittrees_testing['linux-1.8.y'].hashes_known[1][0:11], - replyto=replyto) + logger.info( + 
'%s: specify the culprit for the regression created in %s' % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot introduced: %s' + % gittrees_testing['linux-1.8.y'].hashes_known[1][0:11], + replyto=replyto, + ) return ['mailchk'] def offltest_3_5(funcname): replyto = 'test_3_3' - logger.info('%s: mark regression created in %s as fixed by and exiting commit' % ( - funcname, replyto)) - emaildirs['primary'].create_email(funcname, "#regzbot fixed-by: %s" % gittrees_testing['linux-1.8.y'].hashes_known[2], - replyto=replyto) + logger.info( + '%s: mark regression created in %s as fixed by and exiting commit' + % (funcname, replyto) + ) + emaildirs['primary'].create_email( + funcname, + '#regzbot fixed-by: %s' % gittrees_testing['linux-1.8.y'].hashes_known[2], + replyto=replyto, + ) return ['mailchk'] def offltest_4_0(funcname): subcounter = 0 logger.info( - '%s: creating a mainline regression in the current cycle (range)' % funcname) + '%s: creating a mainline regression in the current cycle (range)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..v1.11-rc1' + ) subcounter += 1 logger.info( - '%s: creating a mainline regression in the current cycle (bisected)' % funcname) + '%s: creating a mainline regression in the current cycle (bisected)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[-1]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-1], + ) subcounter += 1 logger.info( - '%s: creating a mainline regression in the previous cycle (range)' % funcname) + '%s: creating a mainline regression in the previous cycle (range)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: 
v1.9..v1.10-rc2") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.9..v1.10-rc2' + ) subcounter += 1 logger.info( - '%s: creating a mainline regression in the current cycle (bisected)' % funcname) + '%s: creating a mainline regression in the current cycle (bisected)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[-4]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-4], + ) subcounter += 1 logger.info( - '%s: creating a mainline regression in an older cycle (range)' % funcname) + '%s: creating a mainline regression in an older cycle (range)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.9-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.9-rc1' + ) subcounter += 1 logger.info( - '%s: creating a mainline regression bisected in an older tree' % funcname) + '%s: creating a mainline regression bisected in an older tree' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[-8]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-8], + ) subcounter += 1 logger.info( - '%s: creating a mainline regression where the range spans two releases' % funcname) + '%s: creating a mainline regression where the range spans two releases' + % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.9..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.9..v1.11-rc1' + ) subcounter += 1 logger.info( - '%s: creating a mainline regression in the current cycle with open end)' % funcname) + '%s: creating a mainline regression in the current cycle with open end)' + % funcname + ) 
emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..' + ) return ['mailchk'] @@ -1346,18 +1724,24 @@ def offltest_4_1(funcname): subcounter = 0 logger.info('%s: creating a linux-next regression (range)' % funcname) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: next-20190101..next-20190102") + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: next-20190101..next-20190102', + ) subcounter += 1 logger.info('%s: creating a linux-next regression (bisected)' % funcname) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['next'].hashes_known[1]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['next'].hashes_known[1], + ) subcounter += 1 logger.info( - '%s: creating a linux-next regression (range starting with mainline)' % funcname) + '%s: creating a linux-next regression (range starting with mainline)' % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..next-20190102") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..next-20190102' + ) return ['mailchk'] @@ -1366,30 +1750,43 @@ def offltest_4_2(funcname): subcounter = 0 logger.info('%s: creating a linux-stable regression (range)' % funcname) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8.1..v1.8.2") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8.1..v1.8.2' + ) subcounter += 1 logger.info('%s: creating a linux-stable regression (bisected)' % funcname) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['linux-1.8.y'].hashes_known[1][0:11]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' + % 
gittrees_testing['linux-1.8.y'].hashes_known[1][0:11], + ) subcounter += 1 logger.info( - '%s: creating a linux-stable regression (range starting with mainline)' % funcname) + '%s: creating a linux-stable regression (range starting with mainline)' + % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.8..v1.8.1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.8..v1.8.1' + ) subcounter += 1 logger.info( - '%s: creating a regression with a range starting with a stable release and ending in mainline)' % funcname) + '%s: creating a regression with a range starting with a stable release and ending in mainline)' + % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10.2..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10.2..v1.11-rc1' + ) subcounter += 1 logger.info( - '%s: creating a regression with a range starting with an earlier stable release and ending in mainline)' % funcname) + '%s: creating a regression with a range starting with an earlier stable release and ending in mainline)' + % funcname + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.9.2..v1.10") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.9.2..v1.10' + ) return ['mailchk'] @@ -1397,75 +1794,106 @@ def offltest_4_2(funcname): def offltest_4_3(funcname): subcounter = 0 logger.info( - '%s_%s: creating a regressions that refers to non-existant tag' % (funcname, subcounter)) + '%s_%s: creating a regressions that refers to non-existant tag' + % (funcname, subcounter) + ) emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v0.10..v0.11") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v0.10..v0.11' + ) subcounter += 1 logger.info( - '%s_%s: creating a regressions that refers to non-existant tag' % (funcname, subcounter)) + '%s_%s: creating a 
regressions that refers to non-existant tag' + % (funcname, subcounter) + ) # as a side effect, the following mail will also make code fail that misses a str(foo), as something might put 123456789012 into an int instead of a string: emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: 123456789012") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: 123456789012' + ) return ['mailchk'] def offltest_4_4(funcname): logger.info( - '%s: creating a bunch of regressions and solve them in various ways to show everything in the webui' % funcname) + '%s: creating a bunch of regressions and solve them in various ways to show everything in the webui' + % funcname + ) subcounter = 0 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: %s" % gittrees_testing['mainline'].hashes_known[-1]) + '%s_%s' % (funcname, subcounter), + '#regzbot introduced: %s' % gittrees_testing['mainline'].hashes_known[-1], + ) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot link https://www.kernel.org/releases.html Link somewhere", - replyto="%s_%s" % (funcname, subcounter-1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot link https://www.kernel.org/releases.html Link somewhere', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..v1.11-rc1' + ) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot fixed-by: %s" % gittrees_testing['mainline'].hashes_known[-2], - replyto="%s_%s" % (funcname, subcounter-1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot fixed-by: %s' % gittrees_testing['mainline'].hashes_known[-2], + replyto='%s_%s' % (funcname, subcounter - 1), + ) 
subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..v1.11-rc1' + ) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot fixed-by: 1234567890abcdef1234567890abcdef", - replyto="%s_%s" % (funcname, subcounter-1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot fixed-by: 1234567890abcdef1234567890abcdef', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..v1.11-rc1' + ) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), - "#regzbot dupof: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com" % ( - funcname, subcounter - 3), - replyto="%s_%s" % (funcname, subcounter-1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot dupof: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com' + % (funcname, subcounter - 3), + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot introduced: v1.10..v1.11-rc1' + ) subcounter += 1 - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot resolve: some reason", - replyto="%s_%s" % (funcname, subcounter-1)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot resolve: some reason', + replyto='%s_%s' % (funcname, subcounter - 1), + ) subcounter += 1 emaildirs['primary'].create_email( - "%s_%s" % (funcname, subcounter), "#regzbot introduced: v1.10..v1.11-rc1") + '%s_%s' % (funcname, subcounter), '#regzbot 
introduced: v1.10..v1.11-rc1' + ) gittrees_testing['next'].mv( - 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com\n' % (funcname, funcname, subcounter)) + 'Testcommit %s\n\nLink: https://lore.kernel.org/regressions/regzbot-testing-%s_%s@example.com\n' + % (funcname, funcname, subcounter) + ) return ['mailchk', 'gitchk'] # a regzbot command for a regression/ml thread that is not yet tracked def offltest_5_0(funcname): - logger.info( - '%s: create a regression as base for other tests' % funcname) + logger.info('%s: create a regression as base for other tests' % funcname) emaildirs['primary'].create_email( - "%s" % funcname, "#regzbot introduced: v1.10..v1.11-rc1") + '%s' % funcname, '#regzbot introduced: v1.10..v1.11-rc1' + ) return ['mailchk'] @@ -1473,16 +1901,19 @@ def offltest_5_1(funcname): replyto = 'test_5_0' subcounter = 0 - logger.info( - '%s_%s: use a unknown regzbot command' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % (funcname, subcounter), "#regzbot foobar: 123456789", - replyto=replyto) + logger.info('%s_%s: use a unknown regzbot command' % (funcname, subcounter)) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot foobar: 123456789', replyto=replyto + ) subcounter += 1 logger.info( - '%s_%s: use a regzbot command in a thread not associated with a regression' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), "#regzbot fixed-by: 123456789") + '%s_%s: use a regzbot command in a thread not associated with a regression' + % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), '#regzbot fixed-by: 123456789' + ) return ['mailchk'] @@ -1492,32 +1923,55 @@ def offltest_5_2(funcname): subcounter = 0 logger.info( - '%s_%s: try regzbot monitor with a typo in the url' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), 
"#regzbot monitor: http://lore.kernel.org/somelist_somemsgid/", replyto=replyto) + '%s_%s: try regzbot monitor with a typo in the url' % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot monitor: http://lore.kernel.org/somelist_somemsgid/', + replyto=replyto, + ) subcounter += 1 logger.info( - '%s_%s: try regzbot monitor with a unkown mailing list ' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), "#regzbot monitor: http://lore.kernel.org/somelist/somemsgid/", replyto=replyto) + '%s_%s: try regzbot monitor with a unkown mailing list ' + % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot monitor: http://lore.kernel.org/somelist/somemsgid/', + replyto=replyto, + ) subcounter += 1 logger.info( - '%s_%s: try regzbot unmonitor with a typo a unkown mailing list ' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), "#regzbot unmonitor: http://lore.kernel.org/somelist_somemsgid/", replyto=replyto) + '%s_%s: try regzbot unmonitor with a typo a unkown mailing list ' + % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot unmonitor: http://lore.kernel.org/somelist_somemsgid/', + replyto=replyto, + ) subcounter += 1 logger.info( - '%s_%s: try regzbot unmonitor with a typo in the url' % (funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), "#regzbot unmonitor: http://lore.kernel.org/somelist/somemsgid/", replyto=replyto) + '%s_%s: try regzbot unmonitor with a typo in the url' % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot unmonitor: http://lore.kernel.org/somelist/somemsgid/', + replyto=replyto, + ) subcounter += 1 logger.info( - '%s_%s: try regzbot unmonitor with a unkown mailing list ' % 
(funcname, subcounter)) - emaildirs['primary'].create_email("%s_%s" % ( - funcname, subcounter), "#regzbot unmonitor: http://lore.kernel.org/regressions/some_fake_msgid/", replyto=replyto) + '%s_%s: try regzbot unmonitor with a unkown mailing list ' + % (funcname, subcounter) + ) + emaildirs['primary'].create_email( + '%s_%s' % (funcname, subcounter), + '#regzbot unmonitor: http://lore.kernel.org/regressions/some_fake_msgid/', + replyto=replyto, + ) return ['mailchk'] diff --git a/regzbot/testing_online.py b/regzbot/testing_online.py index 4e259c3..5ca674c 100644 --- a/regzbot/testing_online.py +++ b/regzbot/testing_online.py @@ -16,9 +16,15 @@ def init(tmpdir): regzbot.set_citesting('online') regzbot.basicressources_setup( - tmpdir=tmpdir, gittreesdir=True, databasedir=os.path.join(tmpdir, 'db-onlinetsts')) + tmpdir=tmpdir, + gittreesdir=True, + databasedir=os.path.join(tmpdir, 'db-onlinetsts'), + ) regzbot.basicressources_init( - tmpdir=tmpdir, gittreesdir=True, databasedir=os.path.join(tmpdir, 'db-onlinetsts')) + tmpdir=tmpdir, + gittreesdir=True, + databasedir=os.path.join(tmpdir, 'db-onlinetsts'), + ) def run(resultfilename, tmpdir, _): @@ -37,18 +43,19 @@ def run(resultfilename, tmpdir, _): innercount = 0 while '%s_%s_%s' % (testfuncprefix, outercount, innercount) in dir(this): # run test - callfunction = getattr(this, '%s_%s_%s' % - (testfuncprefix, outercount, innercount)) + callfunction = getattr( + this, '%s_%s_%s' % (testfuncprefix, outercount, innercount) + ) chk_mail, chk_git, wait = callfunction( - 'test_%s_%s' % (outercount, innercount)) + 'test_%s_%s' % (outercount, innercount) + ) if chk_git: for gittree in regzbot.GitTree.getall(): gittree.update() # write results - resultfile.write('[%s_%s_%s]\n' % - (testfuncprefix, outercount, innercount)) + resultfile.write('[%s_%s_%s]\n' % (testfuncprefix, outercount, innercount)) for data in regzbot.export_csv.dumpall_csv(): resultfile.write(data) resultfile.write('\n') @@ -81,6 +88,7 @@ def 
onlntest_0_2(funcname): regzbot.checkout_msgid('438d711b-094b-fcfd-79e3-69f03a14df21@leemhuis.info') return False, False, False + # the last mail in the thread will only find the report by walking the thread @@ -95,6 +103,6 @@ def onlntest_1_0(funcname): return False, False, False -#def onlntest_1_1(funcname): +# def onlntest_1_1(funcname): # regzbot.redo_regressions(['5edaa2b7c2fe4abd0347b8454b2ac032b6694e2c.camel@collabora.com', ]) # return False, False, False diff --git a/regzbot/testing_trackers.py b/regzbot/testing_trackers.py index 039a377..c646f09 100644 --- a/regzbot/testing_trackers.py +++ b/regzbot/testing_trackers.py @@ -18,9 +18,15 @@ def init(tmpdir): regzbot.set_citesting('trackers') regzbot.basicressources_setup( - tmpdir=tmpdir, gittreesdir=True, databasedir=os.path.join(tmpdir, 'db-trackertsts')) + tmpdir=tmpdir, + gittreesdir=True, + databasedir=os.path.join(tmpdir, 'db-trackertsts'), + ) regzbot.basicressources_init( - tmpdir=tmpdir, gittreesdir=True, databasedir=os.path.join(tmpdir, 'db-trackertsts')) + tmpdir=tmpdir, + gittreesdir=True, + databasedir=os.path.join(tmpdir, 'db-trackertsts'), + ) regzbot.GitTree.updateall() @@ -38,18 +44,17 @@ def run(resultfilename, tmpdir, _): innercount = 0 while '%s_%s_%s' % (testfuncprefix, outercount, innercount) in dir(this): # run test - callfunction = getattr(this, '%s_%s_%s' % - (testfuncprefix, outercount, innercount)) - chk_git, wait = callfunction( - 'test_%s_%s' % (outercount, innercount)) + callfunction = getattr( + this, '%s_%s_%s' % (testfuncprefix, outercount, innercount) + ) + chk_git, wait = callfunction('test_%s_%s' % (outercount, innercount)) if chk_git: for gittree in regzbot.GitTree.getall(): gittree.update() # write results - resultfile.write('[%s_%s_%s]\n' % - (testfuncprefix, outercount, innercount)) + resultfile.write('[%s_%s_%s]\n' % (testfuncprefix, outercount, innercount)) for data in regzbot.export_csv.dumpall_csv(): resultfile.write(data) resultfile.write('\n') @@ -70,52 +75,78 @@ 
def run(resultfilename, tmpdir, _): def trackertest_0_0(funcname): - regzbot.ReportSource.add('regzbottesting-gitlab', 3, - 'https://gitlab.com/knurd42/linux', - 'gitlab', '', - lastchked=int(datetime.datetime.fromisoformat('2023-11-20T00:00:00.000Z').timestamp())) - regzbot.ReportSource.add('regzbottesting-github', 3, - 'https://github.com/knurd/linux', - 'github', '', - lastchked=int(datetime.datetime.fromisoformat('2022-03-15T00:00:00.000Z').timestamp())) - - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T11:35:00.000Z') + regzbot.ReportSource.add( + 'regzbottesting-gitlab', + 3, + 'https://gitlab.com/knurd42/linux', + 'gitlab', + '', + lastchked=int( + datetime.datetime.fromisoformat('2023-11-20T00:00:00.000Z').timestamp() + ), + ) + regzbot.ReportSource.add( + 'regzbottesting-github', + 3, + 'https://github.com/knurd/linux', + 'github', + '', + lastchked=int( + datetime.datetime.fromisoformat('2022-03-15T00:00:00.000Z').timestamp() + ), + ) + + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T11:35:00.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_1(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T11:37:00.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T11:37:00.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_2(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T12:22:00.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T12:22:00.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_3(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T12:30:00.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T12:30:00.000Z' + 
) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_4(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T13:00:00.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T13:00:00.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_5(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T13:01:45.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T13:01:45.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False def trackertest_0_6(funcname): - regzbot._TESTING['until'] = datetime.datetime.fromisoformat('2023-11-20T13:02:45.000Z') + regzbot._TESTING['until'] = datetime.datetime.fromisoformat( + '2023-11-20T13:02:45.000Z' + ) regzbot.checkout_url('https://gitlab.com/knurd42/linux/-/issues/11') return False, False From 19d726372ca8302cb17bb4472983b8681d461a43 Mon Sep 17 00:00:00 2001 From: Marcelo Robert Santos Date: Wed, 22 Apr 2026 11:44:02 -0300 Subject: [PATCH 2/4] feat: add ruff config files Configures the ruff standards and adds an ignore to the formatting commit so that it doesn't show on git blame commands --- .git-blame-ignore-revs | 2 ++ docs/installation.md | 29 ++++++++++++++++ requirements-dev.txt | 1 + ruff.toml | 75 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 107 insertions(+) create mode 100644 .git-blame-ignore-revs create mode 100644 requirements-dev.txt create mode 100644 ruff.toml diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..0af7838 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Ruff auto-formatting +c5d1a135763f8c3019d1310444d2596ffa193932 \ No newline at end of file diff --git a/docs/installation.md b/docs/installation.md index 5e3620d..87846ab 100644 --- a/docs/installation.md +++ 
b/docs/installation.md @@ -11,6 +11,10 @@ Then activate the environment and install the dependencies: source ~/.local/share/regzbot/python-venv/bin/activate pip install -r requirements.txt +If you are going to contribute to the project, you should also install the development dependencies: + + pip install -r requirements-dev.txt + ## setting up git trees Next create the git trees repositories at `~/.cache/regzbot/gittrees/`. You need git checkouts @@ -42,3 +46,28 @@ Now you are ready to run regzbot ./regzbot.sh run It will generate web reports at `~/.cache/regzbot/websites/` + +## Development tools + +### Ruff + +We use [Ruff](https://github.com/astral-sh/ruff) for fast Python linting and formatting. +The configuration for this tool can be seen in [ruff.toml](../ruff.toml). + +#### Running Ruff Checker + +You can check the formatting or linting status by running the following commands: + +```bash +ruff format --check +ruff check +``` + +#### Fixing Issues Automatically + +You can fix issues automatically with these commands: + +```bash +ruff format +ruff check --fix +``` diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..5883ad4 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1 @@ +ruff==0.11.9 diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..b85b6da --- /dev/null +++ b/ruff.toml @@ -0,0 +1,75 @@ +# Exclude a variety of commonly ignored directories. 
+exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +# Will format at line-length 88 but will only give error at max-line-length 110 +line-length = 88 +indent-width = 4 + +# Assume Python 3.10 +target-version = "py310" + +[lint] +# Selecting default rules for now +select = ["E4", "E7", "E9", "F"] + +# F841: unused variables; many occurrences, unnecessary refactor at the time of writing +# F821: undefined variable name; some occurrences for _repsources, possibly a command parameter +# E712: use `not var` instead of `var == False`; some occurrences and is an unsafe fix +ignore = ["F841", "F821", "E712"] + +# NOTE: We're enabling only basic rules. Won't be enabling complexity rule +# because there are many functions over the complexity limit of 10, which +# means there would be too much refactor to be done. + +[lint.pycodestyle] +# E501 reports lines that exceed the length of 110 in case they can't be wrapped. +max-line-length = 110 + +[format] +quote-style = "single" # continue with repository style +indent-style = "space" +line-ending = "auto" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. 
+docstring-code-line-length = "dynamic" From 534f07b5b32530b16e72a4ae15c5300e3db6dd50 Mon Sep 17 00:00:00 2001 From: Marcelo Robert Santos Date: Wed, 22 Apr 2026 14:28:39 -0300 Subject: [PATCH 3/4] feat: add pre-commit Add pre-commit hooks to run ruff automatically and flag problems --- .pre-commit-config.yaml | 7 +++++++ docs/installation.md | 11 +++++++++++ requirements-dev.txt | 1 + 3 files changed, 19 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..3f73f91 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,7 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.11.13 + hooks: + - id: ruff-format + args: [--check] + - id: ruff-check diff --git a/docs/installation.md b/docs/installation.md index 87846ab..86c97a0 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -71,3 +71,14 @@ You can fix issues automatically with these commands: ruff format ruff check --fix ``` + +## Pre-commit + +To run Ruff automatically on each commit, install the dev dependencies and then install the pre-commit hooks: + + pip install -r requirements-dev.txt + pre-commit install + +You can also run the hooks on all files manually: + + pre-commit run --all-files diff --git a/requirements-dev.txt b/requirements-dev.txt index 5883ad4..60ff385 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1 +1,2 @@ ruff==0.11.9 +pre-commit==4.2.0 From 80e695489fdfe60a7d27a0a6ebb40540b202a6bc Mon Sep 17 00:00:00 2001 From: Marcelo Robert Santos Date: Wed, 22 Apr 2026 14:47:28 -0300 Subject: [PATCH 4/4] feat: add github CI Adds ci file to run ruff automatically on pull requests Closes #3 --- .github/workflows/ci.yml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..3ca3ca1 --- /dev/null +++ 
b/.github/workflows/ci.yml @@ -0,0 +1,28 @@ +name: CI for Regzbot + +on: + pull_request: + +jobs: + ruff: + runs-on: ubuntu-latest + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Install Ruff + run: | + python -m pip install --upgrade pip + pip install ruff + + - name: Check formatting + run: ruff format --check regzbot + + - name: Check linting + run: ruff check regzbot --output-format=github \ No newline at end of file