2013-08-01 17:32:44 +00:00
|
|
|
# Functions for manipulating .sum summary files.
|
|
|
|
|
|
|
|
import re
|
|
|
|
import os.path
|
|
|
|
from StringIO import StringIO
|
2015-08-07 01:58:30 +00:00
|
|
|
# Necessary for ordered dictionaries. We use them when the order or
|
|
|
|
# the tests matters to us.
|
|
|
|
from collections import OrderedDict
|
2016-11-06 03:59:59 +00:00
|
|
|
import lzma
|
2013-08-01 17:32:44 +00:00
|
|
|
|
|
|
|
# Helper regex for parse_sum_line.  The optional single leading
# character covers the X/K variants (PASS, FAIL, XPASS, XFAIL,
# KPASS, KFAIL).  Raw strings avoid invalid escape sequences.
sum_matcher = re.compile(r'^(.?(PASS|FAIL)): (.*)$')

# Matches a test-name line in a racy.sum file (bare gdb.* test names,
# no result prefix).
racy_file_matcher = re.compile(r'^(gdb\..*)')
|
2013-08-01 17:32:44 +00:00
|
|
|
|
|
|
|
# Base directory under which results are published on the web server.
# You must call set_web_base at startup to set this.
gdb_web_base = None
|
|
|
|
|
|
|
|
def set_web_base(arg):
    """Set the global web base directory to ARG, creating it if needed.

    Deliberately uses a single mkdir (not makedirs): if the parent
    directory does not exist either, something is wrong and we want
    the error rather than silently creating the whole tree."""
    global gdb_web_base
    gdb_web_base = arg
    if not os.path.isdir(gdb_web_base):
        # If the parent doesn't exist, we're confused.
        # So, use mkdir and not makedirs.
        # 0o755 is the Python 2.6+/3-compatible octal spelling; the
        # old 0755 literal is a syntax error under Python 3.
        os.mkdir(gdb_web_base, 0o755)
|
|
|
|
|
2014-12-22 22:51:13 +00:00
|
|
|
def get_web_base ():
    """Return the configured web base directory (see set_web_base)."""
    # Reading a module-level name needs no 'global' declaration.
    return gdb_web_base
|
|
|
|
|
2013-08-01 17:32:44 +00:00
|
|
|
class DejaResults(object):
    """Read, write and compare DejaGNU .sum summary files.

    Parsed results are represented as a two-element list:
      result[0] - OrderedDict mapping test name -> result string
                  (e.g. 'PASS', 'FAIL'), in file order.
      result[1] - dict mapping result string -> set of test names.

    Uses the module globals sum_matcher, racy_file_matcher and
    gdb_web_base (set via set_web_base)."""

    def __init__(self):
        object.__init__(self)

    # Parse a single line from a .sum file.
    # Uniquify the name, and put the result into OUT_DICT.
    # If the line does not appear to be about a test, ignore it.
    def parse_sum_line(self, out_dict, line, is_racy_file = False):
        line = line.rstrip()
        if not is_racy_file:
            m = re.match(sum_matcher, line)
        else:
            # racy.sum files contain bare test names, no result prefix.
            m = re.match(racy_file_matcher, line)
        if not m:
            return

        if is_racy_file:
            # On racy.sum files, there is no result to parse.
            result = 'NONE'
            test_name = m.group(1)
        else:
            result = m.group(1)
            test_name = m.group(3)
        # Remove tail parentheses (trailing "(...)" commentary).
        test_name = re.sub(r'(\s+)?\(.*$', '', test_name)
        if result not in out_dict[1]:
            out_dict[1][result] = set()
        # Uniquify duplicated test names by appending ' <<N>>'.
        # BUG FIX: the membership tests must look into out_dict[0]
        # (the name -> result mapping).  Testing the outer two-element
        # list never matched, so duplicate test names silently
        # overwrote earlier results instead of being uniquified.
        if test_name in out_dict[0]:
            i = 2
            while True:
                nname = test_name + ' <<' + str(i) + '>>'
                if nname not in out_dict[0]:
                    break
                i = i + 1
            test_name = nname
        # Add the test name to the dictionary...
        out_dict[0][test_name] = result
        # ... and to the per-result set.
        out_dict[1][result].add(test_name)

    def _write_sum_file(self, sum_dict, builder, rev, filename, header = None,
                        istry = False, branch = "master", try_count = "0"):
        """Write SUM_DICT as FILENAME under BUILDER's web directory
        for revision REV.  HEADER, if given, is written first.  Try
        builds (ISTRY) go under an extra 'try/<xx>/<rev>/<try_count>'
        path.  BRANCH is accepted for interface symmetry but does not
        affect the file location."""
        if istry:
            bdir = os.path.join(gdb_web_base, builder, 'try', rev[:2], rev,
                                try_count)
        else:
            bdir = os.path.join(gdb_web_base, builder, rev[:2], rev)

        if not os.path.exists(bdir):
            # Create the directory tree group/other-readable.
            old_umask = os.umask(0o022)
            try:
                os.makedirs(bdir)
            finally:
                # Always restore the previous umask.
                os.umask(old_umask)

        fname = os.path.join(bdir, filename)
        mode = 'w'
        # Make the .sum file world-readable, not executable.
        old_umask = os.umask(0o133)
        try:
            if header:
                with open(fname, 'w') as f:
                    f.write(header)
                mode = 'a'
            with open(fname, mode) as f:
                for k in sum_dict[0].keys():
                    f.write(sum_dict[0][k] + ': ' + k + '\n')
        finally:
            os.umask(old_umask)

    def write_sum_file(self, sum_dict, builder, branch, rev, istry,
                       try_count = "0"):
        """Write SUM_DICT as gdb.sum (or its try-build variant)."""
        if istry:
            # BUG FIX: 'self' was erroneously passed as an explicit
            # first argument on this bound-method call, shifting every
            # parameter by one and raising TypeError at runtime.
            self.write_try_build_sum_file(sum_dict, builder, branch, rev,
                                          try_count = try_count)
        else:
            self._write_sum_file(sum_dict, builder, rev, 'gdb.sum',
                                 istry = False, branch = branch)

    def write_try_build_sum_file(self, sum_dict, builder, branch, rev,
                                 try_count):
        """Write SUM_DICT as a try-build trybuild_gdb.sum file."""
        self._write_sum_file(sum_dict, builder, rev, 'trybuild_gdb.sum',
                             header = "### THIS SUM FILE WAS GENERATED BY A TRY BUILD ###\n\n",
                             istry = True,
                             branch = branch,
                             try_count = try_count)

    def write_baseline(self, sum_dict, builder, branch, rev, istry):
        """Write SUM_DICT as the branch baseline.  Try builds never
        update the baseline."""
        if istry:
            return
        self._write_sum_file(sum_dict, builder, rev, 'baseline',
                             header = "### THIS BASELINE WAS LAST UPDATED BY COMMIT %s ###\n\n" % rev,
                             istry = False,
                             branch = branch)

    # Read a .sum file.
    # The builder name is BUILDER.
    # The base file name is given in FILENAME.  This should be a git
    # revision; to read the baseline file for a branch, use `read_baseline'.
    # Returns a dictionary holding the .sum contents, or None if the
    # file did not exist.
    def _read_sum_file(self, builder, branch, rev, filename,
                       is_racy_file = False, is_xfail_file = False):
        if is_xfail_file:
            # xfail files are stored per-branch, not per-revision.
            fname = os.path.join(gdb_web_base, builder, 'xfails', branch,
                                 filename)
        else:
            fname = os.path.join(gdb_web_base, builder, rev[:2], rev, filename)

        result = []
        # result[0] is the OrderedDict containing all the tests
        # and results.
        result.append(OrderedDict())
        # result[1] is a dictionary containing sets of tests.
        result.append(dict())

        if os.path.exists(fname):
            with open(fname, 'r') as f:
                for line in f:
                    self.parse_sum_line(result, line,
                                        is_racy_file = is_racy_file)
        elif os.path.exists(fname + '.xz'):
            # Older results are kept xz-compressed.
            f = lzma.LZMAFile(fname + '.xz', 'r')
            try:
                for line in f:
                    self.parse_sum_line(result, line,
                                        is_racy_file = is_racy_file)
            finally:
                # Close even if a parse error is raised.
                f.close()
        else:
            return None
        return result

    def read_sum_file(self, builder, branch, rev):
        """Read BUILDER's gdb.sum for revision REV."""
        return self._read_sum_file(builder, branch, rev, 'gdb.sum')

    def read_baseline(self, builder, branch, rev):
        """Read BUILDER's baseline file for BRANCH."""
        return self._read_sum_file(builder, branch, rev, 'baseline')

    def read_xfail(self, builder, branch):
        """Read BUILDER's per-branch xfail file, or None if absent."""
        return self._read_sum_file(builder, branch, None, 'xfail',
                                   is_xfail_file = True)

    def read_old_sum_file(self, builder, branch, rev):
        """Read BUILDER's previous_gdb.sum for revision REV."""
        return self._read_sum_file(builder, branch, rev, 'previous_gdb.sum')

    # Parse some text as a .sum file and return the resulting
    # dictionary.
    def read_sum_text(self, text, is_racy_file = False):
        cur_file = StringIO(text)
        cur_results = []
        cur_results.append(OrderedDict())
        cur_results.append(dict())
        for line in cur_file.readlines():
            self.parse_sum_line(cur_results, line,
                                is_racy_file = is_racy_file)
        return cur_results

    # Parse some text as the racy.sum file and return the resulting
    # dictionary.
    def read_racy_sum_text(self, text):
        return self.read_sum_text(text, is_racy_file = True)

    # Compute regressions between RESULTS and OLD_RES on BUILDER.
    # OLD_RES will be modified if any new PASSes are seen.
    # Returns a regression report, as a string.
    def compute_regressions(self, builder, branch, results, old_res):
        report = []
        xfails = self.read_xfail(builder, branch)
        if xfails is None:
            xfails = {}
        else:
            xfails = xfails[0]
        for key in results[0].keys():
            # An XFAIL entry means we have an unreliable test.
            if key in xfails:
                continue
            res = results[0][key]
            # A transition to PASS means we should update the baseline.
            # NOTE(review): only old_res[0] is updated here, not the
            # old_res[1] result sets — presumably callers only persist
            # old_res[0]; verify if old_res[1] is consumed downstream.
            if res == 'PASS':
                if key not in old_res[0] or old_res[0][key] != 'PASS':
                    old_res[0][key] = 'PASS'
                continue
            # We don't report new XFAILs or XPASSes.
            if res == 'XFAIL' or res == 'XPASS':
                continue
            # We report both PASS -> FAIL and FAIL -> PASS, as well as
            # new FAIL and new PASS.
            if key not in old_res[0]:
                report.append('new ' + res + ': ' + key + '\n')
            elif res != old_res[0][key]:
                report.append(old_res[0][key] + ' -> ' + res + ': '
                              + key + '\n')
        # Joining once avoids quadratic string concatenation.
        return ''.join(report)
|