# Functions for manipulating .sum summary files.

import re
import os.path
from StringIO import StringIO

# Necessary for ordered dictionaries. We use them when the order of
# the tests matters to us.
from collections import OrderedDict

# Helper regex for parse_sum_line.
sum_matcher = re.compile('^(.?(PASS|FAIL)): (.*)$')
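# Matched lines look like, e.g. (illustrative lines, not from a real run):
#   PASS: gdb.base/break.exp: set breakpoint at main
#   XFAIL: gdb.base/foo.exp: some known-unreliable test
# The optional leading character also covers variants such as XPASS,
# KPASS and KFAIL.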
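# Helper regex for racy.sum files, which list bare test names with no
# PASS/FAIL result (e.g. 'gdb.threads/example.exp: some racy test' --
# an illustrative name, not from a real run).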
racy_file_matcher = re.compile ('^(gdb\..*)')

# You must call set_web_base at startup to set this.
gdb_web_base = None
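# (gdb_web_base is the base directory under which the per-builder
# result files are written and read.)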

def set_web_base(arg):
    global gdb_web_base
    gdb_web_base = arg
    if not os.path.isdir(gdb_web_base):
        # If the parent doesn't exist, we're confused.
        # So, use mkdir and not makedirs.
        os.mkdir(gdb_web_base, 0755)

def get_web_base ():
    global gdb_web_base
    return gdb_web_base

class DejaResults(object):
    def __init__(self):
        object.__init__(self)

    # Parse a single line from a .sum file.
    # Uniquify the name, and put the result into OUT_DICT.
    # If the line does not appear to be about a test, ignore it.
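    # OUT_DICT is the two-element list built by _read_sum_file and
    # read_sum_text: OUT_DICT[0] is an OrderedDict mapping each test
    # name to its result, and OUT_DICT[1] maps each result to the set
    # of test names with that result.  Duplicate test names are made
    # unique with a ' <<N>>' suffix, e.g. a second occurrence of
    # 'gdb.base/foo.exp: bar' (an illustrative name) would be stored as
    # 'gdb.base/foo.exp: bar <<2>>'.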
    def parse_sum_line(self, out_dict, line, is_racy_file = False):
        global sum_matcher

        line = line.rstrip()
        if not is_racy_file:
            # Removing special XFAIL comment added by script.
            m = re.match(sum_matcher, line)
        else:
            m = re.match (racy_file_matcher, line)

        if m:
            if is_racy_file:
                # On racy.sum files, there is no result to parse.
                result = 'NONE'
                test_name = m.group (1)
            else:
                result = m.group (1)
                test_name = m.group (3)
            # Remove tail parentheses
            test_name = re.sub ('(\s+)?\(.*$', '', test_name)
            if result not in out_dict[1].keys ():
                out_dict[1][result] = set ()
            if test_name in out_dict[0]:
                i = 2
                while True:
                    nname = test_name + ' <<' + str(i) + '>>'
                    if nname not in out_dict[0]:
                        break
                    i = i + 1
                test_name = nname
            # Add the testname to the dictionary...
            out_dict[0][test_name] = result
            # and to the set.
            out_dict[1][result].add (test_name)

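    # Write SUM_DICT into FILENAME under GDB_WEB_BASE/SUBDIR (or
    # GDB_WEB_BASE/SUBDIR/REV_OR_BRANCH), creating the directory if
    # needed.  Each test is written as a 'RESULT: test name' line; if
    # HEADER is given, it is written first.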
    def _write_sum_file(self, sum_dict, subdir, rev_or_branch, filename,
                        header = None):
        global gdb_web_base
        if not rev_or_branch:
            bdir = os.path.join (gdb_web_base, subdir)
        else:
            bdir = os.path.join (gdb_web_base, subdir, rev_or_branch)
        if not os.path.isdir (bdir):
            os.makedirs (bdir, 0755)
        fname = os.path.join (bdir, filename)
        keys = sum_dict[0].keys ()
        mode = 'w'
        if header:
            with open (fname, 'w') as f:
                f.write (header)
            mode = 'a'
        with open (fname, mode) as f:
            for k in keys:
                f.write (sum_dict[0][k] + ': ' + k + '\n')

    def write_sum_file(self, sum_dict, builder, branch):
        self._write_sum_file (sum_dict, builder, None, 'gdb.sum')

    def write_try_build_sum_file (self, sum_dict, builder, branch):
        self._write_sum_file (sum_dict, builder, None, 'trybuild_gdb.sum',
                              header = "### THIS SUM FILE WAS GENERATED BY A TRY BUILD ###\n\n")

    def write_baseline(self, sum_dict, builder, branch, rev):
        self._write_sum_file(sum_dict, builder, None, 'baseline',
                             header = "### THIS BASELINE WAS LAST UPDATED BY COMMIT %s ###\n\n" % rev)

    # Read a .sum file.
    # The file is read from GDB_WEB_BASE/SUBDIR/FILENAME, or from
    # GDB_WEB_BASE/SUBDIR/REV_OR_BRANCH/FILENAME when REV_OR_BRANCH is
    # given.  To read the baseline file for a branch, use `read_baseline'.
    # Returns the parsed .sum contents (see parse_sum_line), or None if
    # the file did not exist.
    def _read_sum_file(self, subdir, rev_or_branch, filename,
                       is_racy_file = False):
        global gdb_web_base
        if not rev_or_branch:
            fname = os.path.join (gdb_web_base, subdir, filename)
        else:
            fname = os.path.join (gdb_web_base, subdir, rev_or_branch, filename)
        if os.path.exists (fname):
            result = []
            # result[0] is the OrderedDict containing all the tests
            # and results.
            result.append (OrderedDict ())
            # result[1] is a dictionary containing sets of tests
            result.append (dict ())
            with open (fname, 'r') as f:
                for line in f:
                    self.parse_sum_line (result, line,
                                         is_racy_file = is_racy_file)
        else:
            result = None
        return result

    def read_sum_file (self, builder, branch):
        return self._read_sum_file (builder, None, 'gdb.sum')

    def read_baseline(self, builder, branch):
        return self._read_sum_file (builder, None, 'baseline')

    def read_xfail (self, builder, branch):
        return self._read_sum_file (builder, os.path.join ('xfails', branch),
                                    'xfail')

    def read_old_sum_file (self, builder, branch):
        return self._read_sum_file (builder, None, 'previous_gdb.sum')

    # Parse some text as a .sum file and return the resulting
    # dictionary.
    def read_sum_text (self, text, is_racy_file = False):
        cur_file = StringIO (text)
        cur_results = []
        cur_results.append (OrderedDict ())
        cur_results.append (dict ())
        for line in cur_file.readlines ():
            self.parse_sum_line (cur_results, line,
                                 is_racy_file = is_racy_file)
        return cur_results

    # Parse some text as the racy.sum file and return the resulting
    # dictionary.
    def read_racy_sum_text (self, text):
        return self.read_sum_text (text, is_racy_file = True)

    # Compute regressions between RESULTS and the baseline OLD_RES on
    # BUILDER.
    # OLD_RES will be modified if any new PASSes are seen.
    # Returns a regression report, as a string.
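    # Each regression is reported on its own line, either as
    # 'new FAIL: <test name>' or '<old result> -> FAIL: <test name>'
    # (e.g. 'PASS -> FAIL: gdb.base/foo.exp: some test' -- an
    # illustrative line, not from a real run).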
    def compute_regressions (self, builder, branch, results, old_res):
        our_keys = results[0].keys ()
        result = ''
        xfails = self.read_xfail (builder, branch)
        if xfails is None:
            xfails = {}
        else:
            xfails = xfails[0]
        for key in our_keys:
            # An XFAIL entry means we have an unreliable test.
            if key in xfails:
                continue
            # A transition to PASS means we should update the baseline.
            if results[0][key] == 'PASS':
                if key not in old_res[0] or old_res[0][key] != 'PASS':
                    old_res[0][key] = 'PASS'
                continue
            # A regression is just a transition to FAIL.
            if results[0][key] != 'FAIL':
                continue
            if key not in old_res[0]:
                result = result + 'new FAIL: ' + key + '\n'
            elif old_res[0][key] != 'FAIL':
                result = result + old_res[0][key] + ' -> FAIL: ' + key + '\n'
        return result
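
# A minimal, self-contained usage sketch.  It is not part of the
# buildbot flow; the temporary web base, the builder/branch names and
# the .sum text below are illustrative assumptions only.
if __name__ == '__main__':
    import tempfile

    # Point the web base at a throwaway directory so nothing real is touched.
    set_web_base (tempfile.mkdtemp ())
    d = DejaResults ()
    baseline = d.read_sum_text ('PASS: gdb.base/example.exp: step over call\n')
    current = d.read_sum_text ('FAIL: gdb.base/example.exp: step over call\n')
    # No xfail file exists for this builder/branch, so the PASS -> FAIL
    # transition is reported as a regression.
    print d.compute_regressions ('example-builder', 'master', current, baseline)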