Initial attempt to change the way we store results

This was imported (with modifications) from the 0.9-prep branch.
Author: Sergio Durigan Junior
Date: 2016-11-05 23:59:59 -04:00
Parent: d7efeb9b57
Commit: 3c736356e5
5 changed files with 333 additions and 186 deletions

lib/fsdb.py  (new file, 20 lines added)

@@ -0,0 +1,20 @@
# DB-like with filesystem

from buildbot.plugins import util, steps
from sumfiles import get_web_base
import os

class SaveGDBResults (steps.MasterShellCommand):
    name = 'save build results'
    description = 'saving build results'
    descriptionDone = 'saved build results'

    def __init__ (self, **kwargs):
        steps.MasterShellCommand.__init__ (self, **kwargs)
        self.command = [ os.path.expanduser ("~/scripts/update-logs.sh"),
                         "--commit", util.Property ('got_revision'),
                         "--builder", util.Property ('buildername'),
                         "--base-directory", get_web_base (),
                         "--branch", util.Property ('branch'),
                         "--is-try-sched", util.Property ('isTrySched',
                                                          default = 'no') ]
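
For context, a minimal sketch (not part of this commit) of how this step is meant to be attached to a build factory; the real wiring, gated on the scheduler type, is in the master.cfg hunk further down. It assumes lib/ (where fsdb.py lives) is on the master's import path.

from buildbot.process import factory
from fsdb import SaveGDBResults

f = factory.BuildFactory ()
# ... source checkout, configure, compile and test steps go here ...
# master.cfg additionally passes doStepIf/hideStepIf predicates so the
# step only runs for non-racy, non-try schedulers.
f.addStep (SaveGDBResults ())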

gdbcommand.py

@@ -1,67 +1,92 @@
 # GDB .sum-fetching command.

-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.steps.shell import ShellCommand
+from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
+from buildbot.plugins import steps, util
 from sumfiles import DejaResults, get_web_base
 from gdbgitdb import switch_to_branch
-from shutil import copyfile
+import os
+import sqlite3

-class CopyOldGDBSumFile (ShellCommand):
+@util.renderer
+def create_copy_command (props):
+    rev = props.getProperty ('got_revision')
+    builder = props.getProperty ('buildername')
+    istry = props.getProperty ('isTrySched')
+    branch = props.getProperty ('branch')
+    command = [ 'cp', '-a' ]
+    db_file = os.path.join (get_web_base (), builder, builder + '.db')
+    if not os.path.exists (db_file):
+        # This is probably the first commit being tested.  Don't do anything.
+        return [ 'true' ]
+    con = sqlite3.connect (db_file)
+    c = con.cursor ()
+    c.execute ('SELECT commitid FROM logs WHERE branch = "%s" AND trysched = 0 ORDER BY timestamp DESC LIMIT 1' % branch)
+    commit = c.fetchone ()[0]
+    con.close ()
+    from_path = os.path.join (get_web_base (), commit[:2], commit, 'gdb.sum')
+    if istry and istry == 'yes':
+        to_path = os.path.join (get_web_base (), 'try', rev[:2], rev, 'previous_gdb.sum')
+    else:
+        to_path = os.path.join (get_web_base (), rev[:2], rev, 'previous_gdb.sum')
+    command += [ from_path, to_path ]
+    return command
+
+class CopyOldGDBSumFile (steps.MasterShellCommand):
     """Copy the current gdb.sum file into the old_gdb.sum file."""
     name = "copy gdb.sum file"
     description = "copying previous gdb.sum file"
     descriptionDone = "copied previous gdb.sum file"
-    command = [ 'true' ]
     def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, **kwargs)
-    def evaluateCommand (self, cmd):
-        rev = self.getProperty('got_revision')
-        builder = self.getProperty('buildername')
-        isrebuild = self.getProperty ('isRebuild')
-        branch = self.getProperty('branch')
-        wb = get_web_base ()
-        if branch is None:
-            branch = 'master'
-        if isrebuild and isrebuild == 'yes':
-            return SUCCESS
-        # Switch to the right branch inside the BUILDER repo
-        switch_to_branch (builder, branch, force_switch = True)
-        try:
-            copyfile ("%s/%s/gdb.sum" % (wb, builder),
-                      "%s/%s/previous_gdb.sum" % (wb, builder))
-        except IOError:
-            # If the dest file does not exist, ignore
-            pass
-        return SUCCESS
+        steps.MasterShellCommand.__init__ (self, command = create_copy_command, **kwargs)

-class GdbCatSumfileCommand(ShellCommand):
+class GdbCatSumfileCommand(steps.ShellCommand):
     name = 'regressions'
     command = ['cat', 'gdb.sum']
     def __init__(self, **kwargs):
-        ShellCommand.__init__(self, **kwargs)
+        steps.ShellCommand.__init__(self, **kwargs)
     def evaluateCommand(self, cmd):
         rev = self.getProperty('got_revision')
         builder = self.getProperty('buildername')
         istrysched = self.getProperty('isTrySched')
         branch = self.getProperty('branch')
+        db_file = os.path.join (get_web_base (), builder, builder + '.db')
+        parser = DejaResults()
+        cur_results = parser.read_sum_text(self.getLog('stdio').getText())
+        baseline = None
         if branch is None:
             branch = 'master'
-        # Switch to the right branch inside the BUILDER repo
-        switch_to_branch (builder, branch, force_switch = False)
-
-        parser = DejaResults()
-        cur_results = parser.read_sum_text(self.getLog('stdio').getText())
-        baseline = parser.read_baseline (builder, branch)
-        old_sum = parser.read_sum_file (builder, branch)
+        if not os.path.exists (db_file):
+            # This takes care of our very first build.
+            parser.write_sum_file (cur_results, builder, branch, rev)
+            # If there was no previous baseline, then this run
+            # gets the honor.
+            if baseline is None:
+                baseline = cur_results
+                parser.write_baseline (baseline, builder, branch, rev)
+            return SUCCESS
+        con = sqlite3.connect (db_file)
+        c = con.cursor ()
+        c.execute ('SELECT commitid FROM logs WHERE branch = "%s" AND trysched = 0 ORDER BY timestamp DESC LIMIT 1' % branch)
+        prevcommit = c.fetchone ()[0]
+        con.close ()
+        # Switch to the right branch inside the BUILDER repo
+        # switch_to_branch (builder, branch, force_switch = False)
+        baseline = parser.read_baseline (builder, branch, prevcommit)
+        old_sum = parser.read_sum_file (builder, branch, prevcommit)
         result = SUCCESS
         if baseline is not None:
@@ -78,14 +103,14 @@ class GdbCatSumfileCommand(ShellCommand):
             self.addCompleteLog ('regressions', report)
             result = FAILURE
-        if not istrysched or istrysched == 'no':
-            parser.write_sum_file (cur_results, builder, branch)
+        if istrysched and istrysched == 'yes':
+            parser.write_try_build_sum_file (cur_results, builder, branch, rev)
+        else:
+            parser.write_sum_file (cur_results, builder, branch, rev)
             # If there was no previous baseline, then this run
             # gets the honor.
             if baseline is None:
                 baseline = cur_results
                 parser.write_baseline (baseline, builder, branch, rev)
-        else:
-            parser.write_try_build_sum_file (cur_results, builder, branch)
         return result
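
A minimal sketch (not from the commit) of the "find the last non-try commit for this branch" lookup that both new call sites perform, written with parameter binding instead of string interpolation; last_tested_commit is a hypothetical helper name, and the table layout comes from scripts/update-logs.sh below.

import sqlite3

def last_tested_commit (db_file, branch):
    con = sqlite3.connect (db_file)
    try:
        cur = con.execute ("SELECT commitid FROM logs "
                           "WHERE branch = ? AND trysched = 0 "
                           "ORDER BY timestamp DESC LIMIT 1", (branch,))
        row = cur.fetchone ()
        # None means the builder has never tested this branch.
        return row[0] if row else None
    finally:
        con.close ()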

sumfiles.py

@@ -6,6 +6,7 @@ from StringIO import StringIO
 # Necessary for ordered dictionaries.  We use them when the order or
 # the tests matters to us.
 from collections import OrderedDict
+import lzma

 # Helper regex for parse_sum_line.
 sum_matcher = re.compile('^(.?(PASS|FAIL)): (.*)$')
@@ -68,18 +69,16 @@ class DejaResults(object):
         # and to the set.
         out_dict[1][result].add (test_name)

-    def _write_sum_file(self, sum_dict, subdir, rev_or_branch, filename,
-                        header = None):
+    def _write_sum_file(self, sum_dict, builder, rev, filename, header = None):
         global gdb_web_base
-        if not rev_or_branch:
-            bdir = os.path.join (gdb_web_base, subdir)
-        else:
-            bdir = os.path.join (gdb_web_base, subdir, rev_or_branch)
-        if not os.path.isdir (bdir):
-            os.makedirs (bdir, 0755)
+        bdir = os.path.join (gdb_web_base, builder, rev[:2], rev)
+        if not os.path.exists (bdir):
+            os.makedirs (bdir)
         fname = os.path.join (bdir, filename)
         keys = sum_dict[0].keys ()
         mode = 'w'
-        old_umask = os.umask (0133)
         if header:
             with open (fname, 'w') as f:
                 f.write (header)
@@ -87,16 +86,17 @@ class DejaResults(object):
         with open (fname, mode) as f:
             for k in keys:
                 f.write (sum_dict[0][k] + ': ' + k + '\n')
-        os.umask (old_umask)

-    def write_sum_file(self, sum_dict, builder, branch):
-        self._write_sum_file (sum_dict, builder, None, 'gdb.sum')
+    def write_sum_file(self, sum_dict, builder, branch, rev):
+        self._write_sum_file (sum_dict, builder, rev, 'gdb.sum')

-    def write_try_build_sum_file (self, sum_dict, builder, branch):
-        self._write_sum_file (sum_dict, builder, None, 'trybuild_gdb.sum',
+    def write_try_build_sum_file (self, sum_dict, builder, branch, rev):
+        self._write_sum_file (sum_dict, builder, rev, 'trybuild_gdb.sum',
                               header = "### THIS SUM FILE WAS GENERATED BY A TRY BUILD ###\n\n")

     def write_baseline(self, sum_dict, builder, branch, rev):
-        self._write_sum_file(sum_dict, builder, None, 'baseline',
+        self._write_sum_file(sum_dict, builder, rev, 'baseline',
                              header = "### THIS BASELINE WAS LAST UPDATED BY COMMIT %s ###\n\n" % rev)

     # Read a .sum file.
@@ -105,40 +105,46 @@ class DejaResults(object):
     # revision; to read the baseline file for a branch, use `read_baseline'.
     # Returns a dictionary holding the .sum contents, or None if the
     # file did not exist.
-    def _read_sum_file(self, subdir, rev_or_branch, filename,
-                       is_racy_file = False):
+    def _read_sum_file(self, builder, branch, rev, filename,
+                       is_racy_file = False, is_xfail_file = False):
         global gdb_web_base
-        if not rev_or_branch:
-            fname = os.path.join (gdb_web_base, subdir, filename)
+        if is_xfail_file:
+            fname = os.path.join (gdb_web_base, builder, 'xfails', branch, filename)
         else:
-            fname = os.path.join (gdb_web_base, subdir, rev_or_branch, filename)
-        if os.path.exists (fname):
         result = []
         # result[0] is the OrderedDict containing all the tests
         # and results.
         result.append (OrderedDict ())
         # result[1] is a dictionary containing sets of tests
         result.append (dict ())
+        if os.path.exists (fname):
             with open (fname, 'r') as f:
                 for line in f:
                     self.parse_sum_line (result, line,
                                          is_racy_file = is_racy_file)
+        elif os.path.exists (fname + '.xz'):
+            with lzma.open (fname + '.xz', 'r') as f:
+                for line in f:
+                    self.parse_sum_line (result, line,
+                                         is_racy_file = is_racy_file)
         else:
-            result = None
+            return None
         return result

-    def read_sum_file (self, builder, branch):
-        return self._read_sum_file (builder, None, 'gdb.sum')
+    def read_sum_file (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'gdb.sum')

-    def read_baseline(self, builder, branch):
-        return self._read_sum_file (builder, None, 'baseline')
+    def read_baseline(self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'baseline')

-    def read_xfail (self, builder, branch):
-        return self._read_sum_file (builder, os.path.join ('xfails', branch),
-                                    'xfail')
+    def read_xfail (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'xfail', is_xfail_file = True)

-    def read_old_sum_file (self, builder, branch):
-        return self._read_sum_file (builder, None, 'previous_gdb.sum')
+    def read_old_sum_file (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'previous_gdb.sum')

     # Parse some text as a .sum file and return the resulting
     # dictionary.
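
A sketch of the per-commit layout that the new _read_sum_file/_write_sum_file code expects; results_path is a hypothetical helper used only for illustration, and gdb_web_base comes from master.cfg via set_web_base.

import os

def results_path (gdb_web_base, builder, rev, filename):
    # e.g. ~/results/<builder>/3c/3c736356e5.../gdb.sum
    # (a compressed gdb.sum.xz is accepted as a fallback when reading)
    return os.path.join (gdb_web_base, builder, rev[:2], rev, filename)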

master.cfg

@@ -15,17 +15,10 @@ from buildbot.schedulers.timed import Nightly
 from buildbot.schedulers.trysched import Try_Jobdir
 from buildbot.schedulers.forcesched import ForceScheduler
 from buildbot.process import factory
-from buildbot.process.properties import WithProperties, Property
-from buildbot.steps.shell import Compile
-from buildbot.steps.shell import Configure
-from buildbot.steps.shell import ShellCommand
-from buildbot.steps.shell import SetPropertyFromCommand
-from buildbot.steps.transfer import FileUpload
-from buildbot.steps.source.git import Git
-from buildbot.steps.slave import RemoveDirectory
+from buildbot.plugins import util, reporters, steps, worker
+from buildbot.reporters import utils
 from buildbot.changes.filter import ChangeFilter
-from buildbot.buildslave import BuildSlave
-from buildbot.status.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
+from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
 from buildbot.interfaces import IEmailLookup
 from zope.interface import implements
 from gdbcommand import CopyOldGDBSumFile, GdbCatSumfileCommand
@@ -34,10 +27,12 @@ from racyanalyze import GDBAnalyzeRacyTests
 from urllib import quote
 from sumfiles import DejaResults, set_web_base
-import os.path
+import os
 import urllib
 from json import load
 import re
+import jinja2
+import git

 ####################################
 ####################################
@@ -55,9 +50,8 @@ c = BuildmasterConfig = {}
 # Base directory for the web server.  This is needed in order to
 # compare the test results.
-gdb_web_base = os.path.expanduser (os.path.join (basedir,
-                                                 'public_html',
-                                                 'results'))
+gdb_web_base = os.path.expanduser ("~/results/")
 set_web_base (gdb_web_base)

 GDB_MAIL_FROM = 'sergiodj+buildbot@sergiodj.net'
@@ -81,10 +75,10 @@ def should_watch_branch (branch):
     return False

 from buildbot.changes.gitpoller import GitPoller
+master_git_repo = os.path.expanduser (os.path.join ('~/', 'buildbot-master-binutils-gdb'))
 c['change_source'] = []
-c['change_source'].append(GitPoller(
-        repourl = 'git://git.libreplanetbr.org/gdb.git',
-        workdir = os.path.expanduser (os.path.join ('~/', 'buildbot-master-binutils-gdb')),
+c['change_source'].append (GitPoller (repourl = 'git://git.libreplanetbr.org/gdb.git',
+                                      workdir = master_git_repo,
         branches = should_watch_branch,
         pollinterval = 60 * 3))
@@ -567,8 +561,7 @@ mn = MyMailNotifier(fromaddr = GDB_MAIL_FROM,
                     messageFormatter = MessageGDBTesters,
                     tags = [ "MAIL" ],
                     extraHeaders = { 'X-GDB-Buildbot' : '1',
-                                     'In-Reply-To' : WithProperties ("<%s@gdb-build>",
-                                                                     'got_revision') })
+                                     'In-Reply-To' : util.Interpolate ("<%(prop:got_revision)s@gdb-build>")})

 class LookupEmailTryBuild (object):
     implements (IEmailLookup)
@@ -607,7 +600,7 @@ c['db'] = {
 ## the documentation on each build step class to understand what it
 ## does.

-class CloneOrUpdateGDBMasterRepo (Git):
+class CloneOrUpdateGDBMasterRepo (steps.Git):
     """This build step updates the so-called "master" git repository.  For
 each buildslave, we have one master GDB git repository, which is then
@@ -629,18 +622,18 @@ from the principal repository."""
     name = "update gdb master repo"
     description = r"fetching GDB master sources"
     descriptionDone = r"fetched GDB master sources"
-    def __init__ (self):
-        Git.__init__ (self,
+    def __init__ (self, **kwargs):
+        steps.Git.__init__ (self,
                       repourl = 'git://git.libreplanetbr.org/gdb.git',
-                      workdir = WithProperties (r"%s/../binutils-gdb-master/",
-                                                r'builddir'),
+                      workdir = util.Interpolate ("%(prop:builddir)s/../binutils-gdb-master/"),
                       retryFetch = True,
-                      mode = r'incremental',
-                      progress = True)
+                      mode = 'incremental',
+                      progress = True,
+                      **kwargs)
         self.haltOnFailure = False
         self.flunkOnFailure = False

-class CloneOrUpdateGDBRepo (Git):
+class CloneOrUpdateGDBRepo (steps.Git):
     """This build step is used to clone the GDB git repository that will
 be used by an specific builder (inside a buildslave).  The trick here
 is to use the "reference" parameter to initialize the class, which
@@ -649,24 +642,24 @@ present at the reference repository (i.e., locally)."""
     name = "clone gdb repo"
     description = "fetching GDB sources"
     descriptionDone = "fetched GDB sources"
-    def __init__ (self):
-        Git.__init__ (self,
+    def __init__ (self, **kwargs):
+        steps.Git.__init__ (self,
                       repourl = 'git://git.libreplanetbr.org/gdb.git',
-                      workdir = WithProperties ('%s/binutils-gdb/', 'builddir'),
-                      reference = WithProperties ("%s/../binutils-gdb-master/",
-                                                  'builddir'),
+                      workdir = util.Interpolate ('%(prop:builddir)s/binutils-gdb/'),
+                      reference = util.Interpolate ("%(prop:builddir)s/../binutils-gdb-master/"),
                       retryFetch = True,
-                      progress = True)
+                      progress = True,
+                      **kwargs)

-class ConfigureGDB (Configure):
+class ConfigureGDB (steps.Configure):
     """This build step runs the GDB "configure" command, providing extra
 flags for it if needed."""
     name = "configure gdb"
     description = r"configure GDB"
     descriptionDone = r"configured GDB"
     def __init__ (self, extra_conf_flags, **kwargs):
-        Configure.__init__ (self, **kwargs)
-        self.workdir = WithProperties (r"%s", r'builddir')
+        steps.Configure.__init__ (self, **kwargs)
+        self.workdir = util.Interpolate ('%(prop:builddir)s/build/')
         self.command = ['../binutils-gdb/configure',
                         '--disable-binutils',
                         '--disable-ld',
@@ -675,7 +668,7 @@ flags for it if needed."""
                         '--disable-sim',
                         '--disable-gprof'] + extra_conf_flags

-class CompileGDB (Compile):
+class CompileGDB (steps.Compile):
     """This build step runs "make" to compile the GDB sources.  It
 provides extra "make" flags to "make" if needed.  It also uses the
 "jobs" properties to figure out how many parallel jobs we can use when
@@ -687,27 +680,27 @@ buildslave."""
     descriptionDone = r"compiled GDB"
     def __init__ (self, make_command = 'make', extra_make_flags = [],
                   **kwargs):
-        Compile.__init__ (self, **kwargs)
-        self.workdir = WithProperties (r"%s", r'builddir')
+        steps.Compile.__init__ (self, **kwargs)
+        self.workdir = util.Interpolate ('%(prop:builddir)s/build/')
         self.command = ['%s' % make_command,
-                        WithProperties (r"-j%s", r'jobs'),
+                        util.Interpolate ("-j%(prop:jobs)s"),
                         'all'] + extra_make_flags

-class MakeTAGSGDB (ShellCommand):
+class MakeTAGSGDB (steps.ShellCommand):
     name = 'make tags'
     description = 'running make TAGS'
     descriptionDone = 'ran make TAGS'
     def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, make_command = 'make',
+        steps.ShellCommand.__init__ (self, make_command = 'make',
                                **kwargs)
-        self.workdir = WithProperties ("%s/build/gdb", 'builddir')
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb")
         self.command = [ '%s' % make_command, 'TAGS' ]
         # We do not want to stop testing when this command fails.
         self.haltOnFailure = False
         self.flunkOnFailure = False
         self.flunkOnWarnings = False

-class TestGDB (ShellCommand):
+class TestGDB (steps.ShellCommand):
     """This build step runs the full testsuite for GDB.  It can run in
 parallel mode (see BuildAndTestGDBFactory below), and it will also
 provide any extra flags for "make" if needed.  Unfortunately, because
@@ -718,12 +711,12 @@ BuildBot halt on failure."""
     descriptionDone = r"tested GDB"
     def __init__ (self, make_command = 'make', extra_make_check_flags = [],
                   test_env = {}, **kwargs):
-        ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
+        steps.ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
                                                   1 : SUCCESS,
                                                   2 : SUCCESS },
                                **kwargs)
-        self.workdir = WithProperties (r"%s/build/gdb/testsuite", r'builddir')
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite")
         self.command = ['%s' % make_command,
                         '-k',
                         'check'] + extra_make_check_flags
@@ -734,7 +727,7 @@ BuildBot halt on failure."""
         self.flunkOnFailure = False
         self.flunkOnWarnings = False

-class TestRacyGDB (ShellCommand):
+class TestRacyGDB (steps.ShellCommand):
     """This build step runs the full testsuite for GDB for racy testcases.
 It can run in parallel mode (see BuildAndTestGDBFactory below), and it
 will also provide any extra flags for "make" if needed.
@@ -745,12 +738,12 @@ command must not make BuildBot halt on failure."""
     descriptionDone = r"tested GDB (racy)"
     def __init__ (self, make_command = 'make', extra_make_check_flags = [],
                   test_env = {}, **kwargs):
-        ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
+        steps.ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
                                                   1 : SUCCESS,
                                                   2 : SUCCESS },
                                **kwargs)
-        self.workdir = WithProperties (r"%s/build/gdb/testsuite", r'builddir')
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite")
         self.command = ['%s' % make_command,
                         '-k',
                         'check',
@@ -762,26 +755,6 @@ command must not make BuildBot halt on failure."""
         self.flunkOnFailure = False
         self.flunkOnWarnings = False

-class CleanupBreakageLockfile (ShellCommand):
-    """Clean up (i.e., remove) the breakage lockfile for a specific builder."""
-    name = "cleanup breakage lockfile"
-    description = "cleaning up breakage lockfile"
-    descriptionDone = "cleaned up breakage lockfile"
-    command = [ 'true' ]
-    def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, **kwargs)
-    def evaluateCommand (self, cmd):
-        builder = self.getProperty ('buildername')
-        branch = self.getProperty ('branch')
-        lockfile = make_breakage_lockfile_name (builder, branch)
-        if os.path.isfile (lockfile):
-            os.remove (lockfile)
-        return SUCCESS
-
 def scheduler_is_racy (step):
     return step.getProperty ('scheduler').startswith ('racy')
@@ -900,29 +873,28 @@ The parameters of the class are:
         # mjw asked me to delay the build by X number of seconds.
         if initial_delay:
-            self.addStep (ShellCommand (command = ['sleep', '%d' % initial_delay],
+            self.addStep (steps.ShellCommand (command = ['sleep', '%d' % initial_delay],
                           description = "delaying start of build by %d seconds" % initial_delay,
                           descriptionDone = "delayed start of build by %d seconds" % initial_delay))

-        self.addStep (RemoveDirectory (dir = WithProperties (r"%s/build",
-                                                             r'builddir'),
-                      description = r"removing old build dir",
-                      descriptionDone = r"removed old build dir"))
-        self.addStep (CloneOrUpdateGDBMasterRepo ())
+        self.addStep (steps.RemoveDirectory (dir = util.Interpolate ("%(prop:builddir)s/build"),
+                      description = "removing old build dir",
+                      descriptionDone = "removed old build dir"))
+        self.addStep (CloneOrUpdateGDBMasterRepo (hideStepIf = True))
         self.addStep (CloneOrUpdateGDBRepo ())

         if self.run_testsuite:
             self.addStep (CopyOldGDBSumFile (doStepIf = scheduler_is_not_racy_try_do,
-                                             hideStepIf = scheduler_is_racy_try_hide))
+                                             hideStepIf = True))
         if not self.extra_conf_flags:
             self.extra_conf_flags = []
         if self.enable_targets_all:
-            self.extra_conf_flags.append (r'--enable-targets=all')
+            self.extra_conf_flags.append ('--enable-targets=all')
         if self.use_system_debuginfo:
-            self.extra_conf_flags.append (r'--with-separate-debug-dir=/usr/lib/debug')
+            self.extra_conf_flags.append ('--with-separate-debug-dir=/usr/lib/debug')

         self.addStep (self.ConfigureClass (self.extra_conf_flags + architecture_triplet,
                                            haltOnFailure = True))
@@ -935,7 +907,9 @@ The parameters of the class are:
         # This last will be executed when the build succeeds.  It is
         # needed in order to cleanup the breakage lockfile, if it
         # exists.
-        self.addStep (CleanupBreakageLockfile (hideStepIf = True))
+        self.addStep (steps.MasterShellCommand (command = [ 'rm', '-f',
+                          util.Interpolate ('/tmp/gdb-buildbot-breakage-report-%(prop:branch)s-%(prop:buildername)s') ],
+                      hideStepIf = True))

         # Disabling this until we figure out how to properly run + test
         # self.addStep (MakeTAGSGDB ())
@@ -948,7 +922,7 @@ The parameters of the class are:
             self.test_env = {}

         if self.test_parallel:
-            self.extra_make_check_flags.append (WithProperties (r"-j%s", r'jobs'))
+            self.extra_make_check_flags.append (util.Interpolate ("-j%(prop:jobs)s"))
             self.extra_make_check_flags.append (r'FORCE_PARALLEL=1')

         self.addStep (self.TestClass (self.make_command, self.extra_make_check_flags,
@@ -956,17 +930,16 @@ The parameters of the class are:
                                       doStepIf = scheduler_is_not_racy_do,
                                       hideStepIf = scheduler_is_racy_hide))
-        self.addStep (GdbCatSumfileCommand (workdir = WithProperties (r'%s/build/gdb/testsuite',
-                                                                      r'builddir'),
-                                            description = r'analyze test results',
+        self.addStep (GdbCatSumfileCommand (workdir = util.Interpolate ('%(prop:builddir)s/build/gdb/testsuite'),
+                                            description = 'analyzing test results',
+                                            descriptionDone = 'analyzed test results',
                                             doStepIf = scheduler_is_not_racy_do,
                                             hideStepIf = scheduler_is_racy_hide))
-        self.addStep (FileUpload (slavesrc = WithProperties (r"%s/build/gdb/testsuite/gdb.log",
-                                                             r'builddir'),
-                                  masterdest = WithProperties (r"public_html/results/%s/gdb.log",
-                                                               r'buildername'),
+        self.addStep (steps.FileUpload (workersrc = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite/gdb.log"),
+                                        masterdest = util.Interpolate ("public_html/results/%(prop:buildername)s/tmp/%(prop:got_revision)s/gdb.log"),
+                                        mode = 0644,
                                   doStepIf = scheduler_is_not_racy_do,
-                                  hideStepIf = scheduler_is_racy_hide))
+                                        hideStepIf = True))
         self.addStep (SaveGDBResults (doStepIf = scheduler_is_not_racy_do,
                                       hideStepIf = scheduler_is_racy_hide))
@@ -977,8 +950,7 @@ The parameters of the class are:
                                           doStepIf = scheduler_is_racy_do,
                                           hideStepIf = scheduler_is_not_racy_hide))
-        self.addStep (GDBAnalyzeRacyTests (workdir = WithProperties ('%s/build/gdb/testsuite',
-                                                                     'builddir'),
+        self.addStep (GDBAnalyzeRacyTests (workdir = util.Interpolate ('%(prop:builddir)s/build/gdb/testsuite'),
                                            description = 'analyzing racy tests',
                                            descriptionDone = 'analyzed racy tests',
                                            doStepIf = scheduler_is_racy_do,
@@ -1061,16 +1033,16 @@ class RunTestGDBNativeExtendedGDBServer_c32t32 (BuildAndTestGDBFactory):
 class RunTestGDBIndexBuild (BuildAndTestGDBFactory):
     """Testing with the "cc-with-tweaks.sh" passing -i."""
     def __init__ (self, **kwargs):
-        self.extra_make_check_flags = [ WithProperties (r'CC_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc', r'builddir'),
-                                        WithProperties (r'CXX_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++', r'builddir') ]
+        self.extra_make_check_flags = [ util.Interpolate ('CC_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc'),
+                                        util.Interpolate ('CXX_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++') ]
         BuildAndTestGDBFactory.__init__ (self, **kwargs)

 class RunTestGDBIndexBuild_c32t32 (BuildAndTestGDBFactory):
     """Testing with the "cc-with-tweaks.sh" passing -i.  32-bit version"""
     def __init__ (self, **kwargs):
         self.extra_conf_flags = [ 'CFLAGS=-m32', 'CXXFLAGS=-m32' ]
-        self.extra_make_check_flags = [ WithProperties (r'CC_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc', r'builddir'),
-                                        WithProperties (r'CXX_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++', r'builddir'),
+        self.extra_make_check_flags = [ util.Interpolate ('CC_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc'),
+                                        util.Interpolate ('CXX_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++'),
                                         'RUNTESTFLAGS=--target_board unix/-m32' ]
         BuildAndTestGDBFactory.__init__ (self, **kwargs)
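
Most of the master.cfg churn above is mechanical: buildbot 0.9 moves the step classes under buildbot.plugins.steps and replaces WithProperties with util.Interpolate. A minimal sketch of the translation pattern used throughout the hunk above:

from buildbot.plugins import util

# Old style (buildbot 0.8.x), as removed above:
#   workdir = WithProperties (r"%s/build/gdb/testsuite", r'builddir')
# New style (buildbot 0.9.x), as added above:
workdir = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite")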

scripts/update-logs.sh  (new executable file, 124 lines added)

@ -0,0 +1,124 @@
#!/bin/bash
set -e
umask u=rw,g=r,o=r
usage ()
{
cat > /dev/stderr <<EOF
$0 -- Update build logs for builder
Usage: $0 [-c|--commit COMMIT] [-b|--builder BUILDER] [--branch BRANCH] [-d|--base-directory DIR] [-t|--is-try-sched yes|no] [-h|--help]
EOF
}
err ()
{
local msg=$1
echo "ERROR: $msg" > /dev/stderr
exit 1
}
msg ()
{
local msg=$1
echo ">>> INFO: $msg"
}
while test "$1" != "" ; do
case "$1" in
"-c"|"--commit")
COMMIT=$2
shift 2
;;
"-b"|"--builder")
BUILDER=$2
shift 2
;;
"-d"|"--base-directory")
BASE_DIR=$2
shift 2
;;
"-h"|"--help")
usage
exit 0
;;
"-t"|"--is-try-sched")
IS_TRY_SCHED=$2
shift 2
;;
"--branch")
BRANCH=$2
shift 2
;;
*)
usage
exit 1
;;
esac
done
DIR=$BASE_DIR/$BUILDER/
if test ! -d $DIR ; then
msg "$DIR is not a valid directory. Creeating it..."
mkdir -p $DIR
fi
cd $DIR
DB_NAME=$DIR/${BUILDER}.db
if test ! -f $DB_NAME ; then
msg "Database $DB_NAME does not exist. Creating it..."
sqlite3 $DB_NAME "CREATE TABLE logs(commitid TEXT, branch TEXT DEFAULT 'master', trysched BOOLEAN DEFAULT 0, timestamp TIMESTAMP PRIMARY KEY DEFAULT (strftime('%s', 'now')) NOT NULL)"
fi
COMMIT_2_DIG=`echo $COMMIT | sed 's/^\(..\).*$/\1/'`
CDIR=$COMMIT_2_DIG/$COMMIT/
ISTRY=0
if test "$IS_TRY_SCHED" = "yes" ; then
CDIR=try/${CDIR}
ISTRY=1
fi
if test -d $CDIR ; then
# If this is a try build, the user is doing a rebuild.
# If this is a normal build, someone triggered a rebuild.
# Either way, we need to delete the current log dir.
msg "Log dir $CDIR already exists. Deleting it so that we can update the logs..."
rm --verbose -rf $CDIR
fi
msg "Creating directory structure $CDIR..."
mkdir --verbose -p $CDIR
cd $CDIR
TMP_DIR=$DIR/tmp/$COMMIT/
msg "Moving log files to $PWD..."
mv --verbose $TMP_DIR/* .
rmdir $TMP_DIR
msg "Compressing log files..."
xz --verbose --compress *
PREV_COMMIT=`sqlite3 $DB_NAME "SELECT commitid FROM logs WHERE branch = '$BRANCH' AND trysched = 0 ORDER BY timestamp DESC LIMIT 1"`
msg "Update database..."
sqlite3 $DB_NAME "INSERT INTO logs(commitid, branch, trysched) VALUES('$COMMIT', '$BRANCH', $ISTRY)"
msg "Creating README.txt..."
cat > README.txt <<EOF
== README -- Logs for commit $COMMIT ==
Branch tested: $BRANCH
Previous commit: $PREV_COMMIT
EOF
exit 0
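
For illustration only, a short sketch of inspecting the per-builder database that update-logs.sh maintains; the path and builder name are examples, and the column names come from the CREATE TABLE statement above.

import os
import sqlite3

# update-logs.sh creates <base-directory>/<builder>/<builder>.db
db = os.path.expanduser ("~/results/some-builder/some-builder.db")
con = sqlite3.connect (db)
# List the ten most recently recorded builds for this builder.
for commitid, branch, trysched, ts in con.execute (
        "SELECT commitid, branch, trysched, timestamp FROM logs "
        "ORDER BY timestamp DESC LIMIT 10"):
    print ("%s  %s  try=%s  %s" % (commitid, branch, trysched, ts))
con.close ()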