Initial attempt to change the way we store results
This was imported (with modifications) from the 0.9-prep branch.
Parent: d7efeb9b57
Commit: 3c736356e5
5 changed files with 333 additions and 186 deletions
lib/fsdb.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+# DB-like with filesystem
+
+from buildbot.plugins import util, steps
+from sumfiles import get_web_base
+import os
+
+class SaveGDBResults (steps.MasterShellCommand):
+    name = 'save build results'
+    description = 'saving build results'
+    descriptionDone = 'saved build results'
+
+    def __init__ (self, **kwargs):
+        steps.MasterShellCommand.__init__ (self, **kwargs)
+        self.command = [ os.path.expanduser ("~/scripts/update-logs.sh"),
+                         "--commit", util.Property ('got_revision'),
+                         "--builder", util.Property ('buildername'),
+                         "--base-directory", get_web_base (),
+                         "--branch", util.Property ('branch'),
+                         "--is-try-sched", util.Property ('isTrySched',
+                                                          default = 'no') ]
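For context, a minimal sketch of how this new step might be attached to a build factory; the factory itself and the point where the step is added are assumptions for illustration, not part of this commit:

```python
from buildbot.process.factory import BuildFactory
from fsdb import SaveGDBResults

f = BuildFactory()
# ... checkout, configure, compile and test steps would go here ...
# SaveGDBResults runs on the master and invokes ~/scripts/update-logs.sh
# with the tested commit, builder name, branch and try-scheduler flag.
f.addStep(SaveGDBResults())
```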
@@ -1,67 +1,92 @@
 # GDB .sum-fetching command.
 
-from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
-from buildbot.steps.shell import ShellCommand
+from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
+from buildbot.plugins import steps, util
 from sumfiles import DejaResults, get_web_base
 from gdbgitdb import switch_to_branch
 from shutil import copyfile
 import os
+import sqlite3
 
-class CopyOldGDBSumFile (ShellCommand):
+@util.renderer
+def create_copy_command (props):
+    rev = props.getProperty ('got_revision')
+    builder = props.getProperty ('buildername')
+    istry = props.getProperty ('isTrySched')
+    branch = props.getProperty ('branch')
+    command = [ 'cp', '-a' ]
+
+    db_file = os.path.join (get_web_base (), builder, builder + '.db')
+    if not os.path.exists (db_file):
+        # This is probably the first commit being tested. Don't do anything.
+        return [ 'true' ]
+
+    con = sqlite3.connect (db_file)
+    c = con.cursor ()
+    c.execute ('SELECT commitid FROM logs WHERE branch = "%s" AND trysched = 0 ORDER BY timestamp DESC LIMIT 1' % branch)
+    commit = c.fetchone ()[0]
+    con.close ()
+
+    from_path = os.path.join (get_web_base (), commit[:2], commit, 'gdb.sum')
+    if istry and istry == 'yes':
+        to_path = os.path.join (get_web_base (), 'try', rev[:2], rev, 'previous_gdb.sum')
+    else:
+        to_path = os.path.join (get_web_base (), rev[:2], rev, 'previous_gdb.sum')
+
+    command += [ from_path, to_path ]
+
+    return command
+
+class CopyOldGDBSumFile (steps.MasterShellCommand):
     """Copy the current gdb.sum file into the old_gdb.sum file."""
     name = "copy gdb.sum file"
     description = "copying previous gdb.sum file"
     descriptionDone = "copied previous gdb.sum file"
     command = [ 'true' ]
 
     def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, **kwargs)
+        steps.MasterShellCommand.__init__ (self, command = create_copy_command, **kwargs)
 
-    def evaluateCommand (self, cmd):
-        rev = self.getProperty('got_revision')
-        builder = self.getProperty('buildername')
-        isrebuild = self.getProperty ('isRebuild')
-        branch = self.getProperty('branch')
-        wb = get_web_base ()
-        if branch is None:
-            branch = 'master'
-
-        if isrebuild and isrebuild == 'yes':
-            return SUCCESS
-
-        # Switch to the right branch inside the BUILDER repo
-        switch_to_branch (builder, branch, force_switch = True)
-
-        try:
-            copyfile ("%s/%s/gdb.sum" % (wb, builder),
-                      "%s/%s/previous_gdb.sum" % (wb, builder))
-        except IOError:
-            # If the dest file does not exist, ignore
-            pass
-
-        return SUCCESS
 
-class GdbCatSumfileCommand(ShellCommand):
+class GdbCatSumfileCommand(steps.ShellCommand):
     name = 'regressions'
     command = ['cat', 'gdb.sum']
 
     def __init__(self, **kwargs):
-        ShellCommand.__init__(self, **kwargs)
+        steps.ShellCommand.__init__(self, **kwargs)
 
     def evaluateCommand(self, cmd):
         rev = self.getProperty('got_revision')
         builder = self.getProperty('buildername')
         istrysched = self.getProperty('isTrySched')
         branch = self.getProperty('branch')
+        db_file = os.path.join (get_web_base (), builder, builder + '.db')
+        parser = DejaResults()
+        cur_results = parser.read_sum_text(self.getLog('stdio').getText())
+        baseline = None
+
         if branch is None:
             branch = 'master'
 
-        # Switch to the right branch inside the BUILDER repo
-        switch_to_branch (builder, branch, force_switch = False)
+        if not os.path.exists (db_file):
+            # This takes care of our very first build.
+            parser.write_sum_file (cur_results, builder, branch, rev)
+            # If there was no previous baseline, then this run
+            # gets the honor.
+            if baseline is None:
+                baseline = cur_results
+            parser.write_baseline (baseline, builder, branch, rev)
+            return SUCCESS
 
-        parser = DejaResults()
-        cur_results = parser.read_sum_text(self.getLog('stdio').getText())
-        baseline = parser.read_baseline (builder, branch)
-        old_sum = parser.read_sum_file (builder, branch)
+        con = sqlite3.connect (db_file)
+        c = con.cursor ()
+        c.execute ('SELECT commitid FROM logs WHERE branch = "%s" AND trysched = 0 ORDER BY timestamp DESC LIMIT 1' % branch)
+        prevcommit = c.fetchone ()[0]
+        con.close ()
+
+        # Switch to the right branch inside the BUILDER repo
+        # switch_to_branch (builder, branch, force_switch = False)
+
+        baseline = parser.read_baseline (builder, branch, prevcommit)
+        old_sum = parser.read_sum_file (builder, branch, prevcommit)
         result = SUCCESS
 
         if baseline is not None:
@@ -78,14 +103,14 @@ class GdbCatSumfileCommand(ShellCommand):
             self.addCompleteLog ('regressions', report)
             result = FAILURE
 
-        if not istrysched or istrysched == 'no':
-            parser.write_sum_file (cur_results, builder, branch)
+        if istrysched and istrysched == 'yes':
+            parser.write_try_build_sum_file (cur_results, builder, branch, rev)
+        else:
+            parser.write_sum_file (cur_results, builder, branch, rev)
             # If there was no previous baseline, then this run
             # gets the honor.
             if baseline is None:
                 baseline = cur_results
             parser.write_baseline (baseline, builder, branch, rev)
-        else:
-            parser.write_try_build_sum_file (cur_results, builder, branch)
 
         return result
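The previous-commit lookup above appears twice; for reference, a standalone sketch of the same query using sqlite3 parameter binding. The logs schema comes from the CREATE TABLE statement in scripts/update-logs.sh below; the function name and the db_file argument are illustrative:

```python
import sqlite3

def previous_commit(db_file, branch):
    # Most recent non-try commit recorded for this branch, or None if
    # the builder's logs table is still empty.
    con = sqlite3.connect(db_file)
    try:
        cur = con.execute(
            "SELECT commitid FROM logs"
            " WHERE branch = ? AND trysched = 0"
            " ORDER BY timestamp DESC LIMIT 1",
            (branch,))
        row = cur.fetchone()
        return row[0] if row is not None else None
    finally:
        con.close()
```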
@@ -6,6 +6,7 @@ from StringIO import StringIO
 # Necessary for ordered dictionaries.  We use them when the order or
 # the tests matters to us.
 from collections import OrderedDict
+import lzma
 
 # Helper regex for parse_sum_line.
 sum_matcher = re.compile('^(.?(PASS|FAIL)): (.*)$')
@@ -68,18 +69,16 @@ class DejaResults(object):
         # and to the set.
         out_dict[1][result].add (test_name)
 
-    def _write_sum_file(self, sum_dict, subdir, rev_or_branch, filename,
-                        header = None):
+    def _write_sum_file(self, sum_dict, builder, rev, filename, header = None):
         global gdb_web_base
-        if not rev_or_branch:
-            bdir = os.path.join (gdb_web_base, subdir)
-        else:
-            bdir = os.path.join (gdb_web_base, subdir, rev_or_branch)
-        if not os.path.isdir (bdir):
-            os.makedirs (bdir, 0755)
-
+        bdir = os.path.join (gdb_web_base, builder, rev[:2], rev)
+        if not os.path.exists (bdir):
+            os.makedirs (bdir)
         fname = os.path.join (bdir, filename)
         keys = sum_dict[0].keys ()
         mode = 'w'
         old_umask = os.umask (0133)
         if header:
             with open (fname, 'w') as f:
                 f.write (header)
@@ -87,16 +86,17 @@ class DejaResults(object):
         with open (fname, mode) as f:
             for k in keys:
                 f.write (sum_dict[0][k] + ': ' + k + '\n')
         os.umask (old_umask)
 
-    def write_sum_file(self, sum_dict, builder, branch):
-        self._write_sum_file (sum_dict, builder, None, 'gdb.sum')
+    def write_sum_file(self, sum_dict, builder, branch, rev):
+        self._write_sum_file (sum_dict, builder, rev, 'gdb.sum')
 
-    def write_try_build_sum_file (self, sum_dict, builder, branch):
-        self._write_sum_file (sum_dict, builder, None, 'trybuild_gdb.sum',
+    def write_try_build_sum_file (self, sum_dict, builder, branch, rev):
+        self._write_sum_file (sum_dict, builder, rev, 'trybuild_gdb.sum',
                               header = "### THIS SUM FILE WAS GENERATED BY A TRY BUILD ###\n\n")
 
     def write_baseline(self, sum_dict, builder, branch, rev):
-        self._write_sum_file(sum_dict, builder, None, 'baseline',
+        self._write_sum_file(sum_dict, builder, rev, 'baseline',
                              header = "### THIS BASELINE WAS LAST UPDATED BY COMMIT %s ###\n\n" % rev)
 
     # Read a .sum file.
@@ -105,40 +105,46 @@ class DejaResults(object):
     # revision; to read the baseline file for a branch, use `read_baseline'.
     # Returns a dictionary holding the .sum contents, or None if the
     # file did not exist.
-    def _read_sum_file(self, subdir, rev_or_branch, filename,
-                       is_racy_file = False):
+    def _read_sum_file(self, builder, branch, rev, filename,
+                       is_racy_file = False, is_xfail_file = False):
         global gdb_web_base
-        if not rev_or_branch:
-            fname = os.path.join (gdb_web_base, subdir, filename)
+
+        if is_xfail_file:
+            fname = os.path.join (gdb_web_base, builder, 'xfails', branch, filename)
         else:
-            fname = os.path.join (gdb_web_base, subdir, rev_or_branch, filename)
+            fname = os.path.join (gdb_web_base, builder, rev[:2], rev, filename)
+        result = []
+        # result[0] is the OrderedDict containing all the tests
+        # and results.
+        result.append (OrderedDict ())
+        # result[1] is a dictionary containing sets of tests
+        result.append (dict ())
+
         if os.path.exists (fname):
-            result = []
-            # result[0] is the OrderedDict containing all the tests
-            # and results.
-            result.append (OrderedDict ())
-            # result[1] is a dictionary containing sets of tests
-            result.append (dict ())
             with open (fname, 'r') as f:
                 for line in f:
                     self.parse_sum_line (result, line,
                                          is_racy_file = is_racy_file)
+        elif os.path.exists (fname + '.xz'):
+            with lzma.open (fname + '.xz', 'r') as f:
+                for line in f:
+                    self.parse_sum_line (result, line,
+                                         is_racy_file = is_racy_file)
         else:
-            result = None
+            return None
         return result
 
-    def read_sum_file (self, builder, branch):
-        return self._read_sum_file (builder, None, 'gdb.sum')
+    def read_sum_file (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'gdb.sum')
 
-    def read_baseline(self, builder, branch):
-        return self._read_sum_file (builder, None, 'baseline')
+    def read_baseline(self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'baseline')
 
-    def read_xfail (self, builder, branch):
-        return self._read_sum_file (builder, os.path.join ('xfails', branch),
-                                    'xfail')
+    def read_xfail (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'xfail', is_xfail_file = True)
 
-    def read_old_sum_file (self, builder, branch):
-        return self._read_sum_file (builder, None, 'previous_gdb.sum')
+    def read_old_sum_file (self, builder, branch, rev):
+        return self._read_sum_file (builder, branch, rev, 'previous_gdb.sum')
 
     # Parse some text as a .sum file and return the resulting
     # dictionary.
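With these changes, .sum files are keyed by builder and commit rather than by branch: a results file for revision rev lives under "web base"/builder/rev[:2]/rev/. A small sketch of the resulting round trip, using the reader and writer methods shown in the diff; the base directory, builder name and revision below are made up for illustration:

```python
from sumfiles import DejaResults, set_web_base

set_web_base('/tmp/results')   # hypothetical web base directory
parser = DejaResults()

builder = 'my-builder'
rev = 'abcdef0123456789'

# Parse a .sum file produced by the testsuite and store it under
# /tmp/results/my-builder/ab/abcdef0123456789/gdb.sum.
results = parser.read_sum_text(open('gdb.sum').read())
parser.write_sum_file(results, builder, 'master', rev)

# Reading it back resolves the same builder/rev[:2]/rev path.
again = parser.read_sum_file(builder, 'master', rev)
```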
master.cfg (192 changed lines)
@@ -15,17 +15,10 @@ from buildbot.schedulers.timed import Nightly
 from buildbot.schedulers.trysched import Try_Jobdir
 from buildbot.schedulers.forcesched import ForceScheduler
 from buildbot.process import factory
-from buildbot.process.properties import WithProperties, Property
-from buildbot.steps.shell import Compile
-from buildbot.steps.shell import Configure
-from buildbot.steps.shell import ShellCommand
-from buildbot.steps.shell import SetPropertyFromCommand
-from buildbot.steps.transfer import FileUpload
-from buildbot.steps.source.git import Git
-from buildbot.steps.slave import RemoveDirectory
+from buildbot.plugins import util, reporters, steps, worker
+from buildbot.reporters import utils
 from buildbot.changes.filter import ChangeFilter
-from buildbot.buildslave import BuildSlave
-from buildbot.status.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
+from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
 from buildbot.interfaces import IEmailLookup
 from zope.interface import implements
 from gdbcommand import CopyOldGDBSumFile, GdbCatSumfileCommand
@@ -34,10 +27,12 @@ from racyanalyze import GDBAnalyzeRacyTests
-from urllib import quote
 
 from sumfiles import DejaResults, set_web_base
 import os.path
 import os
+import urllib
 from json import load
 import re
+import jinja2
+import git
 
 ####################################
 ####################################
@@ -55,9 +50,8 @@ c = BuildmasterConfig = {}
 
 # Base directory for the web server.  This is needed in order to
 # compare the test results.
-gdb_web_base = os.path.expanduser (os.path.join (basedir,
-                                                 'public_html',
-                                                 'results'))
+gdb_web_base = os.path.expanduser ("~/results/")
 
 set_web_base (gdb_web_base)
 
 GDB_MAIL_FROM = 'sergiodj+buildbot@sergiodj.net'
@@ -81,12 +75,12 @@ def should_watch_branch (branch):
     return False
 
 from buildbot.changes.gitpoller import GitPoller
+master_git_repo = os.path.expanduser (os.path.join ('~/', 'buildbot-master-binutils-gdb'))
 c['change_source'] = []
-c['change_source'].append(GitPoller(
-        repourl = 'git://git.libreplanetbr.org/gdb.git',
-        workdir = os.path.expanduser (os.path.join ('~/', 'buildbot-master-binutils-gdb')),
-        branches = should_watch_branch,
-        pollinterval = 60 * 3))
+c['change_source'].append (GitPoller (repourl = 'git://git.libreplanetbr.org/gdb.git',
+                                      workdir = master_git_repo,
+                                      branches = should_watch_branch,
+                                      pollinterval = 60 * 3))
 
 # 'status' is a list of Status Targets.  The results of each build will be
 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
@@ -567,8 +561,7 @@ mn = MyMailNotifier(fromaddr = GDB_MAIL_FROM,
                     messageFormatter = MessageGDBTesters,
                     tags = [ "MAIL" ],
                     extraHeaders = { 'X-GDB-Buildbot' : '1',
-                                     'In-Reply-To' : WithProperties ("<%s@gdb-build>",
-                                                                     'got_revision') })
+                                     'In-Reply-To' : util.Interpolate ("<%(prop:got_revision)s@gdb-build>")})
 
 class LookupEmailTryBuild (object):
     implements (IEmailLookup)
@@ -607,7 +600,7 @@ c['db'] = {
 ## the documentation on each build step class to understand what it
 ## does.
 
-class CloneOrUpdateGDBMasterRepo (Git):
+class CloneOrUpdateGDBMasterRepo (steps.Git):
 
     """This build step updates the so-called "master" git repository.  For
 each buildslave, we have one master GDB git repository, which is then
@@ -629,18 +622,18 @@ from the principal repository."""
     name = "update gdb master repo"
     description = r"fetching GDB master sources"
     descriptionDone = r"fetched GDB master sources"
-    def __init__ (self):
-        Git.__init__ (self,
-                      repourl = 'git://git.libreplanetbr.org/gdb.git',
-                      workdir = WithProperties (r"%s/../binutils-gdb-master/",
-                                                r'builddir'),
-                      retryFetch = True,
-                      mode = r'incremental',
-                      progress = True)
+    def __init__ (self, **kwargs):
+        steps.Git.__init__ (self,
+                            repourl = 'git://git.libreplanetbr.org/gdb.git',
+                            workdir = util.Interpolate ("%(prop:builddir)s/../binutils-gdb-master/"),
+                            retryFetch = True,
+                            mode = 'incremental',
+                            progress = True,
+                            **kwargs)
         self.haltOnFailure = False
         self.flunkOnFailure = False
 
-class CloneOrUpdateGDBRepo (Git):
+class CloneOrUpdateGDBRepo (steps.Git):
     """This build step is used to clone the GDB git repository that will
 be used by an specific builder (inside a buildslave).  The trick here
 is to use the "reference" parameter to initialize the class, which
@@ -649,24 +642,24 @@ present at the reference repository (i.e., locally)."""
     name = "clone gdb repo"
     description = "fetching GDB sources"
     descriptionDone = "fetched GDB sources"
-    def __init__ (self):
-        Git.__init__ (self,
-                      repourl = 'git://git.libreplanetbr.org/gdb.git',
-                      workdir = WithProperties ('%s/binutils-gdb/', 'builddir'),
-                      reference = WithProperties ("%s/../binutils-gdb-master/",
-                                                  'builddir'),
-                      retryFetch = True,
-                      progress = True)
+    def __init__ (self, **kwargs):
+        steps.Git.__init__ (self,
+                            repourl = 'git://git.libreplanetbr.org/gdb.git',
+                            workdir = util.Interpolate ('%(prop:builddir)s/binutils-gdb/'),
+                            reference = util.Interpolate ("%(prop:builddir)s/../binutils-gdb-master/"),
+                            retryFetch = True,
+                            progress = True,
+                            **kwargs)
 
-class ConfigureGDB (Configure):
+class ConfigureGDB (steps.Configure):
     """This build step runs the GDB "configure" command, providing extra
 flags for it if needed."""
     name = "configure gdb"
     description = r"configure GDB"
     descriptionDone = r"configured GDB"
     def __init__ (self, extra_conf_flags, **kwargs):
-        Configure.__init__ (self, **kwargs)
-        self.workdir = WithProperties (r"%s", r'builddir')
+        steps.Configure.__init__ (self, **kwargs)
+        self.workdir = util.Interpolate ('%(prop:builddir)s/build/')
         self.command = ['../binutils-gdb/configure',
                         '--disable-binutils',
                         '--disable-ld',
@@ -675,7 +668,7 @@ flags for it if needed."""
                         '--disable-sim',
                         '--disable-gprof'] + extra_conf_flags
 
-class CompileGDB (Compile):
+class CompileGDB (steps.Compile):
     """This build step runs "make" to compile the GDB sources.  It
 provides extra "make" flags to "make" if needed.  It also uses the
 "jobs" properties to figure out how many parallel jobs we can use when
@@ -687,27 +680,27 @@ buildslave."""
     descriptionDone = r"compiled GDB"
     def __init__ (self, make_command = 'make', extra_make_flags = [],
                   **kwargs):
-        Compile.__init__ (self, **kwargs)
-        self.workdir = WithProperties (r"%s", r'builddir')
+        steps.Compile.__init__ (self, **kwargs)
+        self.workdir = util.Interpolate ('%(prop:builddir)s/build/')
         self.command = ['%s' % make_command,
-                        WithProperties (r"-j%s", r'jobs'),
+                        util.Interpolate ("-j%(prop:jobs)s"),
                         'all'] + extra_make_flags
 
-class MakeTAGSGDB (ShellCommand):
+class MakeTAGSGDB (steps.ShellCommand):
     name = 'make tags'
     description = 'running make TAGS'
     descriptionDone = 'ran make TAGS'
     def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, make_command = 'make',
-                               **kwargs)
-        self.workdir = WithProperties ("%s/build/gdb", 'builddir')
+        steps.ShellCommand.__init__ (self, make_command = 'make',
+                                     **kwargs)
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb")
         self.command = [ '%s' % make_command, 'TAGS' ]
         # We do not want to stop testing when this command fails.
         self.haltOnFailure = False
         self.flunkOnFailure = False
         self.flunkOnWarnings = False
 
-class TestGDB (ShellCommand):
+class TestGDB (steps.ShellCommand):
     """This build step runs the full testsuite for GDB.  It can run in
 parallel mode (see BuildAndTestGDBFactory below), and it will also
 provide any extra flags for "make" if needed.  Unfortunately, because
@@ -718,12 +711,12 @@ BuildBot halt on failure."""
     descriptionDone = r"tested GDB"
     def __init__ (self, make_command = 'make', extra_make_check_flags = [],
                   test_env = {}, **kwargs):
-        ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
-                                                  1 : SUCCESS,
-                                                  2 : SUCCESS },
-                               **kwargs)
+        steps.ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
+                                                        1 : SUCCESS,
+                                                        2 : SUCCESS },
+                                     **kwargs)
 
-        self.workdir = WithProperties (r"%s/build/gdb/testsuite", r'builddir')
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite")
         self.command = ['%s' % make_command,
                         '-k',
                         'check'] + extra_make_check_flags
@@ -734,7 +727,7 @@ BuildBot halt on failure."""
         self.flunkOnFailure = False
         self.flunkOnWarnings = False
 
-class TestRacyGDB (ShellCommand):
+class TestRacyGDB (steps.ShellCommand):
     """This build step runs the full testsuite for GDB for racy testcases.
 It can run in parallel mode (see BuildAndTestGDBFactory below), and it
 will also provide any extra flags for "make" if needed.
@@ -745,12 +738,12 @@ command must not make BuildBot halt on failure."""
     descriptionDone = r"tested GDB (racy)"
     def __init__ (self, make_command = 'make', extra_make_check_flags = [],
                   test_env = {}, **kwargs):
-        ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
-                                                  1 : SUCCESS,
-                                                  2 : SUCCESS },
-                               **kwargs)
+        steps.ShellCommand.__init__ (self, decodeRC = { 0 : SUCCESS,
+                                                        1 : SUCCESS,
+                                                        2 : SUCCESS },
+                                     **kwargs)
 
-        self.workdir = WithProperties (r"%s/build/gdb/testsuite", r'builddir')
+        self.workdir = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite")
         self.command = ['%s' % make_command,
                         '-k',
                         'check',
@@ -762,26 +755,6 @@ command must not make BuildBot halt on failure."""
         self.flunkOnFailure = False
         self.flunkOnWarnings = False
 
-class CleanupBreakageLockfile (ShellCommand):
-    """Clean up (i.e., remove) the breakage lockfile for a specific builder."""
-    name = "cleanup breakage lockfile"
-    description = "cleaning up breakage lockfile"
-    descriptionDone = "cleaned up breakage lockfile"
-    command = [ 'true' ]
-
-    def __init__ (self, **kwargs):
-        ShellCommand.__init__ (self, **kwargs)
-
-    def evaluateCommand (self, cmd):
-        builder = self.getProperty ('buildername')
-        branch = self.getProperty ('branch')
-        lockfile = make_breakage_lockfile_name (builder, branch)
-
-        if os.path.isfile (lockfile):
-            os.remove (lockfile)
-
-        return SUCCESS
-
 
 def scheduler_is_racy (step):
     return step.getProperty ('scheduler').startswith ('racy')
@@ -900,29 +873,28 @@ The parameters of the class are:
 
         # mjw asked me to delay the build by X number of seconds.
         if initial_delay:
-            self.addStep (ShellCommand (command = ['sleep', '%d' % initial_delay],
-                                        description = "delaying start of build by %d seconds" % initial_delay,
-                                        descriptionDone = "delayed start of build by %d seconds" % initial_delay))
+            self.addStep (steps.ShellCommand (command = ['sleep', '%d' % initial_delay],
+                                              description = "delaying start of build by %d seconds" % initial_delay,
+                                              descriptionDone = "delayed start of build by %d seconds" % initial_delay))
 
-        self.addStep (RemoveDirectory (dir = WithProperties (r"%s/build",
-                                                             r'builddir'),
-                                       description = r"removing old build dir",
-                                       descriptionDone = r"removed old build dir"))
-        self.addStep (CloneOrUpdateGDBMasterRepo ())
+        self.addStep (steps.RemoveDirectory (dir = util.Interpolate ("%(prop:builddir)s/build"),
+                                             description = "removing old build dir",
+                                             descriptionDone = "removed old build dir"))
+        self.addStep (CloneOrUpdateGDBMasterRepo (hideStepIf = True))
         self.addStep (CloneOrUpdateGDBRepo ())
 
         if self.run_testsuite:
             self.addStep (CopyOldGDBSumFile (doStepIf = scheduler_is_not_racy_try_do,
-                                             hideStepIf = scheduler_is_racy_try_hide))
+                                             hideStepIf = True))
 
         if not self.extra_conf_flags:
             self.extra_conf_flags = []
 
         if self.enable_targets_all:
-            self.extra_conf_flags.append (r'--enable-targets=all')
+            self.extra_conf_flags.append ('--enable-targets=all')
 
         if self.use_system_debuginfo:
-            self.extra_conf_flags.append (r'--with-separate-debug-dir=/usr/lib/debug')
+            self.extra_conf_flags.append ('--with-separate-debug-dir=/usr/lib/debug')
 
         self.addStep (self.ConfigureClass (self.extra_conf_flags + architecture_triplet,
                                            haltOnFailure = True))
@@ -935,7 +907,9 @@ The parameters of the class are:
         # This last will be executed when the build succeeds.  It is
         # needed in order to cleanup the breakage lockfile, if it
         # exists.
-        self.addStep (CleanupBreakageLockfile (hideStepIf = True))
+        self.addStep (steps.MasterShellCommand (command = [ 'rm', '-f',
+                                                            util.Interpolate ('/tmp/gdb-buildbot-breakage-report-%(prop:branch)s-%(prop:buildername)s') ],
+                                                hideStepIf = True))
 
         # Disabling this until we figure out how to properly run + test
         # self.addStep (MakeTAGSGDB ())
@@ -948,7 +922,7 @@ The parameters of the class are:
             self.test_env = {}
 
         if self.test_parallel:
-            self.extra_make_check_flags.append (WithProperties (r"-j%s", r'jobs'))
+            self.extra_make_check_flags.append (util.Interpolate ("-j%(prop:jobs)s"))
             self.extra_make_check_flags.append (r'FORCE_PARALLEL=1')
 
         self.addStep (self.TestClass (self.make_command, self.extra_make_check_flags,
@@ -956,17 +930,16 @@ The parameters of the class are:
                                       doStepIf = scheduler_is_not_racy_do,
                                       hideStepIf = scheduler_is_racy_hide))
 
-            self.addStep (GdbCatSumfileCommand (workdir = WithProperties (r'%s/build/gdb/testsuite',
-                                                                          r'builddir'),
-                                                description = r'analyze test results',
+            self.addStep (GdbCatSumfileCommand (workdir = util.Interpolate ('%(prop:builddir)s/build/gdb/testsuite'),
+                                                description = 'analyzing test results',
+                                                descriptionDone = 'analyzed test results',
                                                 doStepIf = scheduler_is_not_racy_do,
                                                 hideStepIf = scheduler_is_racy_hide))
-            self.addStep (FileUpload (slavesrc = WithProperties (r"%s/build/gdb/testsuite/gdb.log",
-                                                                 r'builddir'),
-                                      masterdest = WithProperties (r"public_html/results/%s/gdb.log",
-                                                                   r'buildername'),
-                                      doStepIf = scheduler_is_not_racy_do,
-                                      hideStepIf = scheduler_is_racy_hide))
+            self.addStep (steps.FileUpload (workersrc = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite/gdb.log"),
+                                            masterdest = util.Interpolate ("public_html/results/%(prop:buildername)s/tmp/%(prop:got_revision)s/gdb.log"),
+                                            mode = 0644,
+                                            doStepIf = scheduler_is_not_racy_do,
+                                            hideStepIf = True))
+            self.addStep (SaveGDBResults (doStepIf = scheduler_is_not_racy_do,
+                                          hideStepIf = scheduler_is_racy_hide))
@@ -977,8 +950,7 @@ The parameters of the class are:
                                           doStepIf = scheduler_is_racy_do,
                                           hideStepIf = scheduler_is_not_racy_hide))
 
-            self.addStep (GDBAnalyzeRacyTests (workdir = WithProperties ('%s/build/gdb/testsuite',
-                                                                         'builddir'),
+            self.addStep (GDBAnalyzeRacyTests (workdir = util.Interpolate ('%(prop:builddir)s/build/gdb/testsuite'),
                                                description = 'analyzing racy tests',
                                                descriptionDone = 'analyzed racy tests',
                                                doStepIf = scheduler_is_racy_do,
@@ -1061,16 +1033,16 @@ class RunTestGDBNativeExtendedGDBServer_c32t32 (BuildAndTestGDBFactory):
 class RunTestGDBIndexBuild (BuildAndTestGDBFactory):
     """Testing with the "cc-with-tweaks.sh" passing -i."""
     def __init__ (self, **kwargs):
-        self.extra_make_check_flags = [ WithProperties (r'CC_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc', r'builddir'),
-                                        WithProperties (r'CXX_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++', r'builddir') ]
+        self.extra_make_check_flags = [ util.Interpolate ('CC_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc'),
+                                        util.Interpolate ('CXX_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++') ]
         BuildAndTestGDBFactory.__init__ (self, **kwargs)
 
 class RunTestGDBIndexBuild_c32t32 (BuildAndTestGDBFactory):
     """Testing with the "cc-with-tweaks.sh" passing -i.  32-bit version"""
     def __init__ (self, **kwargs):
         self.extra_conf_flags = [ 'CFLAGS=-m32', 'CXXFLAGS=-m32' ]
-        self.extra_make_check_flags = [ WithProperties (r'CC_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc', r'builddir'),
-                                        WithProperties (r'CXX_FOR_TARGET=/bin/sh %s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++', r'builddir'),
+        self.extra_make_check_flags = [ util.Interpolate ('CC_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i gcc'),
+                                        util.Interpolate ('CXX_FOR_TARGET=/bin/sh %(prop:builddir)s/binutils-gdb/gdb/contrib/cc-with-tweaks.sh -i g++'),
                                         'RUNTESTFLAGS=--target_board unix/-m32' ]
         BuildAndTestGDBFactory.__init__ (self, **kwargs)
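Most of the master.cfg churn above is the same mechanical substitution: buildbot 0.8's WithProperties positional renderer becomes buildbot 0.9's util.Interpolate with named properties. A minimal before/after illustration of that pattern; the property and path below are just examples taken from the diff:

```python
from buildbot.process.properties import WithProperties
from buildbot.plugins import util

# Old style: positional "%s" filled from the named property list.
workdir_old = WithProperties("%s/build/gdb/testsuite", "builddir")

# New style: the property is named inline in the format string.
workdir_new = util.Interpolate("%(prop:builddir)s/build/gdb/testsuite")
```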
scripts/update-logs.sh (new executable file, 124 lines)
@@ -0,0 +1,124 @@
+#!/bin/bash
+
+set -e
+
+umask u=rw,g=r,o=r
+
+usage ()
+{
+    cat > /dev/stderr <<EOF
+$0 -- Update build logs for builder
+
+Usage: $0 [-c|--commit COMMIT] [-b|--builder BUILDER] [--branch BRANCH] [-d|--base-directory DIR] [-t|--is-try-sched yes|no] [-h|--help]
+EOF
+}
+
+err ()
+{
+    local msg=$1
+
+    echo "ERROR: $msg" > /dev/stderr
+    exit 1
+}
+
+msg ()
+{
+    local msg=$1
+
+    echo ">>> INFO: $msg"
+}
+
+while test "$1" != "" ; do
+    case "$1" in
+        "-c"|"--commit")
+            COMMIT=$2
+            shift 2
+            ;;
+        "-b"|"--builder")
+            BUILDER=$2
+            shift 2
+            ;;
+        "-d"|"--base-directory")
+            BASE_DIR=$2
+            shift 2
+            ;;
+        "-h"|"--help")
+            usage
+            exit 0
+            ;;
+        "-t"|"--is-try-sched")
+            IS_TRY_SCHED=$2
+            shift 2
+            ;;
+        "--branch")
+            BRANCH=$2
+            shift 2
+            ;;
+        *)
+            usage
+            exit 1
+            ;;
+    esac
+done
+
+DIR=$BASE_DIR/$BUILDER/
+
+if test ! -d $DIR ; then
msg "$DIR is not a valid directory. Creeating it..."
+    mkdir -p $DIR
+fi
+
+cd $DIR
+
+DB_NAME=$DIR/${BUILDER}.db
+
+if test ! -f $DB_NAME ; then
+    msg "Database $DB_NAME does not exist. Creating it..."
+    sqlite3 $DB_NAME "CREATE TABLE logs(commitid TEXT, branch TEXT DEFAULT 'master', trysched BOOLEAN DEFAULT 0, timestamp TIMESTAMP PRIMARY KEY DEFAULT (strftime('%s', 'now')) NOT NULL)"
+fi
+
+COMMIT_2_DIG=`echo $COMMIT | sed 's/^\(..\).*$/\1/'`
+
+CDIR=$COMMIT_2_DIG/$COMMIT/
+ISTRY=0
+if test "$IS_TRY_SCHED" = "yes" ; then
+    CDIR=try/${CDIR}
+    ISTRY=1
+fi
+
+if test -d $CDIR ; then
+    # If this is a try build, the user is doing a rebuild.
+    # If this is a normal build, someone triggered a rebuild.
+    # Either way, we need to delete the current log dir.
+    msg "Log dir $CDIR already exists. Deleting it so that we can update the logs..."
+    rm --verbose -rf $CDIR
+fi
+
+msg "Creating directory structure $CDIR..."
+mkdir --verbose -p $CDIR
+cd $CDIR
+
+TMP_DIR=$DIR/tmp/$COMMIT/
+
+msg "Moving log files to $PWD..."
+mv --verbose $TMP_DIR/* .
+rmdir $TMP_DIR
+msg "Compressing log files..."
+xz --verbose --compress *
+
+PREV_COMMIT=`sqlite3 $DB_NAME "SELECT commitid FROM logs WHERE branch = '$BRANCH' AND trysched = 0 ORDER BY timestamp DESC LIMIT 1"`
+
+msg "Update database..."
+sqlite3 $DB_NAME "INSERT INTO logs(commitid, branch, trysched) VALUES('$COMMIT', '$BRANCH', $ISTRY)"
+
+msg "Creating README.txt..."
+cat > README.txt <<EOF
+== README -- Logs for commit $COMMIT ==
+
+Branch tested: $BRANCH
+
+Previous commit: $PREV_COMMIT
+
+EOF
+
+exit 0