initial import of the code
parent 7a22cf7523
commit 342fc84a46

6 changed files with 716 additions and 0 deletions
lib/gdbbuilder.py  107 lines  Normal file
@@ -0,0 +1,107 @@
# Define a GDB builder of some kind.

from buildbot.process import factory
from buildbot.process.properties import WithProperties
from buildbot.steps.shell import Compile
from buildbot.steps.shell import Configure
from buildbot.steps.shell import SetProperty
from buildbot.steps.shell import ShellCommand
from buildbot.steps.source import Git
from buildbot.steps.transfer import FileDownload
from gdbcommand import GdbCatSumfileCommand

giturl = 'git://sourceware.org/git/gdb.git'

# Initialize F with some basic build rules.
def _init_gdb_factory(f, conf_flags):
    global giturl
    f.addStep(Git(repourl = giturl, workdir = 'gdb', mode = 'update',
                  reference = '/home/buildbot/Git/gdb/.git'))
    f.addStep(ShellCommand(command=["rm", "-rf", "build"], workdir=".",
                           description="clean build dir"))
    f.addStep(Configure(command=["../gdb/configure",
                                 '--enable-targets=all'] + conf_flags,
                        workdir="build"))
    f.addStep(Compile(command=["make", "-j4", "all"], workdir="build"))
    f.addStep(Compile(command=["make", "-j4", "info"], workdir="build"))

def _add_summarizer(f):
    f.addStep(GdbCatSumfileCommand(workdir='build/gdb/testsuite',
                                   description='analyze test results'))

def _add_check(f, check_flags, check_env):
    f.addStep(Compile(command=["make", "-k", '-j4', "check"] + check_flags,
                      workdir="build/gdb/testsuite",
                      description='run test suite',
                      env = check_env,
                      # We have to set these due to dejagnu
                      haltOnFailure = False,
                      flunkOnFailure = False))

def _index_build(f):
    f.addStep(SetProperty(command=['pwd'], property='SRCDIR',
                          workdir='gdb/gdb'))
    return [WithProperties (r'CC_FOR_TARGET=/bin/sh %s/cc-with-index.sh gcc',
                            'SRCDIR'),
            WithProperties (r'CXX_FOR_TARGET=/bin/sh %s/cc-with-index.sh g++',
                            'SRCDIR')]

def _gdbserver(f):
    f.addStep(ShellCommand(command = ['mkdir', '-p', 'stuff/boards'],
                           workdir = 'build'))
    f.addStep(ShellCommand(command = ['touch', 'stuff/site.exp'],
                           workdir = 'build'))
    f.addStep(FileDownload(mastersrc = '~/GDB/lib/native-gdbserver.exp',
                           slavedest = 'stuff/boards/native-gdbserver.exp',
                           workdir = 'build'))
    f.addStep(SetProperty(command = ['pwd'], property='STUFFDIR',
                          workdir = 'build/stuff'))
    return { 'DEJAGNU' : WithProperties(r'%s/site.exp', 'STUFFDIR') }

def _make_one_gdb_builder(kind):
    f = factory.BuildFactory()
    _init_gdb_factory(f, [])
    check_flags = []
    check_env = {}
    if kind == 'index':
        check_flags = _index_build(f)
    elif kind == 'dwarf4':
        check_flags = ['RUNTESTFLAGS=--target_board unix/gdb:debug_flags=-gdwarf-4',
                       'FORCE_PARALLEL=yes']
    elif kind == 'm32':
        check_flags = ['RUNTESTFLAGS=--target_board unix/-m32',
                       'FORCE_PARALLEL=yes']
    elif kind == 'gdbserver':
        check_env = _gdbserver(f)
        check_flags = ['RUNTESTFLAGS=--target_board native-gdbserver',
                       'FORCE_PARALLEL=yes']
    _add_check(f, check_flags, check_env)
    _add_summarizer(f)
    return f

# Future build kinds:
#   valgrind    Run test suite under valgrind
#   bfd64       Build GDB with --enable-64-bit-bfd (32-bit only)
#   pie         Build test cases with -fPIE.
#   nosysdebug  Configure so that system debuginfo is ignored.

def make_gdb_builder(op_sys, arch, kind = ''):
    """Make a new GDB builder.
    OP_SYS is the slave's operating system, e.g., 'f14'.
    ARCH is the slave's architecture, e.g., x86_64.
    KIND indicates the kind of builder to make.  It is a string.
    The default, indicated by the empty string, is to make a basic builder.
    Other valid values are:
      dwarf4      Run test suite with -gdwarf-4.
      gdbserver   Run test suite against gdbserver.
      index       Run test suite with .gdb_index files.
      m32         Build GDB and run all tests with -m32 (64-bit only).
    """
    name = 'gdb-' + op_sys + '-' + arch
    if kind != '':
        name = name + '-' + kind
    return { 'name' : name,
             'slavenames' : [ name ],
             'builddir' : name,
             'factory' : _make_one_gdb_builder(kind)
             }
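For context, a minimal sketch (not part of the file above) of how make_gdb_builder is meant to be consumed; master.cfg below does exactly this when it assembles its builder list, and the arguments are the example values from the docstring.

# Illustrative sketch: construct one 'dwarf4' builder description and show
# the derived names.  The returned dict is what master.cfg appends to
# c['builders'].
from gdbbuilder import make_gdb_builder

builder = make_gdb_builder('f14', 'x86_64', 'dwarf4')
print builder['name']        # gdb-f14-x86_64-dwarf4
print builder['slavenames']  # ['gdb-f14-x86_64-dwarf4']
print builder['builddir']    # gdb-f14-x86_64-dwarf4
# builder['factory'] is the BuildFactory holding the git checkout, configure,
# make, 'make check' and result-summarizing steps set up above.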
lib/gdbcommand.py  40 lines  Normal file
@@ -0,0 +1,40 @@
# GDB .sum-fetching command.

from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.steps.shell import ShellCommand
from sumfiles import DejaResults

class GdbCatSumfileCommand(ShellCommand):
    name = 'regressions'
    command = ['cat', 'gdb.sum']

    def __init__(self, **kwargs):
        ShellCommand.__init__(self, **kwargs)

    def evaluateCommand(self, cmd):
        rev = self.getProperty('got_revision')
        builder = self.getProperty('buildername')
        istry = self.getProperty('isTryBuilder')
        branch = self.getProperty('branch')
        if branch is None:
            branch = 'master'
        parser = DejaResults()
        cur_results = parser.read_sum_text(self.getLog('stdio').getText())
        if istry == 'no':
            baseline = parser.read_baseline (builder, branch)
        else:
            baseline = parser.read_sum_file(builder, rev)
        result = SUCCESS
        if baseline is not None:
            report = parser.compute_regressions(cur_results, baseline)
            if report != '':
                self.addCompleteLog('regressions', report)
                result = FAILURE
        if istry == 'no':
            parser.write_sum_file(cur_results, builder, rev)
            # If there was no previous baseline, then this run
            # gets the honor.
            if baseline is None:
                baseline = cur_results
            parser.write_baseline(baseline, builder, branch)
        return result
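As a usage note, this step is attached to a build factory by _add_summarizer in gdbbuilder.py; the short sketch below repeats that wiring so the file can be read on its own.

# Illustrative sketch: cat gdb.sum in the testsuite build directory and let
# evaluateCommand() grade the build against the stored baseline.
from buildbot.process import factory
from gdbcommand import GdbCatSumfileCommand

f = factory.BuildFactory()
f.addStep(GdbCatSumfileCommand(workdir='build/gdb/testsuite',
                               description='analyze test results'))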
lib/gdbgitpoller.py  227 lines  Normal file
@@ -0,0 +1,227 @@
# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import time
import tempfile
import os
import subprocess
import types

from twisted.python import log
from twisted.internet import defer, utils

from buildbot.changes import base, changes

class GDBGitPoller(base.PollingChangeSource):
    """This source will poll a remote git repo for changes and submit
    them to the change master."""

    compare_attrs = ["repourl", "branch", "workdir",
                     "pollInterval", "gitbin", "usetimestamps",
                     "category", "project"]

    def __init__(self, repourl, branch='master',
                 workdir=None, pollInterval=10*60,
                 gitbin='git', usetimestamps=True,
                 category=None, project=None,
                 pollinterval=-2):
        # for backward compatibility; the parameter used to be spelled with 'i'
        if pollinterval != -2:
            pollInterval = pollinterval
        if project is None: project = ''

        self.repourl = repourl
        if branch is not None and type(branch) is not types.ListType:
            branch = [branch]
        self.branch = branch
        self.pollInterval = pollInterval
        self.lastChange = time.time()
        self.lastPoll = time.time()
        self.gitbin = gitbin
        self.workdir = workdir
        self.usetimestamps = usetimestamps
        self.category = category
        self.project = project
        self.changeCount = 0
        self.commitInfo = {}

        if self.workdir == None:
            self.workdir = tempfile.gettempdir() + '/gitpoller_work'

    def startService(self):
        base.PollingChangeSource.startService(self)

        dirpath = os.path.dirname(self.workdir.rstrip(os.sep))
        if not os.path.exists(dirpath):
            log.msg('gitpoller: creating parent directories for workdir')
            os.makedirs(dirpath)

        if not os.path.exists(self.workdir + r'/.git'):
            log.msg('gitpoller: initializing working dir')
            subprocess.check_call([self.gitbin, 'init', self.workdir])
            subprocess.check_call([self.gitbin, 'remote', 'add', 'origin', self.repourl],
                                  cwd=self.workdir)
            subprocess.check_call([self.gitbin, 'fetch', 'origin'],
                                  cwd=self.workdir)

    def describe(self):
        status = ""
        if not self.parent:
            status = "[STOPPED - check log]"
        str = 'GitPoller watching the remote git repository %s, branches: %s %s' \
            % (self.repourl, self.branch, status)
        return str

    def poll(self):
        d = self._get_changes()
        d.addCallback(self._process_changes)
        d.addErrback(self._process_changes_failure)
        return d

    def _get_commit_comments(self, rev):
        args = ['log', rev, '--no-walk', r'--format=%s%n%b']
        d = utils.getProcessOutput(self.gitbin, args, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=False )
        d.addCallback(self._get_commit_comments_from_output)
        return d

    def _get_commit_comments_from_output(self, git_output):
        stripped_output = git_output.strip()
        if len(stripped_output) == 0:
            raise EnvironmentError('could not get commit comment for rev')
        self.commitInfo['comments'] = stripped_output
        return self.commitInfo['comments'] # for tests

    def _get_commit_timestamp(self, rev):
        # unix timestamp
        args = ['log', rev, '--no-walk', r'--format=%ct']
        d = utils.getProcessOutput(self.gitbin, args, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=False )
        d.addCallback(self._get_commit_timestamp_from_output)
        return d

    def _get_commit_timestamp_from_output(self, git_output):
        stripped_output = git_output.strip()
        if self.usetimestamps:
            try:
                stamp = float(stripped_output)
            except Exception, e:
                log.msg('gitpoller: caught exception converting output \'%s\' to timestamp' % stripped_output)
                raise e
            self.commitInfo['timestamp'] = stamp
        else:
            self.commitInfo['timestamp'] = None
        return self.commitInfo['timestamp'] # for tests

    def _get_commit_files(self, rev):
        args = ['log', rev, '--name-only', '--no-walk', r'--format=%n']
        d = utils.getProcessOutput(self.gitbin, args, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=False )
        d.addCallback(self._get_commit_files_from_output)
        return d

    def _get_commit_files_from_output(self, git_output):
        fileList = git_output.split()
        self.commitInfo['files'] = fileList
        return self.commitInfo['files'] # for tests

    def _get_commit_name(self, rev):
        args = ['log', rev, '--no-walk', r'--format=%aE']
        d = utils.getProcessOutput(self.gitbin, args, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=False )
        d.addCallback(self._get_commit_name_from_output)
        return d

    def _get_commit_name_from_output(self, git_output):
        stripped_output = git_output.strip()
        if len(stripped_output) == 0:
            raise EnvironmentError('could not get commit name for rev')
        self.commitInfo['name'] = stripped_output
        return self.commitInfo['name'] # for tests

    def _get_changes(self):
        log.msg('gitpoller: polling git repo at %s' % self.repourl)

        self.lastPoll = time.time()

        # get a deferred object that performs the git fetch

        # This command always produces data on stderr, but we actually do not care
        # about the stderr or stdout from this command.  We set errortoo=True to
        # avoid an errback from the deferred.  The callback which will be added to this
        # deferred will not use the response.
        args = ['fetch', self.repourl]
        d = utils.getProcessOutput(self.gitbin, args, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=True )

        return d

    def _process_changes(self, unused_output):
        # get the change list
        for branch in self.branch:
            revListArgs = ['log',
                           'origin/%s@{1}..origin/%s' % (branch, branch),
                           r'--format=%H']
            d = utils.getProcessOutput(self.gitbin, revListArgs, path=self.workdir, env=dict(PATH=os.environ['PATH']), errortoo=False )
            d.addCallback(self._process_changes_in_output, branch)
        return None

    @defer.deferredGenerator
    def _process_changes_in_output(self, git_output, branch):
        self.changeCount = 0

        # process oldest change first
        revList = git_output.split()
        if revList:
            revList.reverse()
        self.changeCount = len(revList)

        log.msg('gitpoller: processing %d changes: %s in "%s"' % (self.changeCount, revList, self.workdir) )

        for rev in revList:
            self.commitInfo = {}

            deferreds = [
                self._get_commit_timestamp(rev),
                self._get_commit_name(rev),
                self._get_commit_files(rev),
                self._get_commit_comments(rev),
            ]
            dl = defer.DeferredList(deferreds)
            dl.addCallback(self._add_change, rev, branch)

            # wait for that deferred to finish before starting the next
            wfd = defer.waitForDeferred(dl)
            yield wfd
            wfd.getResult()


    def _add_change(self, results, rev, branch):
        log.msg('gitpoller: _add_change results: "%s", rev: "%s" in "%s"' % (results, rev, self.workdir))

        c = changes.Change(who=self.commitInfo['name'],
                           revision=rev,
                           files=self.commitInfo['files'],
                           comments=self.commitInfo['comments'],
                           when=self.commitInfo['timestamp'],
                           branch=branch,
                           category=self.category,
                           project=self.project,
                           repository=self.repourl)
        log.msg('gitpoller: change "%s" in "%s on branch %s"' % (c, self.workdir, branch))
        self.parent.addChange(c)
        self.lastChange = self.lastPoll


    def _process_changes_failure(self, f):
        log.msg('gitpoller: repo poll failed')
        log.err(f)
        # eat the failure to continue along the deferred chain - we still want to catch up
        return None
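A minimal sketch of how this poller would be configured; the same settings appear, commented out, in the change_source list in master.cfg below.

# Illustrative sketch: poll the sourceware GDB repository every ten minutes
# for the two branches the schedulers in master.cfg care about.
# (Inside master.cfg, where c = BuildmasterConfig = {}.)
from gdbgitpoller import GDBGitPoller

c['change_source'] = [
    GDBGitPoller(repourl = 'git://sourceware.org/git/gdb.git',
                 workdir = '/home/buildbot/GitWatcher/gdb/',
                 branch = ['master', 'gdb_7_3-branch'],
                 pollInterval = 10*60),
    ]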
lib/native-gdbserver.exp  54 lines  Normal file
@@ -0,0 +1,54 @@
# gdbserver running native.

load_generic_config "gdbserver"
process_multilib_options ""

# The default compiler for this target.
set_board_info compiler "[find_gcc]"

# This gdbserver can only run a process once per session.
set_board_info gdb,do_reload_on_run 1

# There's no support for argument-passing (yet).
set_board_info noargs 1

# Can't do input (or output) in the current gdbserver.
set_board_info gdb,noinferiorio 1

# gdbserver does not intercept target file operations and perform them
# on the host.
set_board_info gdb,nofileio 1

# Can't do hardware watchpoints, in general.
set_board_info gdb,no_hardware_watchpoints 1

set_board_info sockethost "localhost:"
set_board_info use_gdb_stub 1

# We will be using the standard GDB remote protocol.
set_board_info gdb_protocol "remote"
# Test the copy of gdbserver in the build directory.
set_board_info gdb_server_prog "../gdbserver/gdbserver"

proc ${board}_spawn { board cmd } {
    global board_info

    set baseboard [lindex [split $board "/"] 0]

    set board_info($baseboard,isremote) 0
    set result [remote_spawn $board $cmd]
    set board_info($baseboard,isremote) 1

    return $result
}

proc ${board}_download { board host dest } {
    return $host
}

proc ${board}_file { dest op args } {
    if { $op == "delete" } {
        return 0
    }
    return [eval [list standard_file $dest $op] $args]
}
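This board file only takes effect because the 'gdbserver' builder stages it under stuff/boards/ and points DEJAGNU at a generated stuff/site.exp; here is a short sketch of the environment and flags that end up on the test-suite step, taken from _gdbserver and _make_one_gdb_builder in gdbbuilder.py.

# Illustrative sketch: what the gdbserver builder passes to the 'make check'
# step once this board file has been downloaded to the slave.
from buildbot.process.properties import WithProperties

check_env = { 'DEJAGNU' : WithProperties(r'%s/site.exp', 'STUFFDIR') }
check_flags = ['RUNTESTFLAGS=--target_board native-gdbserver',
               'FORCE_PARALLEL=yes']
# _add_check() then runs, in build/gdb/testsuite:
#   make -k -j4 check RUNTESTFLAGS='--target_board native-gdbserver' FORCE_PARALLEL=yes
# with DEJAGNU pointing at stuff/site.exp; this board file sits next to it
# in stuff/boards/.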
lib/sumfiles.py  121 lines  Normal file
@@ -0,0 +1,121 @@
# Functions for manipulating .sum summary files.

import re
import os.path
from StringIO import StringIO

# Helper regex for parse_sum_line.
sum_matcher = re.compile('^(.?(PASS|FAIL)): (.*)$')

# You must call set_web_base at startup to set this.
gdb_web_base = None

def set_web_base(arg):
    global gdb_web_base
    gdb_web_base = arg
    if not os.path.isdir(gdb_web_base):
        # If the parent doesn't exist, we're confused.
        # So, use mkdir and not makedirs.
        os.mkdir(gdb_web_base, 0755)

class DejaResults(object):
    def __init__(self):
        object.__init__(self)

    # Parse a single line from a .sum file.
    # Uniquify the name, and put the result into OUT_DICT.
    # If the line does not appear to be about a test, ignore it.
    def parse_sum_line(self, out_dict, line):
        global sum_matcher
        line = line.rstrip()
        m = re.match(sum_matcher, line)
        if m:
            result = m.group(1)
            test_name = m.group(3)
            if test_name in out_dict:
                i = 2
                while True:
                    nname = test_name + ' <<' + str(i) + '>>'
                    if nname not in out_dict:
                        break
                    i = i + 1
                test_name = nname
            out_dict[test_name] = result

    def _write_sum_file(self, sum_dict, subdir, filename):
        global gdb_web_base
        bdir = os.path.join(gdb_web_base, subdir)
        if not os.path.isdir(bdir):
            os.makedirs(bdir, 0755)
        fname = os.path.join(bdir, filename)
        keys = sum_dict.keys()
        keys.sort()
        f = open(fname, 'w')
        for k in keys:
            f.write(sum_dict[k] + ': ' + k + '\n')
        f.close()

    def write_sum_file(self, sum_dict, builder, filename):
        self._write_sum_file(sum_dict, builder, filename)

    def write_baseline(self, sum_dict, builder, branch):
        self.write_sum_file(sum_dict, os.path.join(builder, branch),
                            'baseline')

    # Read a .sum file.
    # The builder name is BUILDER.
    # The base file name is given in FILENAME.  This should be a git
    # revision; to read the baseline file for a branch, use `read_baseline'.
    # Returns a dictionary holding the .sum contents, or None if the
    # file did not exist.
    def read_sum_file(self, builder, filename):
        global gdb_web_base
        fname = os.path.join(gdb_web_base, builder, filename)
        if os.path.exists(fname):
            result = {}
            f = open(fname, 'r')
            for line in f:
                self.parse_sum_line (result, line)
            f.close()
        else:
            result = None
        return result

    def read_baseline(self, builder, branch):
        return self.read_sum_file(builder, os.path.join(branch, 'baseline'))

    # Parse some text as a .sum file and return the resulting
    # dictionary.
    def read_sum_text(self, text):
        cur_file = StringIO(text)
        cur_results = {}
        for line in cur_file.readlines():
            self.parse_sum_line(cur_results, line)
        return cur_results

    # Compute regressions between RESULTS and BASELINE.
    # BASELINE will be modified if any new PASSes are seen.
    # Returns a regression report, as a string.
    def compute_regressions(self, results, baseline):
        our_keys = results.keys()
        our_keys.sort()
        result = ''
        xfails = self.read_sum_file('', 'xfail')
        if xfails is None:
            xfails = {}
        for key in our_keys:
            # An XFAIL entry means we have an unreliable test.
            if key in xfails:
                continue
            # A transition to PASS means we should update the baseline.
            if results[key] == 'PASS':
                if key not in baseline or baseline[key] != 'PASS':
                    baseline[key] = 'PASS'
            # A regression is just a transition to FAIL.
            if results[key] != 'FAIL':
                continue
            if key not in baseline:
                result = result + 'new FAIL: ' + key + '\n'
            elif baseline[key] != 'FAIL':
                result = result + baseline[key] + ' -> FAIL: ' + key + '\n'
        return result
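A minimal, self-contained sketch of the round trip GdbCatSumfileCommand performs with this class; the temporary directory stands in for the public_html/results area that master.cfg normally passes to set_web_base.

# Illustrative sketch: parse two small .sum texts and diff them the way the
# summarizer step does.
import tempfile
from sumfiles import DejaResults, set_web_base

set_web_base(tempfile.mkdtemp())
parser = DejaResults()
baseline = parser.read_sum_text('PASS: gdb.base/break.exp: run to main\n')
current  = parser.read_sum_text('FAIL: gdb.base/break.exp: run to main\n')
print parser.compute_regressions(current, baseline)
# PASS -> FAIL: gdb.base/break.exp: run to main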
master.cfg  167 lines  Normal file
@@ -0,0 +1,167 @@
# -*- python -*-
# ex: set syntax=python:

from buildbot.buildslave import BuildSlave
from gdbgitpoller import GDBGitPoller
# from buildbot.changes.gitpoller import GitPoller
from buildbot.changes.pb import PBChangeSource
from buildbot.process import factory
from buildbot.process.buildstep import LogLineObserver
from buildbot.process.properties import WithProperties
from buildbot.scheduler import AnyBranchScheduler
from buildbot.scheduler import Scheduler
from buildbot.scheduler import Try_Jobdir
from buildbot.scheduler import Try_Userpass
from buildbot.schedulers.filter import ChangeFilter
from buildbot.status import html
from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.steps.python_twisted import Trial
from buildbot.steps.shell import Compile
from buildbot.steps.shell import Configure
from buildbot.steps.shell import SetProperty
from buildbot.steps.shell import ShellCommand
from buildbot.steps.source import Git
from gdbbuilder import make_gdb_builder
from sumfiles import DejaResults, set_web_base
import os.path
import urllib
from buildbot.status import words


# This is the dictionary that the buildmaster pays attention to.  We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

c['mergeRequests'] = False

c['slavePortnum'] = 9989

c['change_source'] = [
    PBChangeSource(),
    # Didn't finish fixing this; it was simpler to just use cron.
    # GDBGitPoller(repourl = 'git://sourceware.org/git/gdb.git',
    #              workdir = '/home/buildbot/GitWatcher/gdb/',
    #              branch = ['master', 'gdb_7_3-branch'])
    ]

# Base directory for the web server.
gdb_web_base = os.path.expanduser(os.path.join(basedir, 'public_html',
                                               'results'))
set_web_base (gdb_web_base)

all_gdb_builders = [
    make_gdb_builder ('f14', 'x86_64'),
    make_gdb_builder ('f14', 'x86_64', 'dwarf4'),
    make_gdb_builder ('f14', 'x86_64', 'index'),
    make_gdb_builder ('f14', 'x86_64', 'm32'),
    make_gdb_builder ('f14', 'x86_64', 'gdbserver'),
    ]

all_gdb_builder_names = []
c['slaves'] = []

for builder in all_gdb_builders:
    name = builder['name']
    all_gdb_builder_names.append(name)
    c['slaves'].append(BuildSlave(name, name + '-password', # yes -- lame
                                  max_builds = 1))

c['builders'] = all_gdb_builders

# FIXME: we'd like to make the Try builder run the baseline build
# using a triggerable builder, but it isn't clear whether this is
# possible.

c['schedulers'] = []

branch_filter = ChangeFilter(branch = ['master',
                                       'gdb_7_3-branch'])
c['schedulers'].append(AnyBranchScheduler(name="all",
                                          change_filter = branch_filter,
                                          treeStableTimer = 0,
                                          builderNames = all_gdb_builder_names,
                                          properties = { 'isTryBuilder' : 'no' }))
# c['schedulers'].append(AnyBranchScheduler(name="all",
#                                           branch = 'master',
#                                           treeStableTimer = 0,
#                                           builderNames = all_gdb_builder_names,
#                                           properties = { 'isTryBuilder' : 'no' }))

# c['schedulers'].append(Try_Jobdir("try1",
#                                   builderNames = all_gdb_builder_names,
#                                   jobdir = '/home/buildbot/Jobs',
#                                   properties = { 'isTryBuilder' : 'yes' }))

gdb_users = []
# FIXME init gdb_users here

c['schedulers'].append(Try_Userpass("try1",
                                    builderNames = all_gdb_builder_names,
                                    port = 8031,
                                    userpass = gdb_users,
                                    properties = { 'isTryBuilder' : 'yes' }))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

# Catch things like PR gdb/42, PR16, PR 16 or bug #11,
# and turn them into gdb bugzilla URLs.
cc_re_tuple = (r'(PR [a-z]+/|PR ?|#)(\d+)',
               r'http://sourceware.org/bugzilla/show_bug.cgi?id=\2')

c['status'].append(html.WebStatus(http_port=8010,
                                  allowForce=False,
                                  order_console_by_time=True,
                                  changecommentlink=cc_re_tuple
                                  ))

c['status'].append(words.IRC(host="irc.yyz.redhat.com", nick="gdbbot",
                             channels=["#gdb"]))

# from buildbot.status import client
# c['status'].append(client.PBListener(9988))


####### DEBUGGING OPTIONS

# if you set 'debugPassword', then you can connect to the buildmaster with
# the diagnostic tool in contrib/debugclient.py . From this tool, you can
# manually force builds and inject changes, which may be useful for testing
# your buildmaster without actually committing changes to your repository (or
# before you have a functioning 'sources' set up). The debug tool uses the
# same port number as the slaves do: 'slavePortnum'.

#c['debugPassword'] = "debugpassword"

# if you set 'manhole', you can ssh into the buildmaster and get an
# interactive python shell, which may be useful for debugging buildbot
# internals. It is probably only useful for buildbot developers. You can also
# use an authorized_keys file, or plain telnet.
#from buildbot import manhole
#c['manhole'] = manhole.PasswordManhole("tcp:9999:interface=127.0.0.1",
#                       "admin", "password")


####### PROJECT IDENTITY

# the 'projectName' string will be used to describe the project that this
# buildbot is working on. For example, it is used as the title of the
# waterfall HTML page. The 'projectURL' string will be used to provide a link
# from buildbot HTML pages to your project's home page.

c['projectName'] = "GDB"
c['projectURL'] = "http://sourceware.org/gdb/"

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.Waterfall page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = "http://localhost:8010/"
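One small illustration, assuming nothing beyond the standard re module: what the changecommentlink regex pair above matches and the bugzilla URL it produces (WebStatus uses the pair to turn matching change-comment text into links).

# Illustrative sketch: apply the changecommentlink pattern by hand.
import re

pattern, replacement = (r'(PR [a-z]+/|PR ?|#)(\d+)',
                        r'http://sourceware.org/bugzilla/show_bug.cgi?id=\2')
print re.sub(pattern, replacement, 'Fix for PR gdb/42')
# Fix for http://sourceware.org/bugzilla/show_bug.cgi?id=42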