New method for storing log files.

This is a new method developed to store the log files for builds.  It
doesn't use git anymore; instead it uses a set of sqlite databases,
scripts, and xz in order to make the files smaller and more organized.
Also, it is now possible to safely remove old build logs.
This commit is contained in:
Sergio Durigan Junior 2016-11-10 01:45:57 -05:00
parent c0ab924f25
commit 78ac2523c8
5 changed files with 74 additions and 40 deletions

View file

@ -10,7 +10,7 @@ class SaveGDBResults (steps.MasterShellCommand):
descriptionDone = 'saved build results'
def __init__ (self, **kwargs):
steps.MasterShellCommand.__init__ (self, **kwargs)
steps.MasterShellCommand.__init__ (self, command = None, **kwargs)
self.command = [ os.path.expanduser ("~/scripts/update-logs.sh"),
"--commit", util.Property ('got_revision'),
"--builder", util.Property ('buildername'),

View file

@ -1,6 +1,6 @@
# GDB .sum-fetching command.
from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.status.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.plugins import steps, util
from sumfiles import DejaResults, get_web_base
from gdbgitdb import switch_to_branch
@ -22,16 +22,27 @@ def create_copy_command (props):
con = sqlite3.connect (db_file)
c = con.cursor ()
c.execute ('SELECT commitid WHERE branch = "%s" AND trysched = 0 FROM logs ORDER BY timestamp DESC LIMIT 1' % branch)
c.execute ('SELECT commitid FROM logs WHERE branch = "%s" AND trysched = 0 ORDER BY timestamp DESC LIMIT 1' % branch)
comm = c.fetchone ()
con.close ()
commit = c.fetchone ()[0]
from_path = os.path.join (get_web_base (), commit[:2], commit, 'gdb.sum')
if istry and istry == 'yes':
to_path = os.path.join (get_web_base (), 'try', rev[:2], rev, 'previous_gdb.sum')
if comm:
commit = comm[0]
else:
to_path = os.path.join (get_web_base (), rev[:2], rev, 'previous_gdb.sum')
return [ 'true' ]
from_path = os.path.join (get_web_base (), builder, commit[:2], commit, 'gdb.sum')
if istry and istry == 'yes':
to_path = os.path.join (get_web_base (), builder, 'try', rev[:2], rev)
else:
to_path = os.path.join (get_web_base (), builder, rev[:2], rev)
if not os.path.exists (to_path):
old_umask = os.umask (0022)
os.makedirs (to_path)
os.umask (old_umask)
to_path = os.path.join (to_path, 'previous_gdb.sum.xz')
command += [ from_path, to_path ]
@ -79,11 +90,20 @@ class GdbCatSumfileCommand(steps.ShellCommand):
con = sqlite3.connect (db_file)
c = con.cursor ()
c.execute ('SELECT commitid WHERE branch = "%s" AND trysched = 0 FROM logs ORDER BY timestamp DESC LIMIT 1' % branch)
prev = c.fetchone ()
con.close ()
prevcommit = c.fetchone ()[0]
# Switch to the right branch inside the BUILDER repo
# switch_to_branch (builder, branch, force_switch = False)
if prev:
prevcommit = prev[0]
else:
# This takes care of our very first build.
parser.write_sum_file (cur_results, builder, branch, rev)
# If there was no previous baseline, then this run
# gets the honor.
if baseline is None:
baseline = cur_results
parser.write_baseline (baseline, builder, branch, rev)
return SUCCESS
baseline = parser.read_baseline (builder, branch, prevcommit)
old_sum = parser.read_sum_file (builder, branch, prevcommit)

View file

@ -74,7 +74,9 @@ class DejaResults(object):
bdir = os.path.join (gdb_web_base, builder, rev[:2], rev)
if not os.path.exists (bdir):
old_umask = os.umask (0022)
os.makedirs (bdir)
os.umask (old_umask)
fname = os.path.join (bdir, filename)
keys = sum_dict[0].keys ()
mode = 'w'
@ -126,10 +128,11 @@ class DejaResults(object):
self.parse_sum_line (result, line,
is_racy_file = is_racy_file)
elif os.path.exists (fname + '.xz'):
with lzma.open (fname, 'r') as f:
for line in f:
self.parse_sum_line (result, line,
is_racy_file = is_racy_file)
f = lzma.LZMAFile (fname, 'r')
for line in f:
self.parse_sum_line (result, line,
is_racy_file = is_racy_file)
f.close ()
else:
return None
return result
@ -140,8 +143,8 @@ class DejaResults(object):
def read_baseline(self, builder, branch, rev):
return self._read_sum_file (builder, branch, rev, 'baseline')
def read_xfail (self, builder, branch, rev):
return self._read_sum_file (builder, branch, rev, 'xfail', is_xfail_file = True)
def read_xfail (self, builder, branch):
return self._read_sum_file (builder, branch, None, 'xfail', is_xfail_file = True)
def read_old_sum_file (self, builder, branch, rev):
return self._read_sum_file (builder, branch, rev, 'previous_gdb.sum')

View file

@ -15,14 +15,13 @@ from buildbot.schedulers.timed import Nightly
from buildbot.schedulers.trysched import Try_Jobdir
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.process import factory
from buildbot.plugins import util, reporters, steps, worker
from buildbot.reporters import utils
from buildbot.plugins import util, steps, buildslave
from buildbot.changes.filter import ChangeFilter
from buildbot.process.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.status.results import SUCCESS, WARNINGS, FAILURE, EXCEPTION
from buildbot.interfaces import IEmailLookup
from zope.interface import implements
from gdbcommand import CopyOldGDBSumFile, GdbCatSumfileCommand
from gdbgitdb import SaveGDBResults, get_builder_commit_id
from fsdb import SaveGDBResults
from racyanalyze import GDBAnalyzeRacyTests
from urllib import quote
@ -254,7 +253,8 @@ subsequent commits are made after X, by different people."""
def MessageGDBTesters (mode, name, build, results, master_status):
"""This function is responsible for composing the message that will be
send to the gdb-testers mailing list."""
res_url = "http://gdb-build.sergiodj.net/cgit"
res_url = "http://gdb-build.sergiodj.net/results"
sourcestamp = build.getSourceStamps ()[0]
branch = build.getSourceStamps ()[0].branch
cur_change = build.getSourceStamps ()[0].changes[0]
properties = build.getProperties ()
@ -447,8 +447,8 @@ send to the gdb-testers mailing list."""
# URL to find more info about what went wrong.
text += "\nTestsuite log (gdb.sum and gdb.log) URL(s):\n"
text += "\t<%s/%s/%s/%s/>\n" % (res_url, name, sourcestamp.revision[:2],
sourcestamp.revision)
text += "\t<%s/%s/try/%s/%s/>\n" % (res_url, name, sourcestamp.revision[:2],
sourcestamp.revision)
# commit_id = get_builder_commit_id (name, sourcestamp.revision,
# sourcestamp.branch)
@ -582,8 +582,7 @@ mn_try = MyMailNotifier(fromaddr = GDB_MAIL_FROM,
lookup = LookupEmailTryBuild (),
tags = [ "MAIL", "TRY" ],
extraHeaders = { 'X-GDB-Buildbot' : '1',
'In-Reply-To' : WithProperties ("<%s-try@gdb-build>",
'got_revision') })
'In-Reply-To' : util.Interpolate ("<%(prop:got_revision)s-try@gdb-build>")})
c['status'].append (mn)
c['status'].append (mn_try)
@ -941,8 +940,8 @@ The parameters of the class are:
descriptionDone = 'analyzed test results',
doStepIf = scheduler_is_not_racy_do,
hideStepIf = scheduler_is_racy_hide))
self.addStep (steps.FileUpload (workersrc = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite/gdb.log"),
masterdest = util.Interpolate ("public_html/results/%(prop:buildername)s/tmp/$(prop:got_revision)s/gdb.log"),
self.addStep (steps.FileUpload (slavesrc = util.Interpolate ("%(prop:builddir)s/build/gdb/testsuite/gdb.log"),
masterdest = util.Interpolate ("~/results/%(prop:buildername)s/tmp/%(prop:got_revision)s/gdb.log"),
mode = 0644,
doStepIf = scheduler_is_not_racy_do,
hideStepIf = True))
@ -1164,11 +1163,11 @@ def load_config (c):
config = load (open ("lib/config.json"))
passwd = load (open ("lib/passwords.json"))
c['slaves'] = [BuildSlave (slave['name'], passwd[slave['name']],
max_builds = 1,
notify_on_missing = [ str (slave['admin']) ],
missing_timeout = 300,
properties = { 'jobs' : slave['jobs'] })
c['slaves'] = [buildslave.BuildSlave (slave['name'], passwd[slave['name']],
max_builds = 1,
notify_on_missing = [ str (slave['admin']) ],
missing_timeout = 300,
properties = { 'jobs' : slave['jobs'] })
for slave in config['slaves']]
c['schedulers'] = []

View file

@ -65,7 +65,7 @@ DIR=$BASE_DIR/$BUILDER/
if test ! -d $DIR ; then
msg "$DIR is not a valid directory. Creating it..."
mkdir -p $DIR
(umask 0022 && mkdir --verbose -p $DIR)
fi
cd $DIR
@ -86,7 +86,8 @@ if test "$IS_TRY_SCHED" = "yes" ; then
ISTRY=1
fi
if test -d $CDIR ; then
CDIR_EXISTS=`sqlite3 $DB_NAME "SELECT commitid FROM logs WHERE commitid = '${COMMIT}'"`
if test -n "$CDIR_EXISTS" ; then
# If this is a try build, the user is doing a rebuild.
# If this is a normal build, someone triggered a rebuild.
# Either way, we need to delete the current log dir.
@ -94,8 +95,10 @@ if test -d $CDIR ; then
rm --verbose -rf $CDIR
fi
msg "Creating directory structure $CDIR..."
mkdir --verbose -p $CDIR
if test ! -d $CDIR ; then
msg "Creating directory structure $CDIR..."
(umask 0022 && mkdir --verbose -p $CDIR)
fi
cd $CDIR
TMP_DIR=$DIR/tmp/$COMMIT/
@ -104,21 +107,30 @@ msg "Moving log files to $PWD..."
mv --verbose $TMP_DIR/* .
rmdir $TMP_DIR
msg "Compressing log files..."
xz --verbose --compress *
find . -type f ! -name "*.xz" | xargs xz --verbose --compress
PREV_COMMIT=`sqlite3 $DB_NAME "SELECT commitid FROM logs WHERE branch = '$BRANCH' AND trysched = 0 ORDER BY timestamp DESC LIMIT 1"`
if test "$IS_TRY_SCHED" != "yes" ; then
PREV_2DIG=`echo $PREV_COMMIT | sed 's/^\(..\).*$/\1/'`
ln -s $DIR/$PREV_2DIG/$PREV_COMMIT PREVIOUS_COMMIT
ln -s $DIR/$CDIR $DIR/$PREV_2DIG/$PREV_COMMIT/NEXT_COMMIT
fi
msg "Update database..."
sqlite3 $DB_NAME "INSERT INTO logs(commitid, branch, trysched) VALUES('$COMMIT', '$BRANCH', $ISTRY)"
msg "Creating README.txt..."
cat > README.txt <<EOF
== README -- Logs for commit $COMMIT ==
=== README ===
Logs for: $COMMIT
Branch tested: $BRANCH
Previous commit: $PREV_COMMIT
Patch: <http://sourceware.org/git/?p=binutils-gdb.git;a=commitdiff;h=${COMMIT}>
EOF
exit 0