import os
import re
+import sys
import base64
import subprocess
-import ConfigParser
+import configparser
+
+from dateutil.tz import tzutc
+from datetime import datetime, timedelta
+
+from twisted.internet import defer
+from twisted.python import log
from buildbot import locks
+from buildbot.data import resultspec
+from buildbot.changes import filter
+from buildbot.changes.gitpoller import GitPoller
+from buildbot.config import BuilderConfig
+from buildbot.plugins import schedulers
+from buildbot.plugins import steps
+from buildbot.plugins import util
+from buildbot.process import results
+from buildbot.process.factory import BuildFactory
+from buildbot.process.properties import Property
+from buildbot.process.properties import Interpolate
+from buildbot.process import properties
+from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.forcesched import ForceScheduler
+from buildbot.steps.master import MasterShellCommand
+from buildbot.steps.shell import SetPropertyFromCommand
+from buildbot.steps.shell import ShellCommand
+from buildbot.steps.transfer import FileDownload
+from buildbot.steps.transfer import FileUpload
+from buildbot.steps.transfer import StringDownload
+from buildbot.worker import Worker
+
+
+if not os.path.exists("twistd.pid"):
+ with open("twistd.pid", "w") as pidfile:
+ pidfile.write("{}".format(os.getpid()))
-ini = ConfigParser.ConfigParser()
+ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
buildbot_url = ini.get("phase2", "buildbot_url")
# a shorter alias to save typing.
c = BuildmasterConfig = {}
-####### BUILDSLAVES
+####### BUILDWORKERS
-# The 'slaves' list defines the set of recognized buildslaves. Each element is
-# a BuildSlave object, specifying a unique slave name and password. The same
-# slave name and password must be configured on the slave.
-from buildbot.buildslave import BuildSlave
+# The 'workers' list defines the set of recognized buildworkers. Each element is
+# a Worker object, specifying a unique worker name and password. The same
+# worker name and password must be configured on the worker.
-slave_port = 9990
+worker_port = 9990
persistent = False
-other_builds = 0
tree_expire = 0
git_ssh = False
git_ssh_key = None
if ini.has_option("phase2", "port"):
- slave_port = ini.getint("phase2", "port")
+ worker_port = ini.get("phase2", "port")
if ini.has_option("phase2", "persistent"):
persistent = ini.getboolean("phase2", "persistent")
-if ini.has_option("phase2", "other_builds"):
- other_builds = ini.getint("phase2", "other_builds")
-
if ini.has_option("phase2", "expire"):
tree_expire = ini.getint("phase2", "expire")
else:
git_ssh = False
-c['slaves'] = []
+c['workers'] = []
max_builds = dict()
for section in ini.sections():
- if section.startswith("slave "):
+ if section.startswith("worker "):
if ini.has_option(section, "name") and ini.has_option(section, "password") and \
- ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
+ ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
name = ini.get(section, "name")
password = ini.get(section, "password")
sl_props = { 'shared_wd': False }
if sl_props['shared_wd'] and (max_builds != 1):
raise ValueError('max_builds must be 1 with shared workdir!')
- c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name], properties = sl_props))
+ c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
-# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
-# This must match the value configured into the buildslaves (with their
+# 'workerPortnum' defines the TCP port to listen on for connections from workers.
+# This must match the value configured into the buildworkers (with their
# --master option)
-c['slavePortnum'] = slave_port
+c['protocols'] = {'pb': {'port': worker_port}}
# coalesce builds
-c['mergeRequests'] = True
+c['collapseRequests'] = True
# Reduce amount of backlog data
-c['buildHorizon'] = 30
-c['logHorizon'] = 20
+c['configurators'] = [util.JanitorConfigurator(
+ logHorizon=timedelta(days=3),
+ hour=6,
+)]
####### CHANGESOURCES
if ini.has_option("rsync", "sdk_pattern"):
rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
+rsync_defopts = ["-4", "-v", "--timeout=120"]
+
repo_url = ini.get("repo", "url")
repo_branch = "master"
else:
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
-findarches = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'architectures'],
+os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
+findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
while True:
line = findarches.stdout.readline()
if not line:
break
- at = line.strip().split()
+ at = line.decode().strip().split()
arches.append(at)
archnames.append(at[0])
feeds = []
feedbranches = dict()
-from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
def parse_feed_entry(line):
parts = line.strip().split()
- if parts[0] == "src-git":
+ if parts[0].startswith("src-git"):
feeds.append(parts)
url = parts[2].strip().split(';')
branch = url[1] if len(url) > 1 else 'master'
line = make.stdout.readline()
if line:
- parse_feed_entry(line)
+ parse_feed_entry(str(line, 'utf-8'))
-with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
+with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
for line in f:
parse_feed_entry(line)
+if len(c['change_source']) == 0:
+ log.err("FATAL ERROR: no change_sources defined, aborting!")
+ sys.exit(-1)
####### SCHEDULERS
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build
-def branch_change_filter(change):
- return change.branch == feedbranches[change.repository]
-
-from buildbot.schedulers.basic import SingleBranchScheduler
-from buildbot.schedulers.forcesched import ForceScheduler
-from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
- name="all",
- change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
- treeStableTimer=60,
- builderNames=archnames))
+ name = "all",
+ change_filter = filter.ChangeFilter(
+ filter_fn = lambda change: change.branch == feedbranches[change.repository]
+ ),
+ treeStableTimer = 60,
+ builderNames = archnames))
c['schedulers'].append(ForceScheduler(
- name="force",
- builderNames=archnames))
+ name = "force",
+ buttonName = "Force builds",
+ label = "Force build details",
+ builderNames = [ "00_force_build" ],
+
+ codebases = [
+ util.CodebaseParameter(
+ "",
+ label = "Repository",
+ branch = util.FixedParameter(name = "branch", default = ""),
+ revision = util.FixedParameter(name = "revision", default = ""),
+ repository = util.FixedParameter(name = "repository", default = ""),
+ project = util.FixedParameter(name = "project", default = "")
+ )
+ ],
+
+ reason = util.StringParameter(
+ name = "reason",
+ label = "Reason",
+ default = "Trigger build",
+ required = True,
+ size = 80
+ ),
+
+ properties = [
+ util.NestedParameter(
+ name="options",
+ label="Build Options",
+ layout="vertical",
+ fields=[
+ util.ChoiceStringParameter(
+ name = "architecture",
+ label = "Build architecture",
+ default = "all",
+ choices = [ "all" ] + archnames
+ )
+ ]
+ )
+ ]
+))
####### BUILDERS
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
-# what steps, and which slaves can execute them. Note that any particular build will
-# only take place on one slave.
-
-from buildbot.process.factory import BuildFactory
-from buildbot.steps.source import Git
-from buildbot.steps.shell import ShellCommand
-from buildbot.steps.shell import SetProperty
-from buildbot.steps.transfer import FileUpload
-from buildbot.steps.transfer import FileDownload
-from buildbot.steps.transfer import StringDownload
-from buildbot.steps.master import MasterShellCommand
-from buildbot.process.properties import WithProperties
-
+# what steps, and which workers can execute them. Note that any particular build will
+# only take place on one worker.
@properties.renderer
def GetDirectorySuffix(props):
    """Render a "-YY.MM" upload-directory suffix from the "release_version"
    property, or "" when the property is absent or not a release version.

    Accepted version formats: "YY.MM.patch", "YY.MM.patch-rcN" and
    "YY.MM-SNAPSHOT"; only the YY.MM part is used for the suffix.
    """
    version_re = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if not props.hasProperty("release_version"):
        return ""
    match = version_re.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
@properties.renderer
def GetNumJobs(props):
    """Render the "make -jN" parallelism for the current worker.

    Divides the worker's CPU count (the "nproc" property, set by an
    earlier SetPropertyFromCommand step) by the number of concurrent
    builds allowed on that worker, falling back to "1" when either
    property is missing.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Use floor division: under Python 3, "/" yields a float, so the
        # rendered value would become e.g. "-j4.0", which make rejects.
        jobs = int(props["nproc"]) // max_builds[props["workername"]]
        # Never render "-j0" (worker with fewer CPUs than build slots).
        return str(max(jobs, 1))
    else:
        return "1"
@properties.renderer
def GetCwd(props):
    """Render the worker's build directory, falling back to "/" when the
    "builddir" property is not set."""
    return props["builddir"] if props.hasProperty("builddir") else "/"
def IsArchitectureSelected(target):
    """Build a doStepIf predicate for the force-build trigger steps.

    The returned callable is True unless the force scheduler's "options"
    property is a dict whose "architecture" entry names a specific
    architecture other than *target* ("all" always matches).
    """
    def check(step):
        try:
            opts = step.getProperty("options")
        except KeyError:
            # Property not set at all: run the step.
            return True
        if type(opts) is dict:
            chosen = opts.get("architecture", "all")
            return chosen in ("all", target)
        return True

    return check
+
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
try:
seckey = base64.b64decode(seckey)
- except:
+ except Exception:
return None
return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
def IsSharedWorkdir(step):
    """doStepIf helper: True when the worker's "shared_wd" property is truthy."""
    shared = step.getProperty("shared_wd")
    return True if shared else False
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @param bldr: a Builder object (must expose getBuilderId() and
        master.data.get()).
    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder
    # (order=['-complete_at'] + limit=1 -> at most one row, newest first).
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # No finished requests at all -> None (implicit return value).
        return
    complete_at = completed[0]['complete_at']

    # Most recently *started* build on this builder, independent of
    # build requests.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        # If that build finished later than the newest request's
        # complete_at, prefer the later timestamp.
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
+
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    Builders that never completed a build sort first; builders that are
    currently building sort last.

    @param master: the BuildMaster object (unused, required signature).
    @param builders: list of Builder objects to order.
    @returns: list of sorted builders, via Deferred
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair every builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # are scheduled first.
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders get the newest possible timestamp so they are
        # scheduled last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: named "infos" (not "results") to avoid shadowing the imported
    # buildbot.process.results module used by getNewestCompleteTime.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]
+
+c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []
-dlLock = locks.SlaveLock("slave_dl")
+dlLock = locks.WorkerLock("worker_dl")
+
+workerNames = [ ]
-slaveNames = [ ]
+for worker in c['workers']:
+ workerNames.append(worker.workername)
-for slave in c['slaves']:
- slaveNames.append(slave.slavename)
+force_factory = BuildFactory()
+
+c['builders'].append(BuilderConfig(
+ name = "00_force_build",
+ workernames = workerNames,
+ factory = force_factory))
for arch in arches:
ts = arch[1].split('/')
doStepIf = IsSharedWorkdir))
# find number of cores
- factory.addStep(SetProperty(
+ factory.addStep(SetPropertyFromCommand(
name = "nproc",
property = "nproc",
description = "Finding number of CPUs",
# prepare workspace
factory.addStep(FileDownload(
mastersrc = scripts_dir + '/cleanup.sh',
- slavedest = "../cleanup.sh",
- mode = 0755))
+ workerdest = "../cleanup.sh",
+ mode = 0o755))
if not persistent:
factory.addStep(ShellCommand(
name = "cleanold",
description = "Cleaning previous builds",
- command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
+ command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
workdir = ".",
haltOnFailure = True,
timeout = 2400))
factory.addStep(ShellCommand(
name = "cleanup",
description = "Cleaning work area",
- command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
+ command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
workdir = ".",
haltOnFailure = True,
timeout = 2400))
elif tree_expire > 0:
factory.addStep(FileDownload(
mastersrc = scripts_dir + '/expire.sh',
- slavedest = "../expire.sh",
- mode = 0755))
+ workerdest = "../expire.sh",
+ mode = 0o755))
factory.addStep(ShellCommand(
name = "expire",
factory.addStep(ShellCommand(
name = "downloadsdk",
description = "Downloading SDK archive",
- command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
+ command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
env={'RSYNC_PASSWORD': rsync_sdk_key},
haltOnFailure = True,
logEnviron = False))
factory.addStep(ShellCommand(
name = "updatesdk",
description = "Updating SDK",
- command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
+ command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
haltOnFailure = True))
factory.addStep(ShellCommand(
factory.addStep(StringDownload(
name = "writeversionmk",
s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
- slavedest = "sdk/getversion.mk",
- mode = 0755))
+ workerdest = "sdk/getversion.mk",
+ mode = 0o755))
- factory.addStep(SetProperty(
+ factory.addStep(SetPropertyFromCommand(
name = "getversion",
property = "release_version",
description = "Finding SDK release version",
factory.addStep(StringDownload(
name = "dlkeybuildpub",
s = UsignSec2Pub(usign_key, usign_comment),
- slavedest = "sdk/key-build.pub",
- mode = 0600))
+ workerdest = "sdk/key-build.pub",
+ mode = 0o600))
factory.addStep(StringDownload(
name = "dlkeybuild",
s = "# fake private key",
- slavedest = "sdk/key-build",
- mode = 0600))
+ workerdest = "sdk/key-build",
+ mode = 0o600))
factory.addStep(StringDownload(
name = "dlkeybuilducert",
s = "# fake certificate",
- slavedest = "sdk/key-build.ucert",
- mode = 0600))
+ workerdest = "sdk/key-build.ucert",
+ mode = 0o600))
factory.addStep(ShellCommand(
name = "mkdldir",
factory.addStep(FileDownload(
mastersrc = scripts_dir + '/ccache.sh',
- slavedest = 'sdk/ccache.sh',
- mode = 0755))
+ workerdest = 'sdk/ccache.sh',
+ mode = 0o755))
factory.addStep(ShellCommand(
name = "prepccache",
factory.addStep(StringDownload(
name = "dlgitclonekey",
s = git_ssh_key,
- slavedest = "../git-clone.key",
- mode = 0600))
+ workerdest = "../git-clone.key",
+ mode = 0o600))
factory.addStep(ShellCommand(
name = "patchfeedsconf",
description = "Updating feeds",
workdir = "build/sdk",
command = ["./scripts/feeds", "update", "-f"],
- env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
+ env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
haltOnFailure = True))
if git_ssh:
description = "Clearing failure logs",
workdir = "build/sdk",
command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
- haltOnFailure = False
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = True,
))
factory.addStep(ShellCommand(
description = "Building packages",
workdir = "build/sdk",
timeout = 3600,
- command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
- env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
+ command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
+ env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
haltOnFailure = True))
factory.addStep(ShellCommand(
))
factory.addStep(FileUpload(
- slavesrc = "sdk/sign.tar.gz",
+ workersrc = "sdk/sign.tar.gz",
masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
haltOnFailure = True
))
factory.addStep(FileDownload(
mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
- slavedest = "sdk/sign.tar.gz",
+ workerdest = "sdk/sign.tar.gz",
haltOnFailure = True
))
name = "uploadprepare",
description = "Preparing package directory",
workdir = "build/sdk",
- command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
+ command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
logEnviron = False
name = "packageupload",
description = "Uploading package files",
workdir = "build/sdk",
- command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
+ command = ["rsync"] + rsync_defopts + ["--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
logEnviron = False
name = "logprepare",
description = "Preparing log directory",
workdir = "build/sdk",
- command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
+ command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
logEnviron = False
description = "Finding failure logs",
workdir = "build/sdk/logs/package/feeds",
command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
- haltOnFailure = False
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = True,
))
factory.addStep(ShellCommand(
name = "logcollect",
description = "Collecting failure logs",
workdir = "build/sdk",
- command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
- haltOnFailure = False
+ command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = True,
))
factory.addStep(ShellCommand(
name = "logupload",
description = "Uploading failure logs",
workdir = "build/sdk",
- command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
+ command = ["rsync"] + rsync_defopts + ["--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = True,
logEnviron = False
))
name = "sourcelist",
description = "Finding source archives to upload",
workdir = "build/sdk",
- command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
+ command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
haltOnFailure = True
))
name = "sourceupload",
description = "Uploading source archives",
workdir = "build/sdk",
- command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
- WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
+ command = ["rsync"] + rsync_defopts + ["--files-from=sourcelist", "--progress", "--checksum", "--delay-updates",
+ Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-az", "dl/", "%s/" %(rsync_src_url)],
env={'RSYNC_PASSWORD': rsync_src_key},
haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = True,
logEnviron = False
))
command=["df", "-h", "."],
env={'LC_ALL': 'C'},
haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
alwaysRun = True
))
- from buildbot.config import BuilderConfig
+ factory.addStep(ShellCommand(
+ name = "du",
+ description = "Reporting estimated file space usage",
+ command=["du", "-sh", "."],
+ env={'LC_ALL': 'C'},
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ alwaysRun = True
+ ))
- c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
+ factory.addStep(ShellCommand(
+ name = "ccachestat",
+ description = "Reporting ccache stats",
+ command=["ccache", "-s"],
+ want_stderr = False,
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ alwaysRun = True,
+ ))
+
+ c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
+ c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
+ force_factory.addStep(steps.Trigger(
+ name = "trigger_%s" % arch[0],
+ description = "Triggering %s build" % arch[0],
+ schedulerNames = [ "trigger_%s" % arch[0] ],
+ set_properties = { "reason": Property("reason") },
+ doStepIf = IsArchitectureSelected(arch[0])
+ ))
####### STATUS arches
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
-c['status'] = []
-
-from buildbot.status import html
-from buildbot.status.web import authz, auth
-
if ini.has_option("phase2", "status_bind"):
+ c['www'] = {
+ 'port': ini.get("phase2", "status_bind"),
+ 'plugins': {
+ 'waterfall_view': True,
+ 'console_view': True,
+ 'grid_view': True
+ }
+ }
+
if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
- authz_cfg=authz.Authz(
- # change any of these to True to enable; see the manual for more
- # options
- auth=auth.BasicAuth([(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))]),
- gracefulShutdown = 'auth',
- forceBuild = 'auth', # use this to test your slave once it is set up
- forceAllBuilds = 'auth',
- pingBuilder = False,
- stopBuild = 'auth',
- stopAllBuilds = 'auth',
- cancelPendingBuild = 'auth',
+ c['www']['auth'] = util.UserPasswordAuth([
+ (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
+ ])
+ c['www']['authz'] = util.Authz(
+ allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
+ roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
)
- c['status'].append(html.WebStatus(http_port=ini.get("phase2", "status_bind"), authz=authz_cfg))
- else:
- c['status'].append(html.WebStatus(http_port=ini.get("phase2", "status_bind")))
####### PROJECT IDENTITY
# this at its default for all but the largest installations.
'db_url' : "sqlite:///state.sqlite",
}
+
+c['buildbotNetUsageData'] = None