import os
import re
+import base64
import subprocess
-import ConfigParser
+import configparser
+
+from dateutil.tz import tzutc
+from datetime import datetime, timedelta
+
+from twisted.internet import defer
+from twisted.python import log
from buildbot import locks
+from buildbot.data import resultspec
+from buildbot.changes import filter
+from buildbot.changes.gitpoller import GitPoller
+from buildbot.config import BuilderConfig
+from buildbot.plugins import reporters
+from buildbot.plugins import schedulers
+from buildbot.plugins import steps
+from buildbot.plugins import util
+from buildbot.process import properties
+from buildbot.process import results
+from buildbot.process.factory import BuildFactory
+from buildbot.process.properties import Interpolate
+from buildbot.process.properties import Property
+from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.forcesched import BaseParameter
+from buildbot.schedulers.forcesched import ForceScheduler
+from buildbot.schedulers.forcesched import ValidationError
+from buildbot.steps.master import MasterShellCommand, SetProperty
+from buildbot.steps.shell import SetPropertyFromCommand
+from buildbot.steps.shell import ShellCommand
+from buildbot.steps.source.git import Git
+from buildbot.steps.transfer import FileDownload
+from buildbot.steps.transfer import FileUpload
+from buildbot.steps.transfer import StringDownload
+from buildbot.worker import Worker
+
+
+if not os.path.exists("twistd.pid"):
+ with open("twistd.pid", "w") as pidfile:
+ pidfile.write("{}".format(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.
-ini = ConfigParser.ConfigParser()
-ini.read("./config.ini")
+ini = configparser.ConfigParser()
+ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
+
+if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
+ raise ValueError("Fix your configuration")
+
+inip1 = ini['phase1']
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
-c['title'] = ini.get("general", "title")
-c['titleURL'] = ini.get("general", "title_url")
+c['title'] = ini['general'].get("title")
+c['titleURL'] = ini['general'].get("title_url")
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
-c['buildbotURL'] = ini.get("general", "buildbot_url")
+c['buildbotURL'] = inip1.get("buildbot_url")
-####### BUILDSLAVES
+####### BUILDWORKERS
-# The 'slaves' list defines the set of recognized buildslaves. Each element is
-# a BuildSlave object, specifying a unique slave name and password. The same
-# slave name and password must be configured on the slave.
-from buildbot.buildslave import BuildSlave
+# The 'workers' list defines the set of recognized buildworkers. Each element is
+# a Worker object, specifying a unique worker name and password. The same
+# worker name and password must be configured on the worker.
-slave_port = 9989
-
-if ini.has_option("general", "port"):
- slave_port = ini.getint("general", "port")
-
-c['slaves'] = []
-max_builds = dict()
-do_cleanup = dict()
+c['workers'] = []
NetLocks = dict()
for section in ini.sections():
- if section.startswith("slave "):
- if ini.has_option(section, "name") and ini.has_option(section, "password"):
- sl_props = { 'dl_lock':None, 'ul_lock':None, }
+ if section.startswith("worker "):
+ if ini.has_option(section, "name") and ini.has_option(section, "password") and \
+ (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
+ sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'shared_wd':True }
name = ini.get(section, "name")
password = ini.get(section, "password")
- max_builds[name] = 1
- do_cleanup[name] = False
- if ini.has_option(section, "builds"):
- max_builds[name] = ini.getint(section, "builds")
if ini.has_option(section, "cleanup"):
- do_cleanup[name] = ini.getboolean(section, "cleanup")
+ sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
if ini.has_option(section, "dl_lock"):
lockname = ini.get(section, "dl_lock")
sl_props['dl_lock'] = lockname
sl_props['ul_lock'] = lockname
if lockname not in NetLocks:
NetLocks[lockname] = locks.MasterLock(lockname)
- c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name], properties = sl_props))
+ if ini.has_option(section, "shared_wd"):
+ shared_wd = ini.getboolean(section, "shared_wd")
+ sl_props['shared_wd'] = shared_wd
+ c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
-# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
-# This must match the value configured into the buildslaves (with their
-# --master option)
-c['slavePortnum'] = slave_port
+# PB port can be either a numeric port or a connection string
+pb_port = inip1.get("port") or 9989
+c['protocols'] = {'pb': {'port': pb_port}}
# coalesce builds
-c['mergeRequests'] = True
+c['collapseRequests'] = True
# Reduce amount of backlog data
-c['buildHorizon'] = 30
-c['logHorizon'] = 20
+c['configurators'] = [util.JanitorConfigurator(
+ logHorizon=timedelta(days=3),
+ hour=6,
+)]
-####### CHANGESOURCES
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    builder_id = yield bldr.getBuilderId()

    # Newest completed, non-skipped build request for this builder.
    requests = yield bldr.master.data.get(
        ('builders', builder_id, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not requests:
        return

    newest = requests[0]['complete_at']

    # The most recently started build may have finished even later than
    # the newest request; prefer its timestamp in that case.
    builds = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [builder_id]),
        ],
        order=['-started_at'], limit=1)

    if builds and builds[0]:
        build_complete_at = builds[0]['complete_at']
        if build_complete_at and build_complete_at > newest:
            return build_complete_at

    return newest
+
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # A builder with any current (or stale) build activity is busy.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Builders that never completed a build sort first (oldest possible
        # timestamp) ...
        if not complete_at:
            complete_at = datetime.min.replace(tzinfo=tzutc())

        # ... and builders that are currently building sort last.
        if is_building(bldr):
            complete_at = datetime.max.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: deliberately not named 'results' — that would shadow the
    # buildbot.process.results module imported at file level.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]
-if ini.has_option("general", "cc_version"):
- cc_version = ini.get("general", "cc_version").split()
- if len(cc_version) == 1:
- cc_version = ["eq", cc_version[0]]
+c['prioritizeBuilders'] = prioritizeBuilders
-repo_url = ini.get("repo", "url")
-repo_branch = "master"
+####### CHANGESOURCES
-if ini.has_option("repo", "branch"):
- repo_branch = ini.get("repo", "branch")
+work_dir = os.path.abspath(ini['general'].get("workdir", "."))
+scripts_dir = os.path.abspath("../scripts")
-rsync_bin_url = ini.get("rsync", "binary_url")
-rsync_bin_key = ini.get("rsync", "binary_password")
+tree_expire = inip1.getint("expire", 0)
+config_seed = inip1.get("config_seed", "")
-rsync_src_url = None
-rsync_src_key = None
+repo_url = ini['repo'].get("url")
+repo_branch = ini['repo'].get("branch", "master")
-if ini.has_option("rsync", "source_url"):
- rsync_src_url = ini.get("rsync", "source_url")
- rsync_src_key = ini.get("rsync", "source_password")
+rsync_bin_url = ini['rsync'].get("binary_url")
+rsync_bin_key = ini['rsync'].get("binary_password")
+rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
-gpg_home = "~/.gnupg"
-gpg_keyid = None
-gpg_comment = "Unattended build signature"
-gpg_passfile = "/dev/null"
+if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
+ rsync_bin_defopts += ["--contimeout=20"]
-if ini.has_option("gpg", "home"):
- gpg_home = ini.get("gpg", "home")
+rsync_src_url = ini['rsync'].get("source_url")
+rsync_src_key = ini['rsync'].get("source_password")
+rsync_src_defopts = ["-v", "-4", "--timeout=120"]
-if ini.has_option("gpg", "keyid"):
- gpg_keyid = ini.get("gpg", "keyid")
+if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
+ rsync_src_defopts += ["--contimeout=20"]
-if ini.has_option("gpg", "comment"):
- gpg_comment = ini.get("gpg", "comment")
+usign_key = None
+usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
-if ini.has_option("gpg", "passfile"):
- gpg_passfile = ini.get("gpg", "passfile")
+if ini.has_section("usign"):
+ usign_key = ini['usign'].get("key")
+ usign_comment = ini['usign'].get("comment", usign_comment)
-enable_kmod_archive = True
+enable_kmod_archive = inip1.getboolean("kmod_archive", False)
# find targets
targets = [ ]
-if not os.path.isdir(home_dir+'/source.git'):
- subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
+if not os.path.isdir(work_dir+'/source.git'):
+ subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
- subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
+ subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
-findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
- stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
+os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
+findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
+ stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
while True:
line = findtargets.stdout.readline()
if not line:
break
- ta = line.strip().split(' ')
+ ta = line.decode().strip().split(' ')
targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.
-from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
c['change_source'].append(GitPoller(
repo_url,
- workdir=home_dir+'/work.git', branch=repo_branch,
+ workdir=work_dir+'/work.git', branch=repo_branch,
pollinterval=300))
####### SCHEDULERS
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build
-from buildbot.schedulers.basic import SingleBranchScheduler
-from buildbot.schedulers.forcesched import ForceScheduler
-from buildbot.changes import filter
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter listing the selectable release tags.

    The choice list is recomputed on each access from `git ls-remote --tags`
    against the configured repository, restricted to tags of the release
    series encoded in repo_branch (e.g. branch "openwrt-19.07" offers tags
    "v19.07.*").  An empty string is always offered first ("no tag").
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Cache of the last computed choice list, used by parse_from_arg()
        # for strict validation.
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            # subprocess.run waits for and reaps the child (the previous
            # Popen+readline loop leaked the process and its stdout pipe).
            # Exit status is deliberately ignored: on failure we simply
            # produce no tags, as before.
            findtags = subprocess.run(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            for line in findtags.stdout.decode().splitlines():
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.strip())

                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Newest first; the '-z' suffix makes final releases sort above
            # their release candidates (e.g. "19.07.3" before "19.07.3-rc1").
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            taglist.insert(0, '')

            self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
+
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
- name="all",
- change_filter=filter.ChangeFilter(branch=repo_branch),
- treeStableTimer=60,
- builderNames=targets))
+ name = "all",
+ change_filter = filter.ChangeFilter(branch=repo_branch),
+ treeStableTimer = 60,
+ builderNames = targets))
c['schedulers'].append(ForceScheduler(
- name="force",
- builderNames=targets))
+ name = "force",
+ buttonName = "Force builds",
+ label = "Force build details",
+ builderNames = [ "00_force_build" ],
+
+ codebases = [
+ util.CodebaseParameter(
+ "",
+ label = "Repository",
+ branch = util.FixedParameter(name = "branch", default = ""),
+ revision = util.FixedParameter(name = "revision", default = ""),
+ repository = util.FixedParameter(name = "repository", default = ""),
+ project = util.FixedParameter(name = "project", default = "")
+ )
+ ],
+
+ reason = util.StringParameter(
+ name = "reason",
+ label = "Reason",
+ default = "Trigger build",
+ required = True,
+ size = 80
+ ),
+
+ properties = [
+ util.NestedParameter(
+ name="options",
+ label="Build Options",
+ layout="vertical",
+ fields=[
+ util.ChoiceStringParameter(
+ name = "target",
+ label = "Build target",
+ default = "all",
+ choices = [ "all" ] + targets
+ ),
+ TagChoiceParameter(
+ name = "tag",
+ label = "Build tag",
+ default = ""
+ )
+ ]
+ )
+ ]
+))
####### BUILDERS
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
-# what steps, and which slaves can execute them. Note that any particular build will
-# only take place on one slave.
-
-from buildbot.process.factory import BuildFactory
-from buildbot.steps.source.git import Git
-from buildbot.steps.shell import ShellCommand
-from buildbot.steps.shell import SetProperty
-from buildbot.steps.transfer import FileUpload
-from buildbot.steps.transfer import FileDownload
-from buildbot.steps.master import MasterShellCommand
-from buildbot.process.properties import WithProperties
-from buildbot.process import properties
-
-
-CleanTargetMap = [
- [ "tools", "tools/clean" ],
- [ "chain", "toolchain/clean" ],
- [ "linux", "target/linux/clean" ],
- [ "dir", "dirclean" ],
- [ "dist", "distclean" ]
-]
+# what steps, and which workers can execute them. Note that any particular build will
+# only take place on one worker.
-def IsMakeCleanRequested(pattern):
- def CheckCleanProperty(step):
- val = step.getProperty("clean")
- if val and re.match(pattern, val):
- return True
- else:
- return False
-
- return CheckCleanProperty
def IsSharedWorkdir(step):
    """True when the worker advertises a shared build directory ('shared_wd')."""
    return bool(step.getProperty("shared_wd"))

def IsCleanupRequested(step):
    """Shared workdirs are never cleaned; otherwise honour the worker's
    'do_cleanup' property."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    """Tree expiry applies only to non-shared workdirs that are not already
    scheduled for a full cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
def IsTaggingRequested(step):
    """True when the 'tag' property holds a full release version such as
    "19.07.3" or "21.02.0-rc1"."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
-def IsNoTaggingRequested(step):
- return not IsTaggingRequested(step)
-
def IsNoMasterBuild(step):
    """True unless this master is tracking the 'master' branch."""
    return repo_branch != "master"
-def IsCleanupConfigured(step):
- slave = step.getProperty("slavename")
- if slave and slave in do_cleanup:
- return do_cleanup[slave] > 0
- else:
- return False
-
-def GetBaseVersion(props):
- if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
def GetBaseVersion():
    """Derive the numeric release series (e.g. "19.07") from repo_branch.

    Branch names are expected to look like "<name>-<major>.<minor>";
    anything else is treated as master.
    """
    match = re.match(r"^[^-]+-([0-9]+\.[0-9]+)$", repo_branch)
    return match[1] if match else "master"
@properties.renderer
def GetVersionPrefix(props):
    """Directory prefix under which build artifacts are published.

    Tagged release builds go into "<tag>/", release-branch snapshots into
    "<series>-SNAPSHOT/", and master builds into the tree root.
    """
    basever = GetBaseVersion()
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
-def GetNumJobs(props):
- if props.hasProperty("slavename") and props.hasProperty("nproc"):
- return (int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds))
- else:
- return 1
-
-def GetCC(props):
- if props.hasProperty("cc_command"):
- return props["cc_command"]
- else:
- return "gcc"
-
-def GetCXX(props):
- if props.hasProperty("cxx_command"):
- return props["cxx_command"]
- else:
- return "g++"
-
-def GetCwd(props):
- if props.hasProperty("builddir"):
- return props["builddir"]
- elif props.hasProperty("workdir"):
- return props["workdir"]
- else:
- return "/"
-
def GetNextBuild(builder, requests):
    """Pick the next build request for a builder: any tagged (release)
    request wins; otherwise take the first pending request and log it."""
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request

    request = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
    return request
+
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict for make/feeds invocations.

    CCC/CCXX always carry the real compiler commands (worker properties
    cc_command/cxx_command, defaulting to gcc/g++).  With tryccache=True,
    CC/CXX point at the ccache wrapper scripts placed in the build
    directory (which exec ${CCACHE} ${CCC}/… themselves) and CCACHE holds
    the ccache command, if any; otherwise CC/CXX alias the plain compilers
    and CCACHE is empty.  `overrides` entries win over everything.

    Note: the stray "else: return []" that previously trailed this
    function was unreachable paste residue and has been removed.
    """
    env = {
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
        env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
        env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
    else:
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']
        env['CCACHE'] = ''
    if overrides is not None:
        env.update(overrides)
    return env
@util.renderer
def TagPropertyValue(props):
    """Extract the requested tag from the force-scheduler 'options' dict,
    or None when no tag was selected."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # Strict type check kept deliberately (dict subclasses are ignored).
        if type(options) is dict:
            return options.get("tag")
    return None
+
def IsTargetSelected(target):
    """Return a doStepIf predicate that is True when the forced build
    selected this target (or "all", or made no explicit selection)."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                chosen = options.get("target", "all")
                if chosen not in ("all", target):
                    return False
        except KeyError:
            # No 'options' property at all: run the step.
            pass

        return True

    return CheckTargetProperty
+
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign (signify-style) public key from a base64 secret key.

    The decoded secret key is: 2-byte algorithm tag, KDF parameters and
    checksum up to offset 32, an 8-byte key id at [32:40], and the public
    half of the key pair from offset 72 on; the public key blob is
    tag + key id + public half.  The comment's trailing "secret key" is
    rewritten to "public key".

    Returns the two-line public key file content as str, or None when
    seckey is not decodable base64.

    Fix: base64.b64encode() returns bytes in Python 3; without .decode()
    the formatted output contained a literal "b'...'" representation.
    """
    try:
        blob = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # binascii.Error (bad padding etc.) is a ValueError subclass.
        return None

    pub = blob[0:2] + blob[32:40] + blob[72:]
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           base64.b64encode(pub).decode())
+
+
c['builders'] = []
-dlLock = locks.SlaveLock("slave_dl")
-
-checkBuiltin = re.sub('[\t\n ]+', ' ', """
- checkBuiltin() {
- local symbol op path file;
- for file in $CHANGED_FILES; do
- case "$file" in
- package/*/*) : ;;
- *) return 0 ;;
- esac;
- done;
- while read symbol op path; do
- case "$symbol" in package-*)
- symbol="${symbol##*(}";
- symbol="${symbol%)}";
- for file in $CHANGED_FILES; do
- case "$file" in "package/$path/"*)
- grep -qsx "$symbol=y" .config && return 0
- ;; esac;
- done;
- esac;
- done < tmp/.packagedeps;
- return 1;
- }
-""").strip()
-
-
-class IfBuiltinShellCommand(ShellCommand):
- def _quote(self, str):
- if re.search("[^a-zA-Z0-9/_.-]", str):
- return "'%s'" %(re.sub("'", "'\"'\"'", str))
- return str
-
- def setCommand(self, command):
- if not isinstance(command, (str, unicode)):
- command = ' '.join(map(self._quote, command))
- self.command = [
- '/bin/sh', '-c',
- '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
- ]
-
- def setupEnvironment(self, cmd):
- slaveEnv = self.slaveEnvironment
- if slaveEnv is None:
- slaveEnv = { }
- changedFiles = { }
- for request in self.build.requests:
- for source in request.sources:
- for change in source.changes:
- for file in change.files:
- changedFiles[file] = True
- fullSlaveEnv = slaveEnv.copy()
- fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
- cmd.args['env'] = fullSlaveEnv
-
-slaveNames = [ ]
-
-for slave in c['slaves']:
- slaveNames.append(slave.slavename)
+dlLock = locks.WorkerLock("worker_dl")
+
+workerNames = [ ]
+
+for worker in c['workers']:
+ workerNames.append(worker.workername)
+
+force_factory = BuildFactory()
+
+c['builders'].append(BuilderConfig(
+ name = "00_force_build",
+ workernames = workerNames,
+ factory = force_factory))
for target in targets:
ts = target.split('/')
factory = BuildFactory()
+ # setup shared work directory if required
+ factory.addStep(ShellCommand(
+ name = "sharedwd",
+ description = "Setting up shared work directory",
+ command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
+ workdir = ".",
+ haltOnFailure = True,
+ doStepIf = IsSharedWorkdir))
+
# find number of cores
- factory.addStep(SetProperty(
+ factory.addStep(SetPropertyFromCommand(
name = "nproc",
property = "nproc",
description = "Finding number of CPUs",
command = ["nproc"]))
+ # set number of jobs
+ factory.addStep(SetProperty(
+ name = "njobs",
+ property = "njobs",
+ description = "Set max concurrency",
+ value = Interpolate("%(prop:nproc:-1)s")))
+
# find gcc and g++ compilers
- if cc_version is not None:
- factory.addStep(FileDownload(
- mastersrc = "findbin.pl",
- slavedest = "../findbin.pl",
- mode = 0755))
-
- factory.addStep(SetProperty(
- name = "gcc",
- property = "cc_command",
- description = "Finding gcc command",
- command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
- haltOnFailure = True))
-
- factory.addStep(SetProperty(
- name = "g++",
- property = "cxx_command",
- description = "Finding g++ command",
- command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
- haltOnFailure = True))
+ factory.addStep(FileDownload(
+ name = "dlfindbinpl",
+ mastersrc = scripts_dir + '/findbin.pl',
+ workerdest = "../findbin.pl",
+ mode = 0o755))
+
+ factory.addStep(SetPropertyFromCommand(
+ name = "gcc",
+ property = "cc_command",
+ description = "Finding gcc command",
+ command = [
+ "../findbin.pl", "gcc", "", "",
+ ],
+ haltOnFailure = True))
+
+ factory.addStep(SetPropertyFromCommand(
+ name = "g++",
+ property = "cxx_command",
+ description = "Finding g++ command",
+ command = [
+ "../findbin.pl", "g++", "", "",
+ ],
+ haltOnFailure = True))
+
+ # see if ccache is available
+ factory.addStep(SetPropertyFromCommand(
+ property = "ccache_command",
+ command = ["which", "ccache"],
+ description = "Testing for ccache command",
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ ))
# expire tree if needed
if tree_expire > 0:
factory.addStep(FileDownload(
+ name = "dlexpiresh",
doStepIf = IsExpireRequested,
- mastersrc = "expire.sh",
- slavedest = "../expire.sh",
- mode = 0755))
+ mastersrc = scripts_dir + '/expire.sh',
+ workerdest = "../expire.sh",
+ mode = 0o755))
factory.addStep(ShellCommand(
name = "expire",
# cleanup.sh if needed
factory.addStep(FileDownload(
- mastersrc = "cleanup.sh",
- slavedest = "../cleanup.sh",
- mode = 0755,
+ name = "dlcleanupsh",
+ mastersrc = scripts_dir + '/cleanup.sh',
+ workerdest = "../cleanup.sh",
+ mode = 0o755,
doStepIf = IsCleanupRequested))
factory.addStep(ShellCommand(
name = "cleanold",
description = "Cleaning previous builds",
- command = ["./cleanup.sh", c['buildbotURL'], WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
+ command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
workdir = ".",
haltOnFailure = True,
doStepIf = IsCleanupRequested,
factory.addStep(ShellCommand(
name = "cleanup",
description = "Cleaning work area",
- command = ["./cleanup.sh", c['buildbotURL'], WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
+ command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
workdir = ".",
haltOnFailure = True,
doStepIf = IsCleanupRequested,
timeout = 2400))
- # user-requested clean targets
- for tuple in CleanTargetMap:
- factory.addStep(ShellCommand(
- name = tuple[1],
- description = 'User-requested "make %s"' % tuple[1],
- command = ["make", tuple[1], "V=s"],
- env = MakeEnv(),
- doStepIf = IsMakeCleanRequested(tuple[0])
- ))
-
- # switch to branch
+ # Workaround bug when switching from a checked out tag back to a branch
+ # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
factory.addStep(ShellCommand(
- name = "switchbranch",
- description = "Checking out Git branch",
- command = "if [ -d .git ]; then git fetch && git checkout '%s'; else exit 0; fi" % repo_branch,
- haltOnFailure = True,
- doStepIf = IsNoTaggingRequested,
- locks = NetLockDl,
- ))
+ name = "gitcheckout",
+ description = "Ensure that Git HEAD is sane",
+ command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
+ haltOnFailure = True))
# check out the source
+ # Git() runs:
+ # if repo doesn't exist: 'git clone repourl'
+# method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f -x'. Only works with mode='full'
+ # 'git fetch -t repourl branch; git reset --hard revision'
factory.addStep(Git(
+ name = "git",
repourl = repo_url,
branch = repo_branch,
- mode = 'incremental',
- method = 'clean',
+ mode = 'full',
+ method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
locks = NetLockDl,
+ haltOnFailure = True,
))
# update remote refs
haltOnFailure = True
))
- # fetch tags
- factory.addStep(ShellCommand(
- name = "fetchtag",
- description = "Fetching Git tags",
- command = ["git", "fetch", "--tags", "--", repo_url],
- haltOnFailure = True,
- doStepIf = IsTaggingRequested,
- locks = NetLockDl,
- ))
-
# switch to tag
factory.addStep(ShellCommand(
name = "switchtag",
description = "Checking out Git tag",
- command = ["git", "checkout", WithProperties("tags/v%(tag:-)s")],
+ command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
haltOnFailure = True,
doStepIf = IsTaggingRequested
))
+ # Verify that Git HEAD points to a tag or branch
+ # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
+ factory.addStep(ShellCommand(
+ name = "gitverify",
+ description = "Ensure that Git HEAD is pointing to a branch or tag",
+ command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
+ haltOnFailure = True))
+
factory.addStep(ShellCommand(
name = "rmtmp",
description = "Remove tmp folder",
command=["rm", "-rf", "tmp/"]))
# feed
-# factory.addStep(ShellCommand(
-# name = "feedsconf",
-# description = "Copy the feeds.conf",
-# command='''cp ~/feeds.conf ./feeds.conf''' ))
-
- # feed
factory.addStep(ShellCommand(
name = "rmfeedlinks",
description = "Remove feed symlinks",
command=["rm", "-rf", "package/feeds/"]))
+ factory.addStep(StringDownload(
+ name = "ccachecc",
+ s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
+ workerdest = "../ccache_cc.sh",
+ mode = 0o755,
+ ))
+
+ factory.addStep(StringDownload(
+ name = "ccachecxx",
+ s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
+ workerdest = "../ccache_cxx.sh",
+ mode = 0o755,
+ ))
+
# feed
factory.addStep(ShellCommand(
name = "updatefeeds",
description = "Updating feeds",
command=["./scripts/feeds", "update"],
- env = MakeEnv(),
+ env = MakeEnv(tryccache=True),
+ haltOnFailure = True,
locks = NetLockDl,
))
name = "installfeeds",
description = "Installing feeds",
command=["./scripts/feeds", "install", "-a"],
- env = MakeEnv()))
+ env = MakeEnv(tryccache=True),
+ haltOnFailure = True
+ ))
# seed config
- factory.addStep(FileDownload(
- mastersrc = "config.seed",
- slavedest = ".config",
- mode = 0644
- ))
+ if config_seed is not None:
+ factory.addStep(StringDownload(
+ name = "dlconfigseed",
+ s = config_seed + '\n',
+ workerdest = ".config",
+ mode = 0o644
+ ))
# configure
factory.addStep(ShellCommand(
name = "newconfig",
description = "Seeding .config",
- command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
+ command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
))
factory.addStep(ShellCommand(
))
# find libc suffix
- factory.addStep(SetProperty(
+ factory.addStep(SetPropertyFromCommand(
name = "libc",
property = "libc",
description = "Finding libc suffix",
command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
- # ccache helper
- factory.addStep(FileDownload(
- mastersrc = "ccache.sh",
- slavedest = "ccache.sh",
- mode = 0755
- ))
+ # install build key
+ if usign_key is not None:
+ factory.addStep(StringDownload(
+ name = "dlkeybuildpub",
+ s = UsignSec2Pub(usign_key, usign_comment),
+ workerdest = "key-build.pub",
+ mode = 0o600,
+ ))
- # ccache prepare
- factory.addStep(ShellCommand(
- name = "prepccache",
- description = "Preparing ccache",
- command = ["./ccache.sh"]
- ))
+ factory.addStep(StringDownload(
+ name = "dlkeybuild",
+ s = "# fake private key",
+ workerdest = "key-build",
+ mode = 0o600,
+ ))
- # install build key
- factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
- factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
+ factory.addStep(StringDownload(
+ name = "dlkeybuilducert",
+ s = "# fake certificate",
+ workerdest = "key-build.ucert",
+ mode = 0o600,
+ ))
# prepare dl
factory.addStep(ShellCommand(
factory.addStep(ShellCommand(
name = "dltar",
description = "Building and installing GNU tar",
- command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
- env = MakeEnv(),
+ command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/tar/compile", "V=s"],
+ env = MakeEnv(tryccache=True),
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "dlrun",
description = "Populating dl/",
- command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "download", "V=s"],
+ command = ["make", Interpolate("-j%(prop:njobs)s"), "download", "V=s"],
env = MakeEnv(),
logEnviron = False,
locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
factory.addStep(ShellCommand(
name = "tools",
description = "Building and installing tools",
- command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "tools/install", "V=s"],
- env = MakeEnv(),
+ command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/install", "V=s"],
+ env = MakeEnv(tryccache=True),
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "toolchain",
description = "Building and installing toolchain",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "toolchain/install", "V=s"],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "toolchain/install", "V=s"],
env = MakeEnv(),
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "kmods",
description = "Building kmods",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
env = MakeEnv(),
- #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
haltOnFailure = True
))
# find kernel version
- factory.addStep(SetProperty(
+ factory.addStep(SetPropertyFromCommand(
name = "kernelversion",
property = "kernelversion",
description = "Finding the effective Kernel version",
command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
- env = { 'TOPDIR': WithProperties("%(cwd)s/build", cwd=GetCwd) }
+ env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
))
	factory.addStep(ShellCommand(
name = "pkgbuild",
description = "Building packages",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
env = MakeEnv(),
- #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
haltOnFailure = True
))
- # factory.addStep(IfBuiltinShellCommand(
factory.addStep(ShellCommand(
name = "pkginstall",
description = "Installing packages",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/install", "V=s"],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "package/install", "V=s"],
env = MakeEnv(),
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "pkgindex",
description = "Indexing packages",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/index", "V=s"],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
+ env = MakeEnv(),
+ haltOnFailure = True
+ ))
+
+ factory.addStep(ShellCommand(
+ name = "images",
+ description = "Building and installing images",
+ command=["make", Interpolate("-j%(prop:njobs)s"), "target/install", "V=s"],
+ env = MakeEnv(),
+ haltOnFailure = True
+ ))
+
+ factory.addStep(ShellCommand(
+ name = "buildinfo",
+ description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
+ command = "make -j1 buildinfo V=s || true",
+ env = MakeEnv(),
+ haltOnFailure = True
+ ))
+
+ factory.addStep(ShellCommand(
+ name = "json_overview_image_info",
+ description = "Generate profiles.json in target folder",
+ command = "make -j1 json_overview_image_info V=s || true",
+ env = MakeEnv(),
+ haltOnFailure = True
+ ))
+
+ factory.addStep(ShellCommand(
+ name = "checksums",
+ description = "Calculating checksums",
+ command=["make", "-j1", "checksum", "V=s"],
env = MakeEnv(),
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "kmoddir",
description = "Creating kmod directory",
- command=["mkdir", "-p", WithProperties("bin/targets/%s/%s%%(libc)s/kmods/%%(kernelversion)s" %(ts[0], ts[1]))],
+ command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "kmodprepare",
description = "Preparing kmod archive",
command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
- WithProperties("bin/targets/%s/%s%%(libc)s/packages/" %(ts[0], ts[1])),
- WithProperties("bin/targets/%s/%s%%(libc)s/kmods/%%(kernelversion)s/" %(ts[0], ts[1]))],
+ Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
+ Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "kmodindex",
description = "Indexing kmod archive",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/index", "V=s",
- WithProperties("PACKAGE_SUBDIRS=bin/targets/%s/%s%%(libc)s/kmods/%%(kernelversion)s/" %(ts[0], ts[1]))],
+ command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
+ Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
env = MakeEnv(),
haltOnFailure = True
))
- # find rootfs staging directory
- factory.addStep(SetProperty(
- name = "stageroot",
- property = "stageroot",
- description = "Finding the rootfs staging directory",
- command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
- env = { 'TOPDIR': WithProperties("%(cwd)s/build", cwd=GetCwd) }
- ))
-
- factory.addStep(ShellCommand(
- name = "filesdir",
- description = "Creating file overlay directory",
- command=["mkdir", "-p", "files/etc/opkg"],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "kmodconfig",
- description = "Embedding kmod repository configuration",
- command=WithProperties("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(kernelversion)s#' " +
- "%(stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
- haltOnFailure = True
- ))
-
- #factory.addStep(IfBuiltinShellCommand(
- factory.addStep(ShellCommand(
- name = "images",
- description = "Building and installing images",
- command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "target/install", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "diffconfig",
- description = "Generating config.seed",
- command=["make", "-j1", "diffconfig", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "checksums",
- description = "Calculating checksums",
- command=["make", "-j1", "checksum", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
# sign
- if gpg_keyid is not None:
+ if ini.has_option("gpg", "key") or usign_key is not None:
factory.addStep(MasterShellCommand(
name = "signprepare",
description = "Preparing temporary signing directory",
- command = ["mkdir", "-p", "%s/signing" %(home_dir)],
+ command = ["mkdir", "-p", "%s/signing" %(work_dir)],
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "signpack",
description = "Packing files to sign",
- command = WithProperties("find bin/targets/%s/%s%%(libc)s/ bin/targets/%s/%s%%(libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(ts[0], ts[1], ts[0], ts[1])),
+ command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
haltOnFailure = True
))
factory.addStep(FileUpload(
- slavesrc = "sign.tar.gz",
- masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
+ workersrc = "sign.tar.gz",
+ masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
haltOnFailure = True
))
factory.addStep(MasterShellCommand(
name = "signfiles",
description = "Signing files",
- command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
- env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
+ command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
+ env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
haltOnFailure = True
))
factory.addStep(FileDownload(
- mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
- slavedest = "sign.tar.gz",
+ name = "dlsigntargz",
+ mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
+ workerdest = "sign.tar.gz",
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "dirprepare",
description = "Preparing upload directory structure",
- command = ["mkdir", "-p", WithProperties("tmp/upload/%%(prefix)stargets/%s/%s" %(ts[0], ts[1]), prefix=GetVersionPrefix)],
+ command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "linkprepare",
description = "Preparing repository symlink",
- command = ["ln", "-s", "-f", WithProperties("../packages-%(basever)s", basever=GetBaseVersion), WithProperties("tmp/upload/%(prefix)spackages", prefix=GetVersionPrefix)],
+ command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
doStepIf = IsNoMasterBuild,
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "kmoddirprepare",
description = "Preparing kmod archive upload directory",
- command = ["mkdir", "-p", WithProperties("tmp/upload/%%(prefix)stargets/%s/%s/kmods/%%(kernelversion)s" %(ts[0], ts[1]), prefix=GetVersionPrefix)],
+ command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
haltOnFailure = True
))
factory.addStep(ShellCommand(
name = "dirupload",
description = "Uploading directory structure",
- command = ["rsync", "-4", "--info=name", "-az", "tmp/upload/", "%s/" %(rsync_bin_url)],
+ command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
logEnviron = False,
locks = NetLockUl,
))
+ # download remote sha256sums to 'target-sha256sums'
+ factory.addStep(ShellCommand(
+ name = "target-sha256sums",
+ description = "Fetching remote sha256sums for target",
+ command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
+ env={'RSYNC_PASSWORD': rsync_bin_key},
+ logEnviron = False,
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ ))
+
+ # build list of files to upload
+ factory.addStep(FileDownload(
+ name = "dlsha2rsyncpl",
+ mastersrc = scripts_dir + '/sha2rsync.pl',
+ workerdest = "../sha2rsync.pl",
+ mode = 0o755,
+ ))
+
+ factory.addStep(ShellCommand(
+ name = "buildlist",
+ description = "Building list of files to upload",
+ command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
+ haltOnFailure = True,
+ ))
+
factory.addStep(FileDownload(
- mastersrc = "rsync.sh",
- slavedest = "../rsync.sh",
- mode = 0755))
+ name = "dlrsync.sh",
+ mastersrc = scripts_dir + '/rsync.sh',
+ workerdest = "../rsync.sh",
+ mode = 0o755
+ ))
+ # upload new files and update existing ones
factory.addStep(ShellCommand(
name = "targetupload",
description = "Uploading target files",
- command=["../rsync.sh", "-4", "--info=name", "--exclude=/kmods/", "--delete", "--size-only", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
- "-a", WithProperties("bin/targets/%s/%s%%(libc)s/" %(ts[0], ts[1])),
- WithProperties("%s/%%(prefix)stargets/%s/%s/" %(rsync_bin_url, ts[0], ts[1]), prefix=GetVersionPrefix)],
+ command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+ ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
+ Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
+ env={'RSYNC_PASSWORD': rsync_bin_key},
+ haltOnFailure = True,
+ logEnviron = False,
+ ))
+
+ # delete files which don't exist locally
+ factory.addStep(ShellCommand(
+ name = "targetprune",
+ description = "Pruning target files",
+ command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+ ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
+ Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
		logEnviron = False,
	))

	factory.addStep(ShellCommand(
name = "kmodupload",
description = "Uploading kmod archive",
- command=["../rsync.sh", "-4", "--info=name", "--delete", "--size-only", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
- "-a", WithProperties("bin/targets/%s/%s%%(libc)s/kmods/%%(kernelversion)s/" %(ts[0], ts[1])),
- WithProperties("%s/%%(prefix)stargets/%s/%s/kmods/%%(kernelversion)s/" %(rsync_bin_url, ts[0], ts[1]), prefix=GetVersionPrefix)],
+ command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+ ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
+ Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
env={'RSYNC_PASSWORD': rsync_bin_key},
haltOnFailure = True,
logEnviron = False,
))
if rsync_src_url is not None:
+ factory.addStep(ShellCommand(
+ name = "sourcelist",
+ description = "Finding source archives to upload",
+ command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
+ haltOnFailure = True
+ ))
+
factory.addStep(ShellCommand(
name = "sourceupload",
description = "Uploading source archives",
- command=["../rsync.sh", "-4", "--info=name", "--size-only", "--delay-updates",
- WithProperties("--partial-dir=.~tmp~%s~%s~%%(slavename)s" %(ts[0], ts[1])), "-a", "dl/", "%s/" %(rsync_src_url)],
+ command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
+ [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
env={'RSYNC_PASSWORD': rsync_src_key},
haltOnFailure = True,
logEnviron = False,
locks = NetLockUl,
))
- if False:
- factory.addStep(ShellCommand(
- name = "packageupload",
- description = "Uploading package files",
- command=["../rsync.sh", "-4", "--info=name", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a", "bin/packages/", "%s/packages/" %(rsync_bin_url)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = False,
- logEnviron = False,
- locks = NetLockUl,
- ))
-
- # logs
- if False:
- factory.addStep(ShellCommand(
- name = "upload",
- description = "Uploading logs",
- command=["../rsync.sh", "-4", "-info=name", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az", "logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = False,
- alwaysRun = True,
- logEnviron = False,
- locks = NetLockUl,
- ))
-
factory.addStep(ShellCommand(
name = "df",
description = "Reporting disk usage",
command=["df", "-h", "."],
env={'LC_ALL': 'C'},
haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
alwaysRun = True
))
- from buildbot.config import BuilderConfig
+ factory.addStep(ShellCommand(
+ name = "du",
+ description = "Reporting estimated file space usage",
+ command=["du", "-sh", "."],
+ env={'LC_ALL': 'C'},
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ alwaysRun = True
+ ))
- c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
+ factory.addStep(ShellCommand(
+ name = "ccachestat",
+ description = "Reporting ccache stats",
+ command=["ccache", "-s"],
+ env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
+ want_stderr = False,
+ haltOnFailure = False,
+ flunkOnFailure = False,
+ warnOnFailure = False,
+ alwaysRun = True,
+ ))
+
+ c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
+
+ c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
+ force_factory.addStep(steps.Trigger(
+ name = "trigger_%s" % target,
+ description = "Triggering %s build" % target,
+ schedulerNames = [ "trigger_%s" % target ],
+ set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
+ doStepIf = IsTargetSelected(target)
+ ))
####### STATUS TARGETS
# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
-c['status'] = []
-
-from buildbot.status import html
-from buildbot.status.web import authz, auth
-
-if ini.has_option("status", "bind"):
- if ini.has_option("status", "user") and ini.has_option("status", "password"):
- authz_cfg=authz.Authz(
- # change any of these to True to enable; see the manual for more
- # options
- auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
- gracefulShutdown = 'auth',
- forceBuild = 'auth', # use this to test your slave once it is set up
- forceAllBuilds = 'auth',
- pingBuilder = False,
- stopBuild = 'auth',
- stopAllBuilds = 'auth',
- cancelPendingBuild = 'auth',
- )
- c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
- else:
- c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
-
-
-from buildbot.status import words
-
-if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
- irc_host = ini.get("irc", "host")
- irc_port = 6667
- irc_chan = ini.get("irc", "channel")
- irc_nick = ini.get("irc", "nickname")
- irc_pass = None
+if "status_bind" in inip1:
+ c['www'] = {
+ 'port': inip1.get("status_bind"),
+ 'plugins': {
+ 'waterfall_view': True,
+ 'console_view': True,
+ 'grid_view': True
+ }
+ }
- if ini.has_option("irc", "port"):
- irc_port = ini.getint("irc", "port")
+ if "status_user" in inip1 and "status_password" in inip1:
+ c['www']['auth'] = util.UserPasswordAuth([
+ (inip1.get("status_user"), inip1.get("status_password"))
+ ])
+ c['www']['authz'] = util.Authz(
+ allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
+ roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
+ )
- if ini.has_option("irc", "password"):
- irc_pass = ini.get("irc", "password")
+c['services'] = []
+if ini.has_section("irc"):
+ iniirc = ini['irc']
+ irc_host = iniirc.get("host", None)
+ irc_port = iniirc.getint("port", 6667)
+ irc_chan = iniirc.get("channel", None)
+ irc_nick = iniirc.get("nickname", None)
+ irc_pass = iniirc.get("password", None)
+
+ if irc_host and irc_nick and irc_chan:
+ irc = reporters.IRC(irc_host, irc_nick,
+ port = irc_port,
+ password = irc_pass,
+ channels = [ irc_chan ],
+ notify_events = [ 'exception', 'problem', 'recovery' ]
+ )
- irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
- channels = [{ "channel": irc_chan }],
- notify_events = {
- 'exception': 1,
- 'successToFailure': 1,
- 'failureToSuccess': 1
- }
- )
+ c['services'].append(irc)
- c['status'].append(irc)
+c['revlink'] = util.RevlinkMatch([
+ r'https://git.openwrt.org/openwrt/(.*).git'
+ ],
+ r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
####### DB URL
# This specifies what database buildbot uses to store its state.  You can leave
# this at its default for all but the largest installations.
c['db'] = {
	'db_url' : "sqlite:///state.sqlite",
}
+
+c['buildbotNetUsageData'] = None