from buildbot import locks
from buildbot.data import resultspec
-from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import reporters
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Property
-from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.basic import AnyBranchScheduler
from buildbot.schedulers.forcesched import BaseParameter
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.forcesched import ValidationError
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker
+from buildbot.worker.local import LocalWorker
if not os.path.exists("twistd.pid"):
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
+if "general" not in ini or "phase1" not in ini:
+ raise ValueError("Fix your configuration")
+
+inip1 = ini["phase1"]
+
+# Globals
+work_dir = os.path.abspath(ini["general"].get("workdir", "."))
+scripts_dir = os.path.abspath("../scripts")
+
+repo_url = ini["repo"].get("url")
+
+rsync_defopts = ["-v", "--timeout=120"]
+
+# if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
+# rsync_bin_defopts += ["--contimeout=20"]
+
+branches = {}
+
+
+def ini_parse_branch(section):
+ b = {}
+ name = section.get("name")
+
+ if not name:
+ raise ValueError("missing 'name' in " + repr(section))
+ if name in branches:
+ raise ValueError("duplicate branch name in " + repr(section))
+
+ b["name"] = name
+ b["bin_url"] = section.get("binary_url")
+ b["bin_key"] = section.get("binary_password")
+
+ b["src_url"] = section.get("source_url")
+ b["src_key"] = section.get("source_password")
+
+ b["gpg_key"] = section.get("gpg_key")
+
+ b["usign_key"] = section.get("usign_key")
+ usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
+ b["usign_comment"] = section.get("usign_comment", usign_comment)
+
+ b["config_seed"] = section.get("config_seed")
+
+ b["kmod_archive"] = section.getboolean("kmod_archive", False)
+
+ branches[name] = b
+ log.msg("Configured branch: {}".format(name))
+
+
+# PB port can be either a numeric port or a connection string
+pb_port = inip1.get("port") or 9989
+
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
-c['title'] = ini.get("general", "title")
-c['titleURL'] = ini.get("general", "title_url")
+c["title"] = ini["general"].get("title")
+c["titleURL"] = ini["general"].get("title_url")
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
-c['buildbotURL'] = ini.get("phase1", "buildbot_url")
+c["buildbotURL"] = inip1.get("buildbot_url")
####### BUILDWORKERS
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
-worker_port = 9989
+c["workers"] = []
+NetLocks = dict()
-if ini.has_option("phase1", "port"):
- worker_port = ini.get("phase1", "port")
-c['workers'] = []
-NetLocks = dict()
+def ini_parse_workers(section):
+ name = section.get("name")
+ password = section.get("password")
+ phase = section.getint("phase")
+ tagonly = section.getboolean("tag_only")
+ rsyncipv4 = section.getboolean("rsync_ipv4")
+
+ if not name or not password or not phase == 1:
+ log.msg("invalid worker configuration ignored: {}".format(repr(section)))
+ return
+
+ sl_props = {"tag_only": tagonly}
+ if "dl_lock" in section:
+ lockname = section.get("dl_lock")
+ sl_props["dl_lock"] = lockname
+ if lockname not in NetLocks:
+ NetLocks[lockname] = locks.MasterLock(lockname)
+ if "ul_lock" in section:
+ lockname = section.get("ul_lock")
+ sl_props["ul_lock"] = lockname
+ if lockname not in NetLocks:
+ NetLocks[lockname] = locks.MasterLock(lockname)
+ if rsyncipv4:
+ sl_props[
+ "rsync_ipv4"
+ ] = True # only set prop if required, we use '+' Interpolate substitution
+
+ log.msg("Configured worker: {}".format(name))
+ # NB: phase1 build factory requires workers to be single-build only
+ c["workers"].append(Worker(name, password, max_builds=1, properties=sl_props))
+
for section in ini.sections():
- if section.startswith("worker "):
- if ini.has_option(section, "name") and ini.has_option(section, "password") and \
- (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
- sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
- name = ini.get(section, "name")
- password = ini.get(section, "password")
- max_builds = 1
- if ini.has_option(section, "builds"):
- max_builds = ini.getint(section, "builds")
- sl_props['max_builds'] = max_builds
- if max_builds == 1:
- sl_props['shared_wd'] = True
- if ini.has_option(section, "cleanup"):
- sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
- if ini.has_option(section, "dl_lock"):
- lockname = ini.get(section, "dl_lock")
- sl_props['dl_lock'] = lockname
- if lockname not in NetLocks:
- NetLocks[lockname] = locks.MasterLock(lockname)
- if ini.has_option(section, "ul_lock"):
- lockname = ini.get(section, "dl_lock")
- sl_props['ul_lock'] = lockname
- if lockname not in NetLocks:
- NetLocks[lockname] = locks.MasterLock(lockname)
- if ini.has_option(section, "shared_wd"):
- shared_wd = ini.getboolean(section, "shared_wd")
- sl_props['shared_wd'] = shared_wd
- if shared_wd and (max_builds != 1):
- raise ValueError('max_builds must be 1 with shared workdir!')
- c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
-
-# 'workerPortnum' defines the TCP port to listen on for connections from workers.
-# This must match the value configured into the buildworkers (with their
-# --master option)
-c['protocols'] = {'pb': {'port': worker_port}}
+ if section.startswith("branch "):
+ ini_parse_branch(ini[section])
+
+ if section.startswith("worker "):
+ ini_parse_workers(ini[section])
+
+# list of branches in build-priority order
+branchNames = [branches[b]["name"] for b in branches]
+
+c["protocols"] = {"pb": {"port": pb_port}}
# coalesce builds
-c['collapseRequests'] = True
+c["collapseRequests"] = True
# Reduce amount of backlog data
-c['configurators'] = [util.JanitorConfigurator(
- logHorizon=timedelta(days=3),
- hour=6,
-)]
+c["configurators"] = [
+ util.JanitorConfigurator(
+ logHorizon=timedelta(days=3),
+ hour=6,
+ )
+]
+
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
- """Returns the complete_at of the latest completed and not SKIPPED
- build request for this builder, or None if there are no such build
- requests. We need to filter out SKIPPED requests because we're
- using collapseRequests=True which is unfortunately marking all
- previous requests as complete when new buildset is created.
-
- @returns: datetime instance or None, via Deferred
- """
-
- bldrid = yield bldr.getBuilderId()
- completed = yield bldr.master.data.get(
- ('builders', bldrid, 'buildrequests'),
- [
- resultspec.Filter('complete', 'eq', [True]),
- resultspec.Filter('results', 'ne', [results.SKIPPED]),
- ],
- order=['-complete_at'], limit=1)
- if not completed:
- return
-
- return completed[0]['complete_at']
+ """Returns the complete_at of the latest completed and not SKIPPED
+ build request for this builder, or None if there are no such build
+ requests. We need to filter out SKIPPED requests because we're
+ using collapseRequests=True which is unfortunately marking all
+    previous requests as complete when a new buildset is created.
+
+ @returns: datetime instance or None, via Deferred
+ """
+
+ bldrid = yield bldr.getBuilderId()
+ completed = yield bldr.master.data.get(
+ ("builders", bldrid, "buildrequests"),
+ [
+ resultspec.Filter("complete", "eq", [True]),
+ resultspec.Filter("results", "ne", [results.SKIPPED]),
+ ],
+ order=["-complete_at"],
+ limit=1,
+ )
+ if not completed:
+ return
+
+ complete_at = completed[0]["complete_at"]
+
+ last_build = yield bldr.master.data.get(
+ ("builds",),
+ [
+ resultspec.Filter("builderid", "eq", [bldrid]),
+ ],
+ order=["-started_at"],
+ limit=1,
+ )
+
+ if last_build and last_build[0]:
+ last_complete_at = last_build[0]["complete_at"]
+ if last_complete_at and (last_complete_at > complete_at):
+ return last_complete_at
+
+ return complete_at
+
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
- """Returns sorted list of builders by their last timestamp of completed and
- not skipped build.
-
- @returns: list of sorted builders
- """
+ """Returns sorted list of builders by their last timestamp of completed and
+ not skipped build, ordered first by branch name.
- def is_building(bldr):
- return bool(bldr.building) or bool(bldr.old_building)
+ @returns: list of sorted builders
+ """
- def bldr_info(bldr):
- d = defer.maybeDeferred(getNewestCompleteTime, bldr)
- d.addCallback(lambda complete_at: (complete_at, bldr))
- return d
+ bldrNamePrio = {"__Janitor": 0, "00_force_build": 0}
+ i = 1
+ for bname in branchNames:
+ bldrNamePrio[bname] = i
+ i += 1
- def bldr_sort(item):
- (complete_at, bldr) = item
+ def is_building(bldr):
+ return bool(bldr.building) or bool(bldr.old_building)
- if not complete_at:
- date = datetime.min
- complete_at = date.replace(tzinfo=tzutc())
+ def bldr_info(bldr):
+ d = defer.maybeDeferred(getNewestCompleteTime, bldr)
+ d.addCallback(lambda complete_at: (complete_at, bldr))
+ return d
- if is_building(bldr):
- date = datetime.max
- complete_at = date.replace(tzinfo=tzutc())
+ def bldr_sort(item):
+ (complete_at, bldr) = item
- return (complete_at, bldr.name)
+ pos = 99
+ for name, prio in bldrNamePrio.items():
+ if bldr.name.startswith(name):
+ pos = prio
+ break
- results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
- results.sort(key=bldr_sort)
+ if not complete_at:
+ date = datetime.min
+ complete_at = date.replace(tzinfo=tzutc())
- for r in results:
- log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
+ if is_building(bldr):
+ date = datetime.max
+ complete_at = date.replace(tzinfo=tzutc())
- return [r[1] for r in results]
+ return (pos, complete_at, bldr.name)
-c['prioritizeBuilders'] = prioritizeBuilders
+ results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
+ results.sort(key=bldr_sort)
-####### CHANGESOURCES
-
-work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
-scripts_dir = os.path.abspath("../scripts")
-tree_expire = 0
-other_builds = 0
-cc_version = None
+ # for r in results:
+ # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
-cc_command = "gcc"
-cxx_command = "g++"
+ return [r[1] for r in results]
-config_seed = ""
-git_ssh = False
-git_ssh_key = None
+c["prioritizeBuilders"] = prioritizeBuilders
-if ini.has_option("phase1", "expire"):
- tree_expire = ini.getint("phase1", "expire")
+####### CHANGESOURCES
-if ini.has_option("phase1", "other_builds"):
- other_builds = ini.getint("phase1", "other_builds")
+# find targets
+targets = dict()
+
+
+def populateTargets():
+ """fetch a shallow clone of each configured branch in turn:
+ execute dump-target-info.pl and collate the results to ensure
+ targets that only exist in specific branches get built.
+ This takes a while during master startup but is executed only once.
+ """
+ log.msg("Populating targets, this will take time")
+ sourcegit = work_dir + "/source.git"
+ for branch in branchNames:
+ if os.path.isdir(sourcegit):
+ subprocess.call(["rm", "-rf", sourcegit])
+
+ subprocess.call(
+ [
+ "git",
+ "clone",
+ "-q",
+ "--depth=1",
+ "--branch=" + branch,
+ repo_url,
+ sourcegit,
+ ]
+ )
+
+ os.makedirs(sourcegit + "/tmp", exist_ok=True)
+ findtargets = subprocess.Popen(
+ ["./scripts/dump-target-info.pl", "targets"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ cwd=sourcegit,
+ )
+
+ targets[branch] = set()
+ while True:
+ line = findtargets.stdout.readline()
+ if not line:
+ break
+ ta = line.decode().strip().split(" ")
+ targets[branch].add(ta[0])
+
+ subprocess.call(["rm", "-rf", sourcegit])
+
+
+populateTargets()
-if ini.has_option("phase1", "cc_version"):
- cc_version = ini.get("phase1", "cc_version").split()
- if len(cc_version) == 1:
- cc_version = ["eq", cc_version[0]]
+# the 'change_source' setting tells the buildmaster how it should find out
+# about source code changes.
+
+c["change_source"] = []
+c["change_source"].append(
+ GitPoller(
+ repo_url,
+ workdir=work_dir + "/work.git",
+ branches=branchNames,
+ pollAtLaunch=True,
+ pollinterval=300,
+ )
+)
-if ini.has_option("general", "git_ssh"):
- git_ssh = ini.getboolean("general", "git_ssh")
+####### SCHEDULERS
-if ini.has_option("general", "git_ssh_key"):
- git_ssh_key = ini.get("general", "git_ssh_key")
-else:
- git_ssh = False
+# Configure the Schedulers, which decide how to react to incoming changes.
-if ini.has_option("phase1", "config_seed"):
- config_seed = ini.get("phase1", "config_seed")
-repo_url = ini.get("repo", "url")
-repo_branch = "master"
+# Selector for known valid tags
+class TagChoiceParameter(BaseParameter):
+ spec_attributes = ["strict", "choices"]
+ type = "list"
+ strict = True
-if ini.has_option("repo", "branch"):
- repo_branch = ini.get("repo", "branch")
+ def __init__(self, name, label=None, **kw):
+ super().__init__(name, label, **kw)
+ self._choice_list = []
-rsync_bin_url = ini.get("rsync", "binary_url")
-rsync_bin_key = ini.get("rsync", "binary_password")
-rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
+ def getRevTags(self, findtag=None):
+ taglist = []
+ branchvers = []
-if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
- rsync_bin_defopts += ["--contimeout=20"]
+        # we will filter out tags that do not match the configured branches
+ for b in branchNames:
+ basever = re.search(r"-([0-9]+\.[0-9]+)$", b)
+ if basever:
+ branchvers.append(basever[1])
-rsync_src_url = None
-rsync_src_key = None
-rsync_src_defopts = ["-v", "-4", "--timeout=120"]
+ # grab tags from remote repository
+ alltags = subprocess.Popen(
+ ["git", "ls-remote", "--tags", repo_url], stdout=subprocess.PIPE
+ )
-if ini.has_option("rsync", "source_url"):
- rsync_src_url = ini.get("rsync", "source_url")
- rsync_src_key = ini.get("rsync", "source_password")
+ while True:
+ line = alltags.stdout.readline()
- if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
- rsync_src_defopts += ["--contimeout=20"]
+ if not line:
+ break
-usign_key = None
-usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
+ (rev, tag) = line.split()
-if ini.has_option("usign", "key"):
- usign_key = ini.get("usign", "key")
+ # does it match known format? ('vNN.NN.NN(-rcN)')
+ tagver = re.search(
+ r"\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$",
+ tag.decode().strip(),
+ )
-if ini.has_option("usign", "comment"):
- usign_comment = ini.get("usign", "comment")
+ # only list valid tags matching configured branches
+ if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
+ # if we want a specific tag, ignore all that don't match
+ if findtag and findtag != tagver[1]:
+ continue
+ taglist.append({"rev": rev.decode().strip(), "tag": tagver[1]})
-enable_kmod_archive = False
-embed_kmod_repository = False
+ return taglist
-if ini.has_option("phase1", "kmod_archive"):
- enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
+ @property
+ def choices(self):
+ taglist = [rt["tag"] for rt in self.getRevTags()]
+ taglist.sort(
+ reverse=True,
+ key=lambda tag: tag if re.search(r"-rc[0-9]+$", tag) else tag + "-z",
+ )
+ taglist.insert(0, "")
-if ini.has_option("phase1", "kmod_repository"):
- embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
+ self._choice_list = taglist
+ return self._choice_list
-# find targets
-targets = [ ]
+ def updateFromKwargs(self, properties, kwargs, **unused):
+ tag = self.getFromKwargs(kwargs)
+ properties[self.name] = tag
-if not os.path.isdir(work_dir+'/source.git'):
- subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
-else:
- subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
+ # find the commit matching the tag
+ findtag = self.getRevTags(tag)
-os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
-findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
- stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
+ if not findtag:
+ raise ValidationError("Couldn't find tag")
-while True:
- line = findtargets.stdout.readline()
- if not line:
- break
- ta = line.decode().strip().split(' ')
- targets.append(ta[0])
+ properties["force_revision"] = findtag[0]["rev"]
+ # find the branch matching the tag
+ branch = None
+ branchver = re.search(r"v([0-9]+\.[0-9]+)", tag)
+ for b in branchNames:
+ if b.endswith(branchver[1]):
+ branch = b
-# the 'change_source' setting tells the buildmaster how it should find out
-# about source code changes. Here we point to the buildbot clone of pyflakes.
+ if not branch:
+ raise ValidationError("Couldn't find branch")
-c['change_source'] = []
-c['change_source'].append(GitPoller(
- repo_url,
- workdir=work_dir+'/work.git', branch=repo_branch,
- pollinterval=300))
+ properties["force_branch"] = branch
-####### SCHEDULERS
+ def parse_from_arg(self, s):
+ if self.strict and s not in self._choice_list:
+ raise ValidationError(
+ "'%s' does not belong to list of available choices '%s'"
+ % (s, self._choice_list)
+ )
+ return s
-# Configure the Schedulers, which decide how to react to incoming changes. In this
-# case, just kick off a 'basebuild' build
-class TagChoiceParameter(BaseParameter):
- spec_attributes = ["strict", "choices"]
- type = "list"
- strict = True
-
- def __init__(self, name, label=None, **kw):
- super().__init__(name, label, **kw)
- self._choice_list = []
-
- @property
- def choices(self):
- taglist = []
- basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
-
- if basever:
- findtags = subprocess.Popen(
- ['git', 'ls-remote', '--tags', repo_url],
- stdout = subprocess.PIPE)
-
- while True:
- line = findtags.stdout.readline()
-
- if not line:
- break
-
- tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
-
- if tagver and tagver[1].find(basever[1]) == 0:
- taglist.append(tagver[1])
-
- taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
- taglist.insert(0, '')
-
- self._choice_list = taglist
-
- return self._choice_list
-
- def parse_from_arg(self, s):
- if self.strict and s not in self._choice_list:
- raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
- return s
-
-c['schedulers'] = []
-c['schedulers'].append(SingleBranchScheduler(
- name = "all",
- change_filter = filter.ChangeFilter(branch=repo_branch),
- treeStableTimer = 60,
- builderNames = targets))
-
-c['schedulers'].append(ForceScheduler(
- name = "force",
- buttonName = "Force builds",
- label = "Force build details",
- builderNames = [ "00_force_build" ],
-
- codebases = [
- util.CodebaseParameter(
- "",
- label = "Repository",
- branch = util.FixedParameter(name = "branch", default = ""),
- revision = util.FixedParameter(name = "revision", default = ""),
- repository = util.FixedParameter(name = "repository", default = ""),
- project = util.FixedParameter(name = "project", default = "")
- )
- ],
-
- reason = util.StringParameter(
- name = "reason",
- label = "Reason",
- default = "Trigger build",
- required = True,
- size = 80
- ),
-
- properties = [
- util.NestedParameter(
- name="options",
- label="Build Options",
- layout="vertical",
- fields=[
- util.ChoiceStringParameter(
- name = "target",
- label = "Build target",
- default = "all",
- choices = [ "all" ] + targets
- ),
- TagChoiceParameter(
- name = "tag",
- label = "Build tag",
- default = ""
- )
- ]
- )
- ]
-))
+@util.renderer
+@defer.inlineCallbacks
+def builderNames(props):
+ """since we have per branch and per target builders,
+ address the relevant builder for each new buildrequest
+ based on the request's desired branch and target.
+ """
+ branch = props.getProperty("branch")
+ target = props.getProperty("target", "")
+
+ if target == "all":
+ target = ""
+
+ # if that didn't work, try sourcestamp to find a branch
+ if not branch:
+ # match builders with target branch
+ ss = props.sourcestamps[0]
+ if ss:
+ branch = ss["branch"]
+ else:
+ log.msg("couldn't find builder")
+ return [] # nothing works
+
+ bname = branch + "_" + target
+ builders = []
+
+ for b in (yield props.master.data.get(("builders",))):
+ if not b["name"].startswith(bname):
+ continue
+ builders.append(b["name"])
+
+ return builders
+
+
+c["schedulers"] = []
+c["schedulers"].append(
+ AnyBranchScheduler(
+ name="all",
+ change_filter=util.ChangeFilter(branch=branchNames),
+ treeStableTimer=15 * 60,
+ builderNames=builderNames,
+ )
+)
+
+c["schedulers"].append(
+ ForceScheduler(
+ name="force",
+ buttonName="Force builds",
+ label="Force build details",
+ builderNames=["00_force_build"],
+ codebases=[
+ util.CodebaseParameter(
+ "",
+ label="Repository",
+ branch=util.FixedParameter(name="branch", default=""),
+ revision=util.FixedParameter(name="revision", default=""),
+ repository=util.FixedParameter(name="repository", default=""),
+ project=util.FixedParameter(name="project", default=""),
+ )
+ ],
+ reason=util.StringParameter(
+ name="reason",
+ label="Reason",
+ default="Trigger build",
+ required=True,
+ size=80,
+ ),
+ properties=[
+ # NB: avoid nesting to simplify processing of properties
+ util.ChoiceStringParameter(
+ name="target",
+ label="Build target",
+ default="all",
+ choices=["all"] + [t for b in branchNames for t in targets[b]],
+ ),
+ TagChoiceParameter(name="tag", label="Build tag", default=""),
+ ],
+ )
+)
+
+c["schedulers"].append(
+ schedulers.Triggerable(name="trigger", builderNames=builderNames)
+)
####### BUILDERS
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
-CleanTargetMap = [
- [ "tools", "tools/clean" ],
- [ "chain", "toolchain/clean" ],
- [ "linux", "target/linux/clean" ],
- [ "dir", "dirclean" ],
- [ "dist", "distclean" ]
-]
-
-def IsMakeCleanRequested(pattern):
- def CheckCleanProperty(step):
- val = step.getProperty("clean")
- if val and re.match(pattern, val):
- return True
- else:
- return False
-
- return CheckCleanProperty
-
-def IsSharedWorkdir(step):
- return bool(step.getProperty("shared_wd"))
-
-def IsCleanupRequested(step):
- if IsSharedWorkdir(step):
- return False
- do_cleanup = step.getProperty("do_cleanup")
- if do_cleanup:
- return True
- else:
- return False
-
-def IsExpireRequested(step):
- if IsSharedWorkdir(step):
- return False
- else:
- return not IsCleanupRequested(step)
-
-def IsGitFreshRequested(step):
- do_cleanup = step.getProperty("do_cleanup")
- if do_cleanup:
- return True
- else:
- return False
-
-def IsGitCleanRequested(step):
- return not IsGitFreshRequested(step)
-
-def IsTaggingRequested(step):
- val = step.getProperty("tag")
- if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
- return True
- else:
- return False
-
-def IsNoTaggingRequested(step):
- return not IsTaggingRequested(step)
def IsNoMasterBuild(step):
- return repo_branch != "master"
+ return step.getProperty("branch") != "master"
+
+
+def IsUsignEnabled(step):
+ branch = step.getProperty("branch")
+ return branch and branches[branch].get("usign_key")
+
+
+def IsSignEnabled(step):
+ branch = step.getProperty("branch")
+ return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
+
+
+def IsKmodArchiveEnabled(step):
+ branch = step.getProperty("branch")
+ return branch and branches[branch].get("kmod_archive")
+
+
+def IsKmodArchiveAndRsyncEnabled(step):
+ branch = step.getProperty("branch")
+ return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
+
+
+def GetBaseVersion(branch):
+ if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
+ return branch.split("-")[1]
+ else:
+ return "master"
-def GetBaseVersion():
- if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
- return repo_branch.split('-')[1]
- else:
- return "master"
@properties.renderer
def GetVersionPrefix(props):
- basever = GetBaseVersion()
- if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
- return "%s/" % props["tag"]
- elif basever != "master":
- return "%s-SNAPSHOT/" % basever
- else:
- return ""
+ branch = props.getProperty("branch")
+ basever = GetBaseVersion(branch)
+ if props.hasProperty("tag") and re.match(
+ r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]
+ ):
+ return "%s/" % props["tag"][1:]
+ elif basever != "master":
+ return "%s-SNAPSHOT/" % basever
+ else:
+ return ""
-@properties.renderer
-def GetNumJobs(props):
- if props.hasProperty("max_builds") and props.hasProperty("nproc"):
- return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
- else:
- return "1"
-@properties.renderer
-def GetCC(props):
- if props.hasProperty("cc_command"):
- return props["cc_command"]
- else:
- return "gcc"
+@util.renderer
+def GetConfigSeed(props):
+ branch = props.getProperty("branch")
+ return branch and branches[branch].get("config_seed") or ""
-@properties.renderer
-def GetCXX(props):
- if props.hasProperty("cxx_command"):
- return props["cxx_command"]
- else:
- return "g++"
-@properties.renderer
-def GetCwd(props):
- if props.hasProperty("builddir"):
- return props["builddir"]
- elif props.hasProperty("workdir"):
- return props["workdir"]
- else:
- return "/"
+@util.renderer
+def GetRsyncParams(props, srcorbin, urlorkey):
+ # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
+ branch = props.getProperty("branch")
+ opt = srcorbin + "_" + urlorkey
+ return branch and branches[branch].get(opt)
+
+
+@util.renderer
+def GetUsignKey(props):
+ branch = props.getProperty("branch")
+ return branch and branches[branch].get("usign_key")
-@properties.renderer
-def GetCCache(props):
- if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
- return props["ccache_command"]
- else:
- return ""
def GetNextBuild(builder, requests):
- for r in requests:
- if r.properties and r.properties.hasProperty("tag"):
- return r
+ for r in requests:
+ if r.properties:
+ # order tagged build first
+ if r.properties.hasProperty("tag"):
+ return r
+
+ r = requests[0]
+ # log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
+ return r
- r = requests[0]
- log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
- return r
def MakeEnv(overrides=None, tryccache=False):
- env = {
- 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
- 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
- }
- if tryccache:
- env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
- env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
- env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
- else:
- env['CC'] = env['CCC']
- env['CXX'] = env['CCXX']
- env['CCACHE'] = ''
- if overrides is not None:
- env.update(overrides)
- return env
+ env = {
+ "CCC": Interpolate("%(prop:cc_command:-gcc)s"),
+ "CCXX": Interpolate("%(prop:cxx_command:-g++)s"),
+ }
+ if tryccache:
+ env["CC"] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
+ env["CXX"] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
+ env["CCACHE"] = Interpolate("%(prop:ccache_command:-)s")
+ else:
+ env["CC"] = env["CCC"]
+ env["CXX"] = env["CCXX"]
+ env["CCACHE"] = ""
+ if overrides is not None:
+ env.update(overrides)
+ return env
+
@properties.renderer
-def NetLockDl(props):
- lock = None
- if props.hasProperty("dl_lock"):
- lock = NetLocks[props["dl_lock"]]
- if lock is not None:
- return [lock.access('exclusive')]
- else:
- return []
+def NetLockDl(props, extralock=None):
+ lock = None
+ if props.hasProperty("dl_lock"):
+ lock = NetLocks[props["dl_lock"]]
+ if lock is not None:
+ return [lock.access("exclusive")]
+ else:
+ return []
+
@properties.renderer
def NetLockUl(props):
- lock = None
- if props.hasProperty("ul_lock"):
- lock = NetLocks[props["ul_lock"]]
- if lock is not None:
- return [lock.access('exclusive')]
- else:
- return []
+ lock = None
+ if props.hasProperty("ul_lock"):
+ lock = NetLocks[props["ul_lock"]]
+ if lock is not None:
+ return [lock.access("exclusive")]
+ else:
+ return []
-@util.renderer
-def TagPropertyValue(props):
- if props.hasProperty("options"):
- options = props.getProperty("options")
- if type(options) is dict:
- return options.get("tag")
- return None
def IsTargetSelected(target):
- def CheckTargetProperty(step):
- try:
- options = step.getProperty("options")
- if type(options) is dict:
- selected_target = options.get("target", "all")
- if selected_target != "all" and selected_target != target:
- return False
- except KeyError:
- pass
-
- return True
-
- return CheckTargetProperty
-
-def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
- try:
- seckey = base64.b64decode(seckey)
- except:
- return None
-
- return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
- base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
-
-
-c['builders'] = []
-
-dlLock = locks.WorkerLock("worker_dl")
-
-checkBuiltin = re.sub('[\t\n ]+', ' ', """
- checkBuiltin() {
- local symbol op path file;
- for file in $CHANGED_FILES; do
- case "$file" in
- package/*/*) : ;;
- *) return 0 ;;
- esac;
- done;
- while read symbol op path; do
- case "$symbol" in package-*)
- symbol="${symbol##*(}";
- symbol="${symbol%)}";
- for file in $CHANGED_FILES; do
- case "$file" in "package/$path/"*)
- grep -qsx "$symbol=y" .config && return 0
- ;; esac;
- done;
- esac;
- done < tmp/.packagedeps;
- return 1;
- }
-""").strip()
-
-
-class IfBuiltinShellCommand(ShellCommand):
- def _quote(self, str):
- if re.search("[^a-zA-Z0-9/_.-]", str):
- return "'%s'" %(re.sub("'", "'\"'\"'", str))
- return str
-
- def setCommand(self, command):
- if not isinstance(command, (str, unicode)):
- command = ' '.join(map(self._quote, command))
- self.command = [
- '/bin/sh', '-c',
- '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
- ]
-
- def setupEnvironment(self, cmd):
- workerEnv = self.workerEnvironment
- if workerEnv is None:
- workerEnv = { }
- changedFiles = { }
- for request in self.build.requests:
- for source in request.sources:
- for change in source.changes:
- for file in change.files:
- changedFiles[file] = True
- fullSlaveEnv = workerEnv.copy()
- fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
- cmd.args['env'] = fullSlaveEnv
-
-workerNames = [ ]
-
-for worker in c['workers']:
- workerNames.append(worker.workername)
+ def CheckTargetProperty(step):
+ selected_target = step.getProperty("target", "all")
+ if selected_target != "all" and selected_target != target:
+ return False
+ return True
-force_factory = BuildFactory()
+ return CheckTargetProperty
+
+
+@util.renderer
+def UsignSec2Pub(props):
+ branch = props.getProperty("branch")
+ try:
+ comment = (
+ branches[branch].get("usign_comment") or "untrusted comment: secret key"
+ )
+ seckey = branches[branch].get("usign_key")
+ seckey = base64.b64decode(seckey)
+ except Exception:
+ return None
+
+ return "{}\n{}".format(
+ re.sub(r"\bsecret key$", "public key", comment),
+ base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]),
+ )
-c['builders'].append(BuilderConfig(
- name = "00_force_build",
- workernames = workerNames,
- factory = force_factory))
-
-for target in targets:
- ts = target.split('/')
-
- factory = BuildFactory()
-
- # setup shared work directory if required
- factory.addStep(ShellCommand(
- name = "sharedwd",
- description = "Setting up shared work directory",
- command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
- workdir = ".",
- haltOnFailure = True,
- doStepIf = IsSharedWorkdir))
-
- # find number of cores
- factory.addStep(SetPropertyFromCommand(
- name = "nproc",
- property = "nproc",
- description = "Finding number of CPUs",
- command = ["nproc"]))
-
- # find gcc and g++ compilers
- factory.addStep(FileDownload(
- name = "dlfindbinpl",
- mastersrc = scripts_dir + '/findbin.pl',
- workerdest = "../findbin.pl",
- mode = 0o755))
-
- factory.addStep(SetPropertyFromCommand(
- name = "gcc",
- property = "cc_command",
- description = "Finding gcc command",
- command = [
- "../findbin.pl", "gcc",
- cc_version[0] if cc_version is not None else '',
- cc_version[1] if cc_version is not None else ''
- ],
- haltOnFailure = True))
-
- factory.addStep(SetPropertyFromCommand(
- name = "g++",
- property = "cxx_command",
- description = "Finding g++ command",
- command = [
- "../findbin.pl", "g++",
- cc_version[0] if cc_version is not None else '',
- cc_version[1] if cc_version is not None else ''
- ],
- haltOnFailure = True))
-
- # see if ccache is available
- factory.addStep(SetPropertyFromCommand(
- property = "ccache_command",
- command = ["which", "ccache"],
- description = "Testing for ccache command",
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = False,
- ))
-
- # expire tree if needed
- if tree_expire > 0:
- factory.addStep(FileDownload(
- name = "dlexpiresh",
- doStepIf = IsExpireRequested,
- mastersrc = scripts_dir + '/expire.sh',
- workerdest = "../expire.sh",
- mode = 0o755))
-
- factory.addStep(ShellCommand(
- name = "expire",
- description = "Checking for build tree expiry",
- command = ["./expire.sh", str(tree_expire)],
- workdir = ".",
- haltOnFailure = True,
- doStepIf = IsExpireRequested,
- timeout = 2400))
-
- # cleanup.sh if needed
- factory.addStep(FileDownload(
- name = "dlcleanupsh",
- mastersrc = scripts_dir + '/cleanup.sh',
- workerdest = "../cleanup.sh",
- mode = 0o755,
- doStepIf = IsCleanupRequested))
-
- factory.addStep(ShellCommand(
- name = "cleanold",
- description = "Cleaning previous builds",
- command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
- workdir = ".",
- haltOnFailure = True,
- doStepIf = IsCleanupRequested,
- timeout = 2400))
-
- factory.addStep(ShellCommand(
- name = "cleanup",
- description = "Cleaning work area",
- command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
- workdir = ".",
- haltOnFailure = True,
- doStepIf = IsCleanupRequested,
- timeout = 2400))
-
- # user-requested clean targets
- for tuple in CleanTargetMap:
- factory.addStep(ShellCommand(
- name = tuple[1],
- description = 'User-requested "make %s"' % tuple[1],
- command = ["make", tuple[1], "V=s"],
- env = MakeEnv(),
- doStepIf = IsMakeCleanRequested(tuple[0])
- ))
-
- # Workaround bug when switching from a checked out tag back to a branch
- # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
- factory.addStep(ShellCommand(
- name = "gitcheckout",
- description = "Ensure that Git HEAD is sane",
- command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
- haltOnFailure = True))
-
- # check out the source
- # Git() runs:
- # if repo doesn't exist: 'git clone repourl'
- # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
- # 'git fetch -t repourl branch; git reset --hard revision'
- # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
- # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
- factory.addStep(Git(
- name = "gitclean",
- repourl = repo_url,
- branch = repo_branch,
- mode = 'full',
- method = 'clean',
- haltOnFailure = True,
- doStepIf = IsGitCleanRequested,
- ))
-
- factory.addStep(Git(
- name = "gitfresh",
- repourl = repo_url,
- branch = repo_branch,
- mode = 'full',
- method = 'fresh',
- haltOnFailure = True,
- doStepIf = IsGitFreshRequested,
- ))
-
- # update remote refs
- factory.addStep(ShellCommand(
- name = "fetchrefs",
- description = "Fetching Git remote refs",
- command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
- haltOnFailure = True
- ))
-
- # switch to tag
- factory.addStep(ShellCommand(
- name = "switchtag",
- description = "Checking out Git tag",
- command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
- haltOnFailure = True,
- doStepIf = IsTaggingRequested
- ))
-
- # Verify that Git HEAD points to a tag or branch
- # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
- factory.addStep(ShellCommand(
- name = "gitverify",
- description = "Ensure that Git HEAD is pointing to a branch or tag",
- command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
- haltOnFailure = True))
-
- factory.addStep(ShellCommand(
- name = "rmtmp",
- description = "Remove tmp folder",
- command=["rm", "-rf", "tmp/"]))
-
- # feed
-# factory.addStep(ShellCommand(
-# name = "feedsconf",
-# description = "Copy the feeds.conf",
-# command='''cp ~/feeds.conf ./feeds.conf''' ))
-
- # feed
- factory.addStep(ShellCommand(
- name = "rmfeedlinks",
- description = "Remove feed symlinks",
- command=["rm", "-rf", "package/feeds/"]))
-
- factory.addStep(StringDownload(
- name = "ccachecc",
- s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
- workerdest = "../ccache_cc.sh",
- mode = 0o755,
- ))
-
- factory.addStep(StringDownload(
- name = "ccachecxx",
- s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
- workerdest = "../ccache_cxx.sh",
- mode = 0o755,
- ))
-
- # Git SSH
- if git_ssh:
- factory.addStep(StringDownload(
- name = "dlgitclonekey",
- s = git_ssh_key,
- workerdest = "../git-clone.key",
- mode = 0o600,
- ))
-
- factory.addStep(ShellCommand(
- name = "patchfeedsconf",
- description = "Patching feeds.conf",
- command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
- haltOnFailure = True
- ))
-
- # feed
- factory.addStep(ShellCommand(
- name = "updatefeeds",
- description = "Updating feeds",
- command=["./scripts/feeds", "update"],
- env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
- haltOnFailure = True
- ))
-
- # Git SSH
- if git_ssh:
- factory.addStep(ShellCommand(
- name = "rmfeedsconf",
- description = "Removing feeds.conf",
- command=["rm", "feeds.conf"],
- haltOnFailure = True
- ))
-
- # feed
- factory.addStep(ShellCommand(
- name = "installfeeds",
- description = "Installing feeds",
- command=["./scripts/feeds", "install", "-a"],
- env = MakeEnv(tryccache=True),
- haltOnFailure = True
- ))
-
- # seed config
- if config_seed is not None:
- factory.addStep(StringDownload(
- name = "dlconfigseed",
- s = config_seed + '\n',
- workerdest = ".config",
- mode = 0o644
- ))
-
- # configure
- factory.addStep(ShellCommand(
- name = "newconfig",
- description = "Seeding .config",
- command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
- ))
-
- factory.addStep(ShellCommand(
- name = "delbin",
- description = "Removing output directory",
- command = ["rm", "-rf", "bin/"]
- ))
-
- factory.addStep(ShellCommand(
- name = "defconfig",
- description = "Populating .config",
- command = ["make", "defconfig"],
- env = MakeEnv()
- ))
-
- # check arch
- factory.addStep(ShellCommand(
- name = "checkarch",
- description = "Checking architecture",
- command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
- logEnviron = False,
- want_stdout = False,
- want_stderr = False,
- haltOnFailure = True
- ))
-
- # find libc suffix
- factory.addStep(SetPropertyFromCommand(
- name = "libc",
- property = "libc",
- description = "Finding libc suffix",
- command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
-
- # install build key
- if usign_key is not None:
- factory.addStep(StringDownload(
- name = "dlkeybuildpub",
- s = UsignSec2Pub(usign_key, usign_comment),
- workerdest = "key-build.pub",
- mode = 0o600,
- ))
-
- factory.addStep(StringDownload(
- name = "dlkeybuild",
- s = "# fake private key",
- workerdest = "key-build",
- mode = 0o600,
- ))
-
- factory.addStep(StringDownload(
- name = "dlkeybuilducert",
- s = "# fake certificate",
- workerdest = "key-build.ucert",
- mode = 0o600,
- ))
-
- # prepare dl
- factory.addStep(ShellCommand(
- name = "dldir",
- description = "Preparing dl/",
- command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
- logEnviron = False,
- want_stdout = False
- ))
-
- # prepare tar
- factory.addStep(ShellCommand(
- name = "dltar",
- description = "Building and installing GNU tar",
- command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
- env = MakeEnv(tryccache=True),
- haltOnFailure = True
- ))
-
- # populate dl
- factory.addStep(ShellCommand(
- name = "dlrun",
- description = "Populating dl/",
- command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
- env = MakeEnv(),
- logEnviron = False,
- locks = [dlLock.access('exclusive')],
- ))
-
- factory.addStep(ShellCommand(
- name = "cleanbase",
- description = "Cleaning base-files",
- command=["make", "package/base-files/clean", "V=s"]
- ))
-
- # build
- factory.addStep(ShellCommand(
- name = "tools",
- description = "Building and installing tools",
- command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
- env = MakeEnv(tryccache=True),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "toolchain",
- description = "Building and installing toolchain",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "kmods",
- description = "Building kmods",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
- env = MakeEnv(),
- #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
- haltOnFailure = True
- ))
-
- # find kernel version
- factory.addStep(SetPropertyFromCommand(
- name = "kernelversion",
- property = "kernelversion",
- description = "Finding the effective Kernel version",
- command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
- env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
- ))
-
- factory.addStep(ShellCommand(
- name = "pkgclean",
- description = "Cleaning up package build",
- command=["make", "package/cleanup", "V=s"]
- ))
-
- factory.addStep(ShellCommand(
- name = "pkgbuild",
- description = "Building packages",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
- env = MakeEnv(),
- #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
- haltOnFailure = True
- ))
-
- # factory.addStep(IfBuiltinShellCommand(
- factory.addStep(ShellCommand(
- name = "pkginstall",
- description = "Installing packages",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "pkgindex",
- description = "Indexing packages",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- if enable_kmod_archive and embed_kmod_repository:
- # embed kmod repository. Must happen before 'images'
-
- # find rootfs staging directory
- factory.addStep(SetPropertyFromCommand(
- name = "stageroot",
- property = "stageroot",
- description = "Finding the rootfs staging directory",
- command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
- env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
- want_stderr = False
- ))
-
- factory.addStep(ShellCommand(
- name = "filesdir",
- description = "Creating file overlay directory",
- command=["mkdir", "-p", "files/etc/opkg"],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "kmodconfig",
- description = "Embedding kmod repository configuration",
- command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
- "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
- haltOnFailure = True
- ))
-
- #factory.addStep(IfBuiltinShellCommand(
- factory.addStep(ShellCommand(
- name = "images",
- description = "Building and installing images",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "buildinfo",
- description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
- command = "make -j1 buildinfo V=s || true",
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "json_overview_image_info",
- description = "Generate profiles.json in target folder",
- command = "make -j1 json_overview_image_info V=s || true",
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "checksums",
- description = "Calculating checksums",
- command=["make", "-j1", "checksum", "V=s"],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- if enable_kmod_archive:
- factory.addStep(ShellCommand(
- name = "kmoddir",
- description = "Creating kmod directory",
- command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "kmodprepare",
- description = "Preparing kmod archive",
- command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
- Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
- Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "kmodindex",
- description = "Indexing kmod archive",
- command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
- Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
- env = MakeEnv(),
- haltOnFailure = True
- ))
-
- # sign
- if ini.has_option("gpg", "key") or usign_key is not None:
- factory.addStep(MasterShellCommand(
- name = "signprepare",
- description = "Preparing temporary signing directory",
- command = ["mkdir", "-p", "%s/signing" %(work_dir)],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "signpack",
- description = "Packing files to sign",
- command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
- haltOnFailure = True
- ))
-
- factory.addStep(FileUpload(
- workersrc = "sign.tar.gz",
- masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
- haltOnFailure = True
- ))
-
- factory.addStep(MasterShellCommand(
- name = "signfiles",
- description = "Signing files",
- command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
- env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
- haltOnFailure = True
- ))
-
- factory.addStep(FileDownload(
- name = "dlsigntargz",
- mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
- workerdest = "sign.tar.gz",
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "signunpack",
- description = "Unpacking signed files",
- command = ["tar", "-xzf", "sign.tar.gz"],
- haltOnFailure = True
- ))
-
- # upload
- factory.addStep(ShellCommand(
- name = "dirprepare",
- description = "Preparing upload directory structure",
- command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "linkprepare",
- description = "Preparing repository symlink",
- command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
- doStepIf = IsNoMasterBuild,
- haltOnFailure = True
- ))
-
- if enable_kmod_archive:
- factory.addStep(ShellCommand(
- name = "kmoddirprepare",
- description = "Preparing kmod archive upload directory",
- command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "dirupload",
- description = "Uploading directory structure",
- command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = True,
- logEnviron = False,
- ))
-
- # download remote sha256sums to 'target-sha256sums'
- factory.addStep(ShellCommand(
- name = "target-sha256sums",
- description = "Fetching remote sha256sums for target",
- command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- logEnviron = False,
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = False,
- ))
-
- # build list of files to upload
- factory.addStep(FileDownload(
- name = "dlsha2rsyncpl",
- mastersrc = scripts_dir + '/sha2rsync.pl',
- workerdest = "../sha2rsync.pl",
- mode = 0o755,
- ))
-
- factory.addStep(ShellCommand(
- name = "buildlist",
- description = "Building list of files to upload",
- command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
- haltOnFailure = True,
- ))
-
- factory.addStep(FileDownload(
- name = "dlrsync.sh",
- mastersrc = scripts_dir + '/rsync.sh',
- workerdest = "../rsync.sh",
- mode = 0o755
- ))
-
- # upload new files and update existing ones
- factory.addStep(ShellCommand(
- name = "targetupload",
- description = "Uploading target files",
- command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
- ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
- Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = True,
- logEnviron = False,
- ))
-
- # delete files which don't exist locally
- factory.addStep(ShellCommand(
- name = "targetprune",
- description = "Pruning target files",
- command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
- ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
- Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = True,
- logEnviron = False,
- ))
-
- if enable_kmod_archive:
- factory.addStep(ShellCommand(
- name = "kmodupload",
- description = "Uploading kmod archive",
- command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
- ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
- Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = True,
- logEnviron = False,
- ))
-
- if rsync_src_url is not None:
- factory.addStep(ShellCommand(
- name = "sourcelist",
- description = "Finding source archives to upload",
- command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
- haltOnFailure = True
- ))
-
- factory.addStep(ShellCommand(
- name = "sourceupload",
- description = "Uploading source archives",
- command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
- [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
- env={'RSYNC_PASSWORD': rsync_src_key},
- haltOnFailure = True,
- logEnviron = False,
- ))
-
- if False:
- factory.addStep(ShellCommand(
- name = "packageupload",
- description = "Uploading package files",
- command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = True,
- logEnviron = False,
- ))
-
- # logs
- if False:
- factory.addStep(ShellCommand(
- name = "upload",
- description = "Uploading logs",
- command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
- env={'RSYNC_PASSWORD': rsync_bin_key},
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = True,
- alwaysRun = True,
- logEnviron = False,
- ))
-
- factory.addStep(ShellCommand(
- name = "df",
- description = "Reporting disk usage",
- command=["df", "-h", "."],
- env={'LC_ALL': 'C'},
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = False,
- alwaysRun = True
- ))
-
- factory.addStep(ShellCommand(
- name = "du",
- description = "Reporting estimated file space usage",
- command=["du", "-sh", "."],
- env={'LC_ALL': 'C'},
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = False,
- alwaysRun = True
- ))
-
- factory.addStep(ShellCommand(
- name = "ccachestat",
- description = "Reporting ccache stats",
- command=["ccache", "-s"],
- env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
- want_stderr = False,
- haltOnFailure = False,
- flunkOnFailure = False,
- warnOnFailure = False,
- alwaysRun = True,
- ))
-
- c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
-
- c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
- force_factory.addStep(steps.Trigger(
- name = "trigger_%s" % target,
- description = "Triggering %s build" % target,
- schedulerNames = [ "trigger_%s" % target ],
- set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
- doStepIf = IsTargetSelected(target)
- ))
+
+def canStartBuild(builder, wfb, request):
+ """filter out non tag requests for tag_only workers."""
+ wtagonly = wfb.worker.properties.getProperty("tag_only")
+ tag = request.properties.getProperty("tag")
+
+ if wtagonly and not tag:
+ return False
+
+ return True
+
+
+c["builders"] = []
+
+workerNames = []
+
+for worker in c["workers"]:
+ workerNames.append(worker.workername)
+
+# add a single LocalWorker to handle the forcebuild builder
+c["workers"].append(LocalWorker("__local_force_build", max_builds=1))
+
+force_factory = BuildFactory()
+force_factory.addStep(
+ steps.Trigger(
+ name="trigger_build",
+ schedulerNames=["trigger"],
+ sourceStamps=[
+ {
+ "codebase": "",
+ "branch": Property("force_branch"),
+ "revision": Property("force_revision"),
+ "repository": repo_url,
+ "project": "",
+ }
+ ],
+ set_properties={
+ "reason": Property("reason"),
+ "tag": Property("tag"),
+ "target": Property("target"),
+ },
+ )
+)
+
+c["builders"].append(
+ BuilderConfig(
+ name="00_force_build", workername="__local_force_build", factory=force_factory
+ )
+)
+
+
+# NB the phase1 build factory assumes workers are single-build only
+def prepareFactory(target):
+ ts = target.split("/")
+
+ factory = BuildFactory()
+
+ # setup shared work directory if required
+ factory.addStep(
+ ShellCommand(
+ name="sharedwd",
+ descriptionDone="Shared work directory set up",
+ command='test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
+ workdir=".",
+ haltOnFailure=True,
+ )
+ )
+
+ # find number of cores
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="nproc",
+ property="nproc",
+ description="Finding number of CPUs",
+ command=["nproc"],
+ )
+ )
+
+ # find gcc and g++ compilers
+ factory.addStep(
+ FileDownload(
+ name="dlfindbinpl",
+ mastersrc=scripts_dir + "/findbin.pl",
+ workerdest="../findbin.pl",
+ mode=0o755,
+ )
+ )
+
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="gcc",
+ property="cc_command",
+ description="Finding gcc command",
+ command=["../findbin.pl", "gcc", "", ""],
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="g++",
+ property="cxx_command",
+ description="Finding g++ command",
+ command=["../findbin.pl", "g++", "", ""],
+ haltOnFailure=True,
+ )
+ )
+
+ # see if ccache is available
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="ccache",
+ property="ccache_command",
+ description="Testing for ccache command",
+ command=["which", "ccache"],
+ haltOnFailure=False,
+ flunkOnFailure=False,
+ warnOnFailure=False,
+ hideStepIf=lambda r, s: r == results.FAILURE,
+ )
+ )
+
+ # check out the source
+ # Git() runs:
+ # if repo doesn't exist: 'git clone repourl'
+ # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
+ # git cat-file -e <commit>
+ # git checkout -f <commit>
+ # git checkout -B <branch>
+ # git rev-parse HEAD
+ factory.addStep(
+ Git(
+ name="git",
+ repourl=repo_url,
+ mode="full",
+ method="fresh",
+ locks=NetLockDl,
+ haltOnFailure=True,
+ )
+ )
+
+ # workaround for https://github.com/openwrt/buildbot/issues/5
+ factory.addStep(
+ Git(
+ name="git me once more please",
+ repourl=repo_url,
+ mode="full",
+ method="fresh",
+ locks=NetLockDl,
+ haltOnFailure=True,
+ )
+ )
+
+ # update remote refs
+ factory.addStep(
+ ShellCommand(
+ name="fetchrefs",
+ description="Fetching Git remote refs",
+ descriptionDone="Git remote refs fetched",
+ command=[
+ "git",
+ "fetch",
+ "origin",
+ Interpolate(
+ "+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s"
+ ),
+ ],
+ haltOnFailure=True,
+ )
+ )
+
+ # getver.sh requires local branches to track upstream otherwise version computation fails.
+ # Git() does not set tracking branches when cloning or switching, so work around this here
+ factory.addStep(
+ ShellCommand(
+ name="trackupstream",
+ description="Setting upstream branch",
+ descriptionDone="getver.sh is happy now",
+ command=["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")],
+ haltOnFailure=True,
+ )
+ )
+
+ # Verify that Git HEAD points to a tag or branch
+ # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
+ factory.addStep(
+ ShellCommand(
+ name="gitverify",
+ description="Ensuring that Git HEAD is pointing to a branch or tag",
+ descriptionDone="Git HEAD is sane",
+ command='git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ StringDownload(
+ name="ccachecc",
+ s='#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
+ workerdest="../ccache_cc.sh",
+ mode=0o755,
+ )
+ )
+
+ factory.addStep(
+ StringDownload(
+ name="ccachecxx",
+ s='#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
+ workerdest="../ccache_cxx.sh",
+ mode=0o755,
+ )
+ )
+
+ # feed
+ factory.addStep(
+ ShellCommand(
+ name="updatefeeds",
+ description="Updating feeds",
+ command=["./scripts/feeds", "update"],
+ env=MakeEnv(tryccache=True),
+ haltOnFailure=True,
+ locks=NetLockDl,
+ )
+ )
+
+ # feed
+ factory.addStep(
+ ShellCommand(
+ name="installfeeds",
+ description="Installing feeds",
+ command=["./scripts/feeds", "install", "-a"],
+ env=MakeEnv(tryccache=True),
+ haltOnFailure=True,
+ )
+ )
+
+ # seed config
+ factory.addStep(
+ StringDownload(
+ name="dlconfigseed",
+ s=Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
+ workerdest=".config",
+ mode=0o644,
+ )
+ )
+
+ # configure
+ factory.addStep(
+ ShellCommand(
+ name="newconfig",
+ descriptionDone=".config seeded",
+ command=Interpolate(
+ "printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config",
+ target=ts[0],
+ subtarget=ts[1],
+ usign=GetUsignKey,
+ ),
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="defconfig",
+ description="Populating .config",
+ command=["make", "defconfig"],
+ env=MakeEnv(),
+ )
+ )
+
+ # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
+ factory.addStep(
+ ShellCommand(
+ name="checkarch",
+ description="Checking architecture",
+ descriptionDone="Architecture validated",
+ command='grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config'
+ % (ts[0], ts[1]),
+ logEnviron=False,
+ want_stdout=False,
+ want_stderr=False,
+ haltOnFailure=True,
+ flunkOnFailure=False, # this is not a build FAILURE - TODO mark build as SKIPPED
+ )
+ )
+
+ # find libc suffix
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="libc",
+ property="libc",
+ description="Finding libc suffix",
+ command=[
+ "sed",
+ "-ne",
+ '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }',
+ ".config",
+ ],
+ )
+ )
+
+ # install build key
+ factory.addStep(
+ StringDownload(
+ name="dlkeybuildpub",
+ s=Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
+ workerdest="key-build.pub",
+ mode=0o600,
+ doStepIf=IsUsignEnabled,
+ )
+ )
+
+ factory.addStep(
+ StringDownload(
+ name="dlkeybuild",
+ s="# fake private key",
+ workerdest="key-build",
+ mode=0o600,
+ doStepIf=IsUsignEnabled,
+ )
+ )
+
+ factory.addStep(
+ StringDownload(
+ name="dlkeybuilducert",
+ s="# fake certificate",
+ workerdest="key-build.ucert",
+ mode=0o600,
+ doStepIf=IsUsignEnabled,
+ )
+ )
+
+ # prepare dl
+ factory.addStep(
+ ShellCommand(
+ name="dldir",
+ description="Preparing dl/",
+ descriptionDone="dl/ prepared",
+ command='mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
+ workdir=Property("builddir"),
+ logEnviron=False,
+ want_stdout=False,
+ )
+ )
+
+ # cleanup dl
+ factory.addStep(
+ ShellCommand(
+ name="dlprune",
+ description="Pruning dl/",
+ descriptionDone="dl/ pruned",
+ command="find dl/ -mindepth 1 -atime +15 -delete -print",
+ logEnviron=False,
+ )
+ )
+
+ # prepare tar
+ factory.addStep(
+ ShellCommand(
+ name="dltar",
+ description="Building and installing GNU tar",
+ descriptionDone="GNU tar built and installed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "tools/tar/compile",
+ "V=s",
+ ],
+ env=MakeEnv(tryccache=True),
+ haltOnFailure=True,
+ )
+ )
+
+ # populate dl
+ factory.addStep(
+ ShellCommand(
+ name="dlrun",
+ description="Populating dl/",
+ descriptionDone="dl/ populated",
+ command=["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
+ env=MakeEnv(),
+ logEnviron=False,
+ locks=NetLockDl,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="cleanbase",
+ description="Cleaning base-files",
+ command=["make", "package/base-files/clean", "V=s"],
+ )
+ )
+
+ # build
+ factory.addStep(
+ ShellCommand(
+ name="tools",
+ description="Building and installing tools",
+ descriptionDone="Tools built and installed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "tools/install",
+ "V=s",
+ ],
+ env=MakeEnv(tryccache=True),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="toolchain",
+ description="Building and installing toolchain",
+ descriptionDone="Toolchain built and installed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "toolchain/install",
+ "V=s",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmods",
+ description="Building kmods",
+ descriptionDone="Kmods built",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "target/compile",
+ "V=s",
+ "IGNORE_ERRORS=n m",
+ "BUILD_LOG=1",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ # find kernel version
+ factory.addStep(
+ SetPropertyFromCommand(
+ name="kernelversion",
+ property="kernelversion",
+ description="Finding the effective Kernel version",
+ command="make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
+ env={"TOPDIR": Interpolate("%(prop:builddir)s/build")},
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="pkgclean",
+ description="Cleaning up package build",
+ descriptionDone="Package build cleaned up",
+ command=["make", "package/cleanup", "V=s"],
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="pkgbuild",
+ description="Building packages",
+ descriptionDone="Packages built",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "package/compile",
+ "V=s",
+ "IGNORE_ERRORS=n m",
+ "BUILD_LOG=1",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="pkginstall",
+ description="Installing packages",
+ descriptionDone="Packages installed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "package/install",
+ "V=s",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="pkgindex",
+ description="Indexing packages",
+ descriptionDone="Packages indexed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "package/index",
+ "V=s",
+ "CONFIG_SIGNED_PACKAGES=",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="images",
+ description="Building and installing images",
+ descriptionDone="Images built and installed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "target/install",
+ "V=s",
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="buildinfo",
+ description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
+ command="make -j1 buildinfo V=s || true",
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="json_overview_image_info",
+ description="Generating profiles.json in target folder",
+ command="make -j1 json_overview_image_info V=s || true",
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="checksums",
+ description="Calculating checksums",
+ descriptionDone="Checksums calculated",
+ command=["make", "-j1", "checksum", "V=s"],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmoddir",
+ descriptionDone="Kmod directory created",
+ command=[
+ "mkdir",
+ "-p",
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ ],
+ haltOnFailure=True,
+ doStepIf=IsKmodArchiveEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmodprepare",
+ description="Preparing kmod archive",
+ descriptionDone="Kmod archive prepared",
+ command=[
+ "rsync",
+ "--include=/kmod-*.ipk",
+ "--exclude=*",
+ "-va",
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ ],
+ haltOnFailure=True,
+ doStepIf=IsKmodArchiveEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmodindex",
+ description="Indexing kmod archive",
+ descriptionDone="Kmod archive indexed",
+ command=[
+ "make",
+ Interpolate("-j%(prop:nproc:-1)s"),
+ "package/index",
+ "V=s",
+ "CONFIG_SIGNED_PACKAGES=",
+ Interpolate(
+ "PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ ],
+ env=MakeEnv(),
+ haltOnFailure=True,
+ doStepIf=IsKmodArchiveEnabled,
+ )
+ )
+
+ # sign
+ factory.addStep(
+ MasterShellCommand(
+ name="signprepare",
+ descriptionDone="Temporary signing directory prepared",
+ command=["mkdir", "-p", "%s/signing" % (work_dir)],
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="signpack",
+ description="Packing files to sign",
+ descriptionDone="Files to sign packed",
+ command=Interpolate(
+ "find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ factory.addStep(
+ FileUpload(
+ workersrc="sign.tar.gz",
+ masterdest="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ factory.addStep(
+ MasterShellCommand(
+ name="signfiles",
+ description="Signing files",
+ descriptionDone="Files signed",
+ command=[
+ "%s/signall.sh" % (scripts_dir),
+ "%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
+ Interpolate("%(prop:branch)s"),
+ ],
+ env={"CONFIG_INI": os.getenv("BUILDMASTER_CONFIG", "./config.ini")},
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ factory.addStep(
+ FileDownload(
+ name="dlsigntargz",
+ mastersrc="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
+ workerdest="sign.tar.gz",
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="signunpack",
+ description="Unpacking signed files",
+ descriptionDone="Signed files unpacked",
+ command=["tar", "-xzf", "sign.tar.gz"],
+ haltOnFailure=True,
+ doStepIf=IsSignEnabled,
+ )
+ )
+
+ # upload
+ factory.addStep(
+ ShellCommand(
+ name="dirprepare",
+ descriptionDone="Upload directory structure prepared",
+ command=[
+ "mkdir",
+ "-p",
+ Interpolate(
+ "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s",
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ ],
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="linkprepare",
+ descriptionDone="Repository symlink prepared",
+ command=[
+ "ln",
+ "-s",
+ "-f",
+ Interpolate(
+ "../packages-%(kw:basever)s",
+ basever=util.Transform(GetBaseVersion, Property("branch")),
+ ),
+ Interpolate(
+ "tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix
+ ),
+ ],
+ doStepIf=IsNoMasterBuild,
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmoddirprepare",
+ descriptionDone="Kmod archive upload directory prepared",
+ command=[
+ "mkdir",
+ "-p",
+ Interpolate(
+ "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s",
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ ],
+ haltOnFailure=True,
+ doStepIf=IsKmodArchiveEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="dirupload",
+ description="Uploading directory structure",
+ descriptionDone="Directory structure uploaded",
+ command=["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")]
+ + rsync_defopts
+ + [
+ "tmp/upload/",
+ Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url")),
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
+ )
+ },
+ haltOnFailure=True,
+ logEnviron=False,
+ locks=NetLockUl,
+ doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
+ )
+ )
+
+ # download remote sha256sums to 'target-sha256sums'
+ factory.addStep(
+ ShellCommand(
+ name="target-sha256sums",
+ description="Fetching remote sha256sums for target",
+ descriptionDone="Remote sha256sums for target fetched",
+ command=["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")]
+ + rsync_defopts
+ + [
+ Interpolate(
+ "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums",
+ url=GetRsyncParams.withArgs("bin", "url"),
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ "target-sha256sums",
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
+ )
+ },
+ logEnviron=False,
+ haltOnFailure=False,
+ flunkOnFailure=False,
+ warnOnFailure=False,
+ doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
+ )
+ )
+
+ # build list of files to upload
+ factory.addStep(
+ FileDownload(
+ name="dlsha2rsyncpl",
+ mastersrc=scripts_dir + "/sha2rsync.pl",
+ workerdest="../sha2rsync.pl",
+ mode=0o755,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="buildlist",
+ description="Building list of files to upload",
+ descriptionDone="List of files to upload built",
+ command=[
+ "../sha2rsync.pl",
+ "target-sha256sums",
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ "rsynclist",
+ ],
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ FileDownload(
+ name="dlrsync.sh",
+ mastersrc=scripts_dir + "/rsync.sh",
+ workerdest="../rsync.sh",
+ mode=0o755,
+ )
+ )
+
+ # upload new files and update existing ones
+ factory.addStep(
+ ShellCommand(
+ name="targetupload",
+ description="Uploading target files",
+ descriptionDone="Target files uploaded",
+ command=[
+ "../rsync.sh",
+ "--exclude=/kmods/",
+ "--files-from=rsynclist",
+ "--delay-updates",
+ "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
+ ]
+ + rsync_defopts
+ + [
+ Interpolate("-a%(prop:rsync_ipv4:+4)s"),
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ Interpolate(
+ "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
+ url=GetRsyncParams.withArgs("bin", "url"),
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
+ )
+ },
+ haltOnFailure=True,
+ logEnviron=False,
+ doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
+ )
+ )
+
+ # delete files which don't exist locally
+ factory.addStep(
+ ShellCommand(
+ name="targetprune",
+ description="Pruning target files",
+ descriptionDone="Target files pruned",
+ command=[
+ "../rsync.sh",
+ "--exclude=/kmods/",
+ "--delete",
+ "--existing",
+ "--ignore-existing",
+ "--delay-updates",
+ "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
+ ]
+ + rsync_defopts
+ + [
+ Interpolate("-a%(prop:rsync_ipv4:+4)s"),
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ Interpolate(
+ "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
+ url=GetRsyncParams.withArgs("bin", "url"),
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
+ )
+ },
+ haltOnFailure=True,
+ logEnviron=False,
+ locks=NetLockUl,
+ doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="kmodupload",
+ description="Uploading kmod archive",
+ descriptionDone="Kmod archive uploaded",
+ command=[
+ "../rsync.sh",
+ "--delete",
+ "--delay-updates",
+ "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
+ ]
+ + rsync_defopts
+ + [
+ Interpolate("-a%(prop:rsync_ipv4:+4)s"),
+ Interpolate(
+ "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ Interpolate(
+ "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/",
+ url=GetRsyncParams.withArgs("bin", "url"),
+ target=ts[0],
+ subtarget=ts[1],
+ prefix=GetVersionPrefix,
+ ),
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
+ )
+ },
+ haltOnFailure=True,
+ logEnviron=False,
+ locks=NetLockUl,
+ doStepIf=IsKmodArchiveAndRsyncEnabled,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="sourcelist",
+ description="Finding source archives to upload",
+ descriptionDone="Source archives to upload found",
+ command="find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
+ haltOnFailure=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="sourceupload",
+ description="Uploading source archives",
+ descriptionDone="Source archives uploaded",
+ command=[
+ "../rsync.sh",
+ "--files-from=sourcelist",
+ "--size-only",
+ "--delay-updates",
+ ]
+ + rsync_defopts
+ + [
+ Interpolate(
+ "--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s",
+ target=ts[0],
+ subtarget=ts[1],
+ ),
+ Interpolate("-a%(prop:rsync_ipv4:+4)s"),
+ "dl/",
+ Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url")),
+ ],
+ env={
+ "RSYNC_PASSWORD": Interpolate(
+ "%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")
+ )
+ },
+ haltOnFailure=True,
+ logEnviron=False,
+ locks=NetLockUl,
+ doStepIf=util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="df",
+ description="Reporting disk usage",
+ command=["df", "-h", "."],
+ env={"LC_ALL": "C"},
+ logEnviron=False,
+ haltOnFailure=False,
+ flunkOnFailure=False,
+ warnOnFailure=False,
+ alwaysRun=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="du",
+ description="Reporting estimated file space usage",
+ command=["du", "-sh", "."],
+ env={"LC_ALL": "C"},
+ logEnviron=False,
+ haltOnFailure=False,
+ flunkOnFailure=False,
+ warnOnFailure=False,
+ alwaysRun=True,
+ )
+ )
+
+ factory.addStep(
+ ShellCommand(
+ name="ccachestat",
+ description="Reporting ccache stats",
+ command=["ccache", "-s"],
+ logEnviron=False,
+ want_stderr=False,
+ haltOnFailure=False,
+ flunkOnFailure=False,
+ warnOnFailure=False,
+ doStepIf=util.Transform(bool, Property("ccache_command")),
+ )
+ )
+
+ return factory
+
+
+# Instantiate one builder per (branch, target/subtarget) combination.  Every
+# builder draws from the same worker pool; the per-target build pipeline is
+# produced by prepareFactory() above.
+for brname in branchNames:
+    for target in targets[brname]:
+        bldrname = brname + "_" + target
+        c["builders"].append(
+            BuilderConfig(
+                name=bldrname,
+                workernames=workerNames,
+                factory=prepareFactory(target),
+                # Tag with the branch name so the web UI can group/filter
+                # builders by branch.
+                tags=[
+                    brname,
+                ],
+                # NOTE(review): GetNextBuild / canStartBuild are defined
+                # earlier in this file and control request ordering and
+                # worker admission — confirm against the full file.
+                nextBuild=GetNextBuild,
+                canStartBuild=canStartBuild,
+            )
+        )
####### STATUS TARGETS
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
-if ini.has_option("phase1", "status_bind"):
-    c['www'] = {
-        'port': ini.get("phase1", "status_bind"),
-        'plugins': {
-            'waterfall_view': True,
-            'console_view': True,
-            'grid_view': True
-        }
-    }
-
-    if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
-        c['www']['auth'] = util.UserPasswordAuth([
-            (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
-        ])
-        c['www']['authz'] = util.Authz(
-            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
-            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
-        )
-
-c['services'] = []
-if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
-    irc_host = ini.get("irc", "host")
-    irc_port = 6667
-    irc_chan = ini.get("irc", "channel")
-    irc_nick = ini.get("irc", "nickname")
-    irc_pass = None
-
-    if ini.has_option("irc", "port"):
-        irc_port = ini.getint("irc", "port")
-
-    if ini.has_option("irc", "password"):
-        irc_pass = ini.get("irc", "password")
-
-    irc = reporters.IRC(irc_host, irc_nick,
-        port = irc_port,
-        password = irc_pass,
-        channels = [ irc_chan ],
-        notify_events = [ 'exception', 'problem', 'recovery' ]
-    )
-
-    c['services'].append(irc)
-
-c['revlink'] = util.RevlinkMatch([
-    r'https://git.openwrt.org/openwrt/(.*).git'
-    ],
-    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
+# Web UI: only enabled when [phase1] provides a status_bind address/port.
+if "status_bind" in inip1:
+    c["www"] = {
+        "port": inip1.get("status_bind"),
+        "plugins": {"waterfall_view": True, "console_view": True, "grid_view": True},
+    }
+
+    # Optional single admin account; when configured, only that user is
+    # granted the "admins" role and access to control endpoints.
+    if "status_user" in inip1 and "status_password" in inip1:
+        c["www"]["auth"] = util.UserPasswordAuth(
+            [(inip1.get("status_user"), inip1.get("status_password"))]
+        )
+        c["www"]["authz"] = util.Authz(
+            allowRules=[util.AnyControlEndpointMatcher(role="admins")],
+            roleMatchers=[
+                util.RolesFromUsername(
+                    roles=["admins"], usernames=[inip1.get("status_user")]
+                )
+            ],
+        )
+
+c["services"] = []
+# Optional IRC reporter: host, nickname and channel are all required;
+# port defaults to 6667 and the password is optional.
+if ini.has_section("irc"):
+    iniirc = ini["irc"]
+    irc_host = iniirc.get("host", None)
+    irc_port = iniirc.getint("port", 6667)
+    irc_chan = iniirc.get("channel", None)
+    irc_nick = iniirc.get("nickname", None)
+    irc_pass = iniirc.get("password", None)
+
+    if irc_host and irc_nick and irc_chan:
+        irc = reporters.IRC(
+            irc_host,
+            irc_nick,
+            port=irc_port,
+            password=irc_pass,
+            channels=[irc_chan],
+            notify_events=["exception", "problem", "recovery"],
+        )
+
+        c["services"].append(irc)
+
+# Turn revision hashes shown in the UI into links to the openwrt.org
+# gitweb commit viewer.
+c["revlink"] = util.RevlinkMatch(
+    [r"https://git.openwrt.org/openwrt/(.*).git"],
+    r"https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s",
+)
####### DB URL
-c['db'] = {
-    # This specifies what database buildbot uses to store its state. You can leave
-    # this at its default for all but the largest installations.
-    'db_url' : "sqlite:///state.sqlite",
+c["db"] = {
+    # This specifies what database buildbot uses to store its state. You can leave
+    # this at its default for all but the largest installations.
+    "db_url": "sqlite:///state.sqlite",
}
-c['buildbotNetUsageData'] = None
+# Opt out of reporting anonymous usage data to the buildbot project.
+c["buildbotNetUsageData"] = None