2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
# Record this master's PID once so external tooling can locate the process;
# never overwrite a pid file left by an already-running instance.
pid_path = "twistd.pid"
if not os.path.exists(pid_path):
    with open(pid_path, "w") as handle:
        handle.write("{}".format(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI configuration; BUILDMASTER_CONFIG may override the path.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Fail fast with an actionable message: say which mandatory sections are
# absent and which file was read, instead of a generic complaint.
_missing_sections = [s for s in ("general", "phase1") if s not in ini]
if _missing_sections:
    raise ValueError("Fix your configuration: missing section(s) %s in %s" % (
        ", ".join(_missing_sections),
        os.getenv("BUILDMASTER_CONFIG", "./config.ini")))
# Master-side paths: work directory (from [general]) and the helper-scripts
# checkout expected next to the master's base directory.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Git repository to build.
# NOTE(review): the [repo] section is not validated above, so a missing
# section raises KeyError here -- confirm intended.
repo_url = ini['repo'].get("url")

# rsync options shared by all upload/download steps below.
rsync_defopts = ["-v", "-4", "--timeout=120"]

#if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
#    rsync_bin_defopts += ["--contimeout=20"]
def ini_parse_branch(section):
    """Parse one '[branch ...]' INI section into the global branch registry.

    NOTE(review): several lines of this function are elided in this view
    (the guard conditions around the two raises, the 'b' dict
    initialization, and the final registration into 'branches');
    comments below cover only the visible code.
    """
    name = section.get("name")

    # A branch section must carry a unique name.
    raise ValueError("missing 'name' in " + repr(section))
    raise ValueError("duplicate branch name in " + repr(section))

    # Rsync endpoints and credentials for binaries and sources.
    b["bin_url"] = section.get("binary_url")
    b["bin_key"] = section.get("binary_password")

    b["src_url"] = section.get("source_url")
    b["src_key"] = section.get("source_password")

    # Optional signing material for this branch.
    b["gpg_key"] = section.get("gpg_key")

    b["usign_key"] = section.get("usign_key")
    # Default usign comment derived from the branch name,
    # e.g. "untrusted comment: Openwrt 21.02 key".
    usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
    b["usign_comment"] = section.get("usign_comment", usign_comment)

    # Optional .config seed prepended to every build's configuration.
    b["config_seed"] = section.get("config_seed")

    # Whether to archive kmod-*.ipk per kernel version for this branch.
    b["kmod_archive"] = section.getboolean("kmod_archive", False)

    log.msg("Configured branch: {}".format(name))
# PB port can be either a numeric port or a connection string
# NOTE(review): 'inip1' is defined in elided lines (presumably ini['phase1']).
pb_port = inip1.get("port") or 9989

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### PROJECT IDENTITY

# 'title' appears at the top of the web UI and in the waterfall page title;
# it links to 'titleURL'.
general_cfg = ini['general']
c['title'] = general_cfg.get("title")
c['titleURL'] = general_cfg.get("title_url")

# Externally visible base URL of this buildbot's internal web server; the
# master cannot reliably determine its own public host name, so it is
# configured explicitly.
c['buildbotURL'] = inip1.get("buildbot_url")
126 # The 'workers' list defines the set of recognized buildworkers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
# Walk every INI section: '[branch ...]' sections feed the branch registry,
# '[worker ...]' sections with credentials (and phase 1, or no phase at all)
# become Worker entries, each limited to one concurrent build.
for section in ini.sections():
    if section.startswith("branch "):
        ini_parse_branch(ini[section])

    if section.startswith("worker "):
        has_credentials = ini.has_option(section, "name") and ini.has_option(section, "password")
        in_phase_one = not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1
        if has_credentials and in_phase_one:
            sl_props = {'dl_lock': None, 'ul_lock': None}
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Optional named network locks serializing downloads/uploads
            # across workers that share a connection.
            for lock_option in ('dl_lock', 'ul_lock'):
                if ini.has_option(section, lock_option):
                    lockname = ini.get(section, lock_option)
                    sl_props[lock_option] = lockname
                    if lockname not in NetLocks:
                        NetLocks[lockname] = locks.MasterLock(lockname)
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
# Workers connect over the PB protocol on this port/connection string.
c['protocols'] = {'pb': {'port': pb_port}}

# Collapse queued requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the JanitorConfigurator call is truncated in this view
# (its remaining arguments and closing parentheses are elided).
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    # NOTE(review): several lines of this function are elided in this view
    # (early-return guards, the path argument of the second data.get call);
    # comments cover only the visible code.
    bldrid = yield bldr.getBuilderId()
    # Most recently completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    complete_at = completed[0]['complete_at']

    # Most recently started build for this builder (path argument elided).
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        # Prefer whichever completion timestamp is newer.
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    # NOTE(review): the 'bldr_info' and 'bldr_sort' helper definitions and
    # several guard lines are elided in this view; the orphaned statements
    # below belong to those helpers.
    # NOTE(review): the local name 'results' shadows the imported buildbot
    # 'results' module inside this function -- confirm intended.

    def is_building(bldr):
        # Busy when the builder has current or leftover in-flight builds.
        return bool(bldr.building) or bool(bldr.old_building)

        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))

        (complete_at, bldr) = item

        # Fall back to a tz-aware timestamp when none is known.
        complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            complete_at = date.replace(tzinfo=tzutc())

        # Sort key: completion time, then builder name for stability.
        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)
    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

# Branch names collected from the parsed '[branch ...]' sections.
branchNames = [branches[b]["name"] for b in branches]

def populateTargets():
    """Shallow-clone each configured branch and collect its target list
    via scripts/dump-target-info.pl.

    NOTE(review): lines are elided in this view (the 'targets' accumulator
    and the read loop around findtargets are incomplete here).
    """
    log.msg("Populating targets, this will take time")
    sourcegit = work_dir + '/source.git'
    for branch in branchNames:
        # Start from a clean clone for every branch.
        if os.path.isdir(sourcegit):
            subprocess.call(["rm", "-rf", sourcegit])

        subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])

        os.makedirs(sourcegit + '/tmp', exist_ok=True)
        findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
            stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)

        # Each output line: "<target/subtarget> <description...>".
        line = findtargets.stdout.readline()
        ta = line.decode().strip().split(' ')

        subprocess.call(["rm", "-rf", sourcegit])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    # NOTE(review): the repourl argument line is elided in this view.
    # Poll every 5 minutes, and once immediately at startup.
    workdir=work_dir+'/work.git', branches=branchNames,
    pollAtLaunch=True, pollinterval=300))
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter whose choices are the release tags found
    on the remote repository for the current base version.

    NOTE(review): large parts of this class are elided in this view (the
    method/property housing the tag scan, loop constructs, and the
    'taglist' initialization); comments cover only the visible code.
    """
    spec_attributes = ["strict", "choices"]

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

        basever = re.search(r'-([0-9]+\.[0-9]+)$', "master") # XXX FIXME

        # Query the remote for all tags without cloning.
        findtags = subprocess.Popen(
            ['git', 'ls-remote', '--tags', repo_url],
            stdout = subprocess.PIPE)

        line = findtags.stdout.readline()

        # Accept only "vX.Y.Z" or "vX.Y.Z-rcN" tag refs.
        tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

        # Keep only tags belonging to this base version.
        if tagver and tagver[1].find(basever[1]) == 0:
            taglist.append(tagver[1])

        # Appending '-z' to final releases makes them sort after their rc
        # tags, so newest finals come first in the reversed order.
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # Reject values outside the computed choice list when strict.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# NOTE(review): the c['schedulers'] initialization and several argument
# lines (names, closing parentheses) are elided in this view.

# Automatic scheduler: build after a 15-minute quiet period on any
# watched branch.
c['schedulers'].append(AnyBranchScheduler(
    change_filter = util.ChangeFilter(branch=branchNames),
    treeStableTimer = 15*60,
    builderNames = list(targets)))

# Manual trigger via the web UI; codebase fields are fixed because branch
# and tag are chosen through the parameters below.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")
    reason = util.StringParameter(
        default = "Trigger build",
    util.NestedParameter(
        label="Build Options",
        util.ChoiceStringParameter(
            label = "Build target",
            # NOTE(review): set("all") evaluates to {'a', 'l'}, not
            # {"all"} -- this looks like a bug, but surrounding lines
            # are elided; confirm against the full file.
            choices = set( "all" ) | targets
386 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
387 # what steps, and which workers can execute them. Note that any particular build will
388 # only take place on one worker.
def IsTaggingRequested(step):
    """doStepIf predicate: truthy when the 'tag' property names a release
    version such as '21.02.0' or '21.02.0-rc1'."""
    requested = step.getProperty("tag")
    if not requested:
        return requested
    return re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", requested)
def IsNoMasterBuild(step):
    """doStepIf predicate: True for every branch except 'master'."""
    current = step.getProperty("branch")
    return not (current == "master")
def IsUsignEnabled(step):
    """doStepIf predicate: truthy when the build's branch has a usign
    signing key configured."""
    current = step.getProperty("branch")
    if not current:
        return current
    return branches[current].get("usign_key")
def IsSignEnabled(step):
    """doStepIf predicate: truthy when the branch can sign artifacts —
    either a usign key or a GPG key is configured."""
    current = step.getProperty("branch")
    usign = IsUsignEnabled(step)
    if usign:
        return usign
    return current and branches[current].get("gpg_key")
def IsKmodArchiveEnabled(step):
    """doStepIf predicate: truthy when the branch opts into archiving
    kmod packages per kernel version."""
    current = step.getProperty("branch")
    if not current:
        return current
    return branches[current].get("kmod_archive")
def GetBaseVersion(branch):
    """Extract the numeric base version from a branch name,
    e.g. 'openwrt-21.02' -> '21.02'.

    NOTE(review): the fallback for non-release branch names (presumably
    returning "master") is elided in this view.
    """
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
def GetVersionPrefix(props):
    """Render the upload path prefix: '<tag>/' for tagged release builds,
    '<basever>-SNAPSHOT/' for release branches.

    NOTE(review): any decorator and the master-branch fallback (presumably
    returning '') are elided in this view.
    """
    branch = props.getProperty("branch")
    basever = GetBaseVersion(branch)
    # A valid release tag takes precedence over the branch snapshot path.
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
def GetConfigSeed(props):
    """Return the branch's config_seed, or '' when the branch is unset
    or has no seed configured."""
    current = props.getProperty("branch")
    seed = current and branches[current].get("config_seed")
    return seed or ""
def GetRsyncParams(props, srcorbin, urlorkey):
    """Look up a branch rsync setting.

    srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key' — combined into the
    branch option name '<srcorbin>_<urlorkey>'.
    """
    current = props.getProperty("branch")
    if not current:
        return current
    option_name = srcorbin + "_" + urlorkey
    return branches[current].get(option_name)
def GetUsignKey(props):
    """Return the usign secret key configured for the build's branch,
    or a falsy value when the branch property is unset."""
    current = props.getProperty("branch")
    if not current:
        return current
    return branches[current].get("usign_key")
def GetNextBuild(builder, requests):
    """Pick the next build request for a builder: tagged builds first,
    then by the configured branch order.

    NOTE(review): the loop over 'requests', the guard bodies, and the
    return statements are elided in this view; the statements below are
    fragments of that loop.
    """
    # order tagged build first
    if r.properties.hasProperty("tag"):

    # then order by branch order
    pbranch = r.properties.getProperty("branch")
    for name in branchNames:

    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict for build steps.

    With tryccache, CC/CXX point at the ccache wrapper scripts installed
    in the build directory; otherwise they alias the discovered compilers.
    'overrides' entries replace any computed values.

    NOTE(review): the dict-literal opening, the if/else around the two CC
    assignments, and the return are elided in this view.
    """
    'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
    'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    # ccache path: route compilation through the wrapper scripts.
    env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
    env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
    env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
    # direct path: use the discovered compiler commands as-is.
    env['CC'] = env['CCC']
    env['CXX'] = env['CCXX']

    if overrides is not None:
        env.update(overrides)
def NetLockDl(props, extralock=None):
    """Renderer building the list of download-lock accesses for a build:
    the worker's configured network 'dl_lock' (exclusive) plus an optional
    extra lock.

    NOTE(review): the local list initialization and the return statement
    are elided in this view; the local name appears to shadow the imported
    'locks' module inside this function -- confirm.
    """
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        locks.append(lock.access('exclusive'))
    if extralock is not None:
        locks.append(extralock)
def NetLockUl(props):
    """Renderer returning the upload-lock access list for a build.

    NOTE(review): the fallback return (presumably an empty list when no
    'ul_lock' property is set) is elided in this view.
    """
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        return [lock.access('exclusive')]
def TagPropertyValue(props):
    """Extract the 'tag' entry from the force-build 'options' dict,
    or None when absent or not a dict."""
    if not props.hasProperty("options"):
        return None
    opts = props.getProperty("options")
    if type(opts) is not dict:
        return None
    return opts.get("tag")
def IsTargetSelected(target):
    """Return a doStepIf predicate that is false when a specific target was
    chosen in the force-build options and it differs from 'target'.

    NOTE(review): the hasProperty guard and the return statements of the
    inner predicate are elided in this view.
    """
    def CheckTargetProperty(step):
        options = step.getProperty("options")
        if type(options) is dict:
            # "all" means no restriction.
            selected_target = options.get("target", "all")
            if selected_target != "all" and selected_target != target:

    return CheckTargetProperty
def UsignSec2Pub(props):
    """Derive the usign public-key text from the branch's base64-encoded
    secret key, reusing the branch's comment line.

    NOTE(review): guard lines are elided in this view. Also note that
    base64.b64encode returns bytes, so formatting it into a str would
    yield a b'...' literal -- confirm against the full file.
    """
    branch = props.getProperty("branch")
    comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
    seckey = branches[branch].get("usign_key")
    seckey = base64.b64decode(seckey)

    # Slice the decoded secret-key blob into the public-key layout and
    # rewrite the comment's trailing "secret key" to "public key".
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
# Per-worker lock serializing 'make download' steps on a worker.
dlLock = locks.WorkerLock("worker_dl")

# Collect all configured worker names for the force-build builder.
# NOTE(review): the 'workerNames = []' initialization is elided in this view.
for worker in c['workers']:
    workerNames.append(worker.workername)

# The force builder runs no steps itself; it only exists as a trigger
# target for the ForceScheduler.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# One builder per "target/subtarget" pair; ts = [target, subtarget].
# NOTE(review): many addStep() calls in this loop are missing argument
# lines and closing parentheses in this view; comments cover visible code.
for target in targets:
    ts = target.split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        descriptionDone = "Shared work directory set up",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True,

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        description = "Finding number of CPUs",

    # find gcc and g++ compilers
    factory.addStep(FileDownload(
        name = "dlfindbinpl",
        mastersrc = scripts_dir + '/findbin.pl',
        workerdest = "../findbin.pl",

    factory.addStep(SetPropertyFromCommand(
        property = "cc_command",
        description = "Finding gcc command",
        command = ["../findbin.pl", "gcc", "", ""],
        haltOnFailure = True,

    factory.addStep(SetPropertyFromCommand(
        property = "cxx_command",
        description = "Finding g++ command",
        command = ["../findbin.pl", "g++", "", ""],
        haltOnFailure = True,

    # see if ccache is available
    factory.addStep(SetPropertyFromCommand(
        property = "ccache_command",
        description = "Testing for ccache command",
        command = ["which", "ccache"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        # hide this step when ccache is simply absent: not an error
        hideStepIf = lambda r, s: r==results.FAILURE,

    # check out the source
    # if repo doesn't exist: 'git clone repourl'
    # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
    # git cat-file -e <commit>
    # git checkout -f <commit>
    # git checkout -B <branch>
        # (tail of the elided Git checkout step)
        haltOnFailure = True,

    # Make sure the remote-tracking ref for the build branch is current.
    factory.addStep(ShellCommand(
        description = "Fetching Git remote refs",
        command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
        haltOnFailure = True,

    # For release builds, check out the requested tag instead of the branch.
    factory.addStep(ShellCommand(
        description = "Checking out Git tag",
        command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
        haltOnFailure = True,
        doStepIf = IsTaggingRequested

    # Verify that Git HEAD points to a tag or branch
    # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        description = "Ensure that Git HEAD is pointing to a branch or tag",
        command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Remove tmp folder",
        command=["rm", "-rf", "tmp/"],

    factory.addStep(ShellCommand(
        name = "rmfeedlinks",
        description = "Remove feed symlinks",
        command=["rm", "-rf", "package/feeds/"],

    # Wrapper scripts so CC/CXX can transparently go through ccache.
    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
        workerdest = "../ccache_cc.sh",

    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
        workerdest = "../ccache_cxx.sh",

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        command=["./scripts/feeds", "update"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        command=["./scripts/feeds", "install", "-a"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,
    # Seed .config from the branch's config_seed, then pin target/subtarget.
    factory.addStep(StringDownload(
        name = "dlconfigseed",
        s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
        workerdest = ".config",

    factory.addStep(ShellCommand(
        descriptionDone = ".config seeded",
        # CONFIG_SIGNED_PACKAGES is enabled only when the branch has a usign key.
        command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),

    factory.addStep(ShellCommand(
        description = "Removing output directory",
        command = ["rm", "-rf", "bin/"],

    factory.addStep(ShellCommand(
        description = "Populating .config",
        command = ["make", "defconfig"],

    # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
    factory.addStep(ShellCommand(
        description = "Checking architecture",
        descriptionDone = "Architecture validated",
        command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
        haltOnFailure = True,
        flunkOnFailure = False, # this is not a build FAILURE

    # Record the libc suffix ('' for musl) used in bin/ directory names.
    factory.addStep(SetPropertyFromCommand(
        description = "Finding libc suffix",
        command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],

    # Install the branch's usign public key plus placeholder private
    # material on the worker (real keys never leave the master).
    factory.addStep(StringDownload(
        name = "dlkeybuildpub",
        s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
        workerdest = "key-build.pub",
        doStepIf = IsUsignEnabled,

    factory.addStep(StringDownload(
        s = "# fake private key",
        workerdest = "key-build",
        doStepIf = IsUsignEnabled,

    factory.addStep(StringDownload(
        name = "dlkeybuilducert",
        s = "# fake certificate",
        workerdest = "key-build.ucert",
        doStepIf = IsUsignEnabled,
    # Share one dl/ download cache per worker via a symlink into the build dir.
    factory.addStep(ShellCommand(
        description = "Preparing dl/",
        descriptionDone = "dl/ prepared",
        command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
        workdir = Property("builddir"),

    factory.addStep(ShellCommand(
        description = "Building and installing GNU tar",
        descriptionDone = "GNU tar built and installed",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,

    # Fetch all sources; serialized per worker via dlLock and optionally
    # across workers via the configured network download lock.
    factory.addStep(ShellCommand(
        description = "Populating dl/",
        descriptionDone = "dl/ populated",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
        locks = NetLockDl.withArgs(dlLock.access('exclusive')),

    factory.addStep(ShellCommand(
        description = "Cleaning base-files",
        command=["make", "package/base-files/clean", "V=s"],

    factory.addStep(ShellCommand(
        description = "Building and installing tools",
        descriptionDone = "Tools built and installed",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Building and installing toolchain",
        descriptionDone = "Toolchain built and installed",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Building kmods",
        descriptionDone = "Kmods built",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        haltOnFailure = True,

    # find kernel version
    factory.addStep(SetPropertyFromCommand(
        name = "kernelversion",
        property = "kernelversion",
        description = "Finding the effective Kernel version",
        command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
        env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
    factory.addStep(ShellCommand(
        description = "Cleaning up package build",
        descriptionDone = "Package build cleaned up",
        command=["make", "package/cleanup", "V=s"],

    factory.addStep(ShellCommand(
        description = "Building packages",
        descriptionDone = "Packages built",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Installing packages",
        descriptionDone = "Packages installed",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
        haltOnFailure = True,

    # CONFIG_SIGNED_PACKAGES= disables signing during indexing; the index
    # files are signed later via the master-side sign steps.
    factory.addStep(ShellCommand(
        description = "Indexing packages",
        descriptionDone = "Packages indexed",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Building and installing images",
        descriptionDone = "Images built and installed",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
        haltOnFailure = True,

    # '|| true': buildinfo generation failures never fail the build.
    factory.addStep(ShellCommand(
        description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
        command = "make -j1 buildinfo V=s || true",
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "json_overview_image_info",
        description = "Generating profiles.json in target folder",
        command = "make -j1 json_overview_image_info V=s || true",
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Calculating checksums",
        descriptionDone = "Checksums calculated",
        command=["make", "-j1", "checksum", "V=s"],
        haltOnFailure = True,

    # Kmod archive: a per-kernel-version directory of all kmod-*.ipk files.
    factory.addStep(ShellCommand(
        descriptionDone = "Kmod directory created",
        command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True,
        doStepIf = IsKmodArchiveEnabled,

    factory.addStep(ShellCommand(
        name = "kmodprepare",
        description = "Preparing kmod archive",
        descriptionDone = "Kmod archive prepared",
        command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True,
        doStepIf = IsKmodArchiveEnabled,

    factory.addStep(ShellCommand(
        description = "Indexing kmod archive",
        descriptionDone = "Kmod archive indexed",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
            Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True,
        doStepIf = IsKmodArchiveEnabled,
    # Sign checksum/package lists on the MASTER: pack them on the worker,
    # upload, run signall.sh on the master, then download and unpack the
    # signed results. All steps are gated on IsSignEnabled.
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        descriptionDone = "Temporary signing directory prepared",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],
        haltOnFailure = True,
        doStepIf = IsSignEnabled,

    factory.addStep(ShellCommand(
        description = "Packing files to sign",
        descriptionDone = "Files to sign packed",
        command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
        haltOnFailure = True,
        doStepIf = IsSignEnabled,

    factory.addStep(FileUpload(
        workersrc = "sign.tar.gz",
        masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        haltOnFailure = True,
        doStepIf = IsSignEnabled,

    factory.addStep(MasterShellCommand(
        description = "Signing files",
        descriptionDone = "Files signed",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
        haltOnFailure = True,
        doStepIf = IsSignEnabled,

    factory.addStep(FileDownload(
        name = "dlsigntargz",
        mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        workerdest = "sign.tar.gz",
        haltOnFailure = True,
        doStepIf = IsSignEnabled,

    factory.addStep(ShellCommand(
        description = "Unpacking signed files",
        descriptionDone = "Signed files unpacked",
        command = ["tar", "-xzf", "sign.tar.gz"],
        haltOnFailure = True,
        doStepIf = IsSignEnabled,
    # Assemble the upload directory layout under tmp/upload/.
    factory.addStep(ShellCommand(
        name = "dirprepare",
        descriptionDone = "Upload directory structure prepared",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True,

    # Non-master builds get a 'packages' symlink to the shared per-basever tree.
    factory.addStep(ShellCommand(
        name = "linkprepare",
        descriptionDone = "Repository symlink prepared",
        command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
        doStepIf = IsNoMasterBuild,
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "kmoddirprepare",
        descriptionDone = "Kmod archive upload directory prepared",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True,
        doStepIf = IsKmodArchiveEnabled,

    # The rsync steps below only run when the branch has a binary_url set.
    factory.addStep(ShellCommand(
        description = "Uploading directory structure",
        descriptionDone = "Directory structure uploaded",
        command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
        env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
        haltOnFailure = True,
        doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),

    # download remote sha256sums to 'target-sha256sums'
    # (best effort: the remote file may not exist yet, so failures are ignored)
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for target",
        descriptionDone = "Remote sha256sums for target fetched",
        command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
        env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),

    # build list of files to upload
    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = scripts_dir + '/sha2rsync.pl',
        workerdest = "../sha2rsync.pl",

    factory.addStep(ShellCommand(
        description = "Building list of files to upload",
        descriptionDone = "List of files to upload built",
        command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
        haltOnFailure = True,
1064 factory.addStep(FileDownload(
1065 name = "dlrsync.sh",
1066 mastersrc = scripts_dir + '/rsync.sh',
1067 workerdest = "../rsync.sh",
1071 # upload new files and update existing ones
1072 factory.addStep(ShellCommand(
1073 name = "targetupload",
1074 description = "Uploading target files",
1075 descriptionDone = "Target files uploaded",
1076 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1077 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1078 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1079 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1080 haltOnFailure = True,
1082 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1085 # delete files which don't exist locally
1086 factory.addStep(ShellCommand(
1087 name = "targetprune",
1088 description = "Pruning target files",
1089 descriptionDone = "Target files pruned",
1090 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1091 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1092 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1093 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1094 haltOnFailure = True,
1097 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
# Upload the per-kernel-version kmod archive for this target/subtarget.
1100 factory.addStep(ShellCommand(
1101 name = "kmodupload",
1102 description = "Uploading kmod archive",
1103 descriptionDone = "Kmod archive uploaded",
# --delete mirrors the local kmods tree exactly; --delay-updates keeps the
# remote side consistent until the whole transfer lands.
1104 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
# Source and destination paths both embed the kernelversion build property.
1105 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1106 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1107 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1108 haltOnFailure = True,
# Requires BOTH the kmod-archive feature flag and a configured "bin" URL.
1111 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
# Build the list of source tarballs to upload: regular non-empty files in
# dl/ that are newer than .config (i.e. fetched by this build), excluding
# dotfiles, .hash files and in-progress .dl downloads. Result goes to the
# "sourcelist" file consumed by the sourceupload step.
1114 factory.addStep(ShellCommand(
1115 name = "sourcelist",
1116 description = "Finding source archives to upload",
1117 descriptionDone = "Source archives to upload found",
# '%f\\n' in Python source yields a literal %f\n printf format for find.
1118 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1119 haltOnFailure = True,
# Upload the source archives listed by the sourcelist step to the "src"
# rsync target. --size-only is used because upstream tarballs are immutable;
# mtimes are not comparable across workers.
1122 factory.addStep(ShellCommand(
1123 name = "sourceupload",
1124 description = "Uploading source archives",
1125 descriptionDone = "Source archives uploaded",
1126 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
# --partial-dir additionally embeds the worker name since multiple workers
# may upload into the same shared dl/ destination concurrently.
1127 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1128 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1129 haltOnFailure = True,
# Only run when a "src" rsync URL is configured (note: "src", not "bin").
1132 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
# Purely informational: report free disk space in the build directory.
# All failure flags are off, so this step can never affect the build result.
1135 factory.addStep(ShellCommand(
1137 description = "Reporting disk usage",
1138 command=["df", "-h", "."],
# LC_ALL=C keeps df output in a stable, locale-independent format.
1139 env={'LC_ALL': 'C'},
1140 haltOnFailure = False,
1141 flunkOnFailure = False,
1142 warnOnFailure = False,
# Purely informational: report total size of the build directory tree.
# Like the df step, failures here never affect the build result.
1146 factory.addStep(ShellCommand(
1148 description = "Reporting estimated file space usage",
1149 command=["du", "-sh", "."],
1150 env={'LC_ALL': 'C'},
1151 haltOnFailure = False,
1152 flunkOnFailure = False,
1153 warnOnFailure = False,
# Informational ccache statistics dump. ccache may live in the build's own
# staging_dir, hence the PATH override appended via MakeEnv.
1157 factory.addStep(ShellCommand(
1158 name = "ccachestat",
1159 description = "Reporting ccache stats",
1160 command=["ccache", "-s"],
1161 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1162 want_stderr = False,
1163 haltOnFailure = False,
1164 flunkOnFailure = False,
1165 warnOnFailure = False,
# Hide the step from the UI entirely when ccache isn't available/failed.
1166 hideStepIf = lambda r, s: r==results.FAILURE,
# Register the per-target builder, a Triggerable scheduler for it, and a
# Trigger step on the shared force-build factory so individual targets can
# be force-built selectively.
1169 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1171 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1172 force_factory.addStep(steps.Trigger(
1173 name = "trigger_%s" % target,
1174 description = "Triggering %s build" % target,
1175 schedulerNames = [ "trigger_%s" % target ],
# Forward the force-build reason and tag into the triggered build.
1176 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
# Only trigger targets the user actually selected in the force form.
1177 doStepIf = IsTargetSelected(target),
1181 ####### STATUS TARGETS
1183 # 'status' is a list of Status Targets. The results of each build will be
1184 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1185 # including web pages, email senders, and IRC bots.
# Web UI: only enabled when the [phase1] section configures a bind address.
1187 if "status_bind" in inip1:
1189 'port': inip1.get("status_bind"),
1191 'waterfall_view': True,
1192 'console_view': True,
# Optional single-user HTTP auth; the configured user gets the "admins"
# role, which is the only role allowed to hit control endpoints.
1197 if "status_user" in inip1 and "status_password" in inip1:
1198 c['www']['auth'] = util.UserPasswordAuth([
1199 (inip1.get("status_user"), inip1.get("status_password"))
1201 c['www']['authz'] = util.Authz(
1202 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1203 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC reporter, driven entirely by the [irc] section of config.ini.
1207 if ini.has_section("irc"):
1209 irc_host = iniirc.get("host", None)
1210 irc_port = iniirc.getint("port", 6667)
1211 irc_chan = iniirc.get("channel", None)
1212 irc_nick = iniirc.get("nickname", None)
1213 irc_pass = iniirc.get("password", None)
# host, nickname and channel are mandatory; port and password have defaults.
1215 if irc_host and irc_nick and irc_chan:
1216 irc = reporters.IRC(irc_host, irc_nick,
1217 port = irc_port,
1218 password = irc_pass,
1219 channels = [ irc_chan ],
# Only report state changes, not every build, to keep the channel quiet.
1220 notify_events = [ 'exception', 'problem', 'recovery' ]
1223 c['services'].append(irc)
# Map commit revisions of the openwrt git URL to gitweb commit-view links
# in the web UI (\1 = repo name captured from the clone URL, %s = revision).
1225 c['revlink'] = util.RevlinkMatch([
1226 r'https://git.openwrt.org/openwrt/(.*).git'
1228 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1233 # This specifies what database buildbot uses to store its state. You can leave
1234 # this at its default for all but the largest installations.
1235 'db_url' : "sqlite:///state.sqlite",
# Opt out of buildbot's anonymous usage reporting.
1238 c['buildbotNetUsageData'] = None