2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Write our own PID file if the twistd daemon has not created one yet
# (e.g. when this config is loaded in a container entry point).
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI-style deployment configuration; path can be overridden
# via the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# These sections are mandatory — fail fast instead of with obscure
# KeyErrors further down.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
    raise ValueError("Fix your configuration")

# Master working directory and location of the helper scripts checkout.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")
# Seed content appended to each builder's .config (may be empty).
# NOTE(review): 'inip1' is not assigned in the visible code — presumably
# bound to ini['phase1'] in an elided line; confirm.
config_seed = inip1.get("config_seed", "")

# Source repository and branch to build.
repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

# rsync destination for built binaries plus default client options.
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout only applies when talking to an rsync daemon
# (host::module or rsync:// URL), not over a remote shell.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

# rsync destination for source tarballs (dl/), mirroring the binary setup.
rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

# Default usign comment embeds a prettified branch name,
# e.g. "openwrt-21.02" -> "Openwrt 21.02 key".
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

# Whether to build and publish the per-kernel kmod archive.
enable_kmod_archive = inip1.getboolean("kmod_archive", False)

# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.

c['buildbotURL'] = inip1.get("buildbot_url")
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# NOTE(review): c['workers'] and NetLocks are used below but their
# initialisation is not in the visible code — presumably c['workers'] = []
# and NetLocks = {} in elided lines; confirm.
for section in ini.sections():
    if section.startswith("worker "):
        # Only phase-1 workers (or workers with no phase set) belong here.
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Optional per-worker master-side locks serialising downloads
            # and uploads; shared lock names share one MasterLock instance.
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            # One build at a time per worker; lock names travel as
            # worker properties for the renderers below.
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))

# 'protocols' — how the master talks to the workers (PB protocol).
c['protocols'] = {'pb': {'port': pb_port}}
# Merge compatible pending build requests into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the closing of this JanitorConfigurator(...) call is not
# in the visible code.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    # NOTE(review): the filter-list brackets around the two Filters are
    # not in the visible code.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)

    complete_at = completed[0]['complete_at']

    # Also consider the most recently started build: it may have
    # completed later than the newest build request.
    # NOTE(review): the data path tuple for this get() is not visible.
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not-skipped build, oldest first.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Current or pre-restart ("old") builds both count as busy.
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the fragments below belong to two helper functions
    # whose 'def' lines are not in the visible code — apparently
    # 'bldr_info' (builds a (complete_at, builder) tuple via
    # getNewestCompleteTime) and 'bldr_sort' (the sort key).
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))

        (complete_at, bldr) = item

        # tzutc() makes the fallback datetime comparable with the
        # tz-aware values coming from the database.
        complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    # Log each builder with its priority timestamp for debugging.
    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

def populateTargets():
    """Fill the global 'targets' list by making a shallow clone of the
    source tree and running its dump-target-info.pl script."""
    sourcegit = work_dir + '/source.git'
    # Always start from a fresh shallow clone.
    if os.path.isdir(sourcegit):
        subprocess.call(["rm", "-rf", sourcegit])

    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, sourcegit])

    os.makedirs(sourcegit + '/tmp', exist_ok=True)
    findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
        stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)

    # NOTE(review): the read-loop scaffolding (while/EOF break) around the
    # three lines below is not in the visible code. Each output line is
    # "<target/subtarget> ...", of which only the first field is kept.
        line = findtargets.stdout.readline()

        ta = line.decode().strip().split(' ')
        targets.append(ta[0])

    # Drop the throwaway clone again.
    subprocess.call(["rm", "-rf", sourcegit])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured OpenWrt Git
# repository branch directly.

# NOTE(review): the GitPoller's repository-url argument line is not in
# the visible code.
c['change_source'] = []
c['change_source'].append(GitPoller(
    workdir=work_dir+'/work.git', branch=repo_branch,
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

class TagChoiceParameter(BaseParameter):
    """ForceScheduler parameter whose choices are the release tags of the
    tracked branch, queried live from the remote repository."""
    spec_attributes = ["strict", "choices"]

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    # NOTE(review): the tag-scanning code below appears to belong to a
    # 'choices' property whose def line is not in the visible code.

        # Base version of the branch, e.g. "21.02" in "openwrt-21.02".
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        # List remote tags without needing a local clone.
        findtags = subprocess.Popen(
            ['git', 'ls-remote', '--tags', repo_url],
            stdout = subprocess.PIPE)

        line = findtags.stdout.readline()

        # Accept release tags and release candidates: vX.Y.Z / vX.Y.Z-rcN.
        tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

        # Only offer tags belonging to this branch's base version.
        if tagver and tagver[1].find(basever[1]) == 0:
            taglist.append(tagver[1])

        # Newest first; suffixing non-rc tags with '-z' makes final
        # releases sort before their release candidates.
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # In strict mode only currently-offered tags are accepted.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# Branch scheduler: build every target when the tracked branch changes.
# NOTE(review): several keyword-argument lines (scheduler names, list
# brackets, parameter names) of these two scheduler definitions are not
# in the visible code.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Force scheduler: manual builds via the dummy 00_force_build builder.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Codebase is fixed/empty — the repository is not selectable.
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    # Build options: which target to build (or all of them).
    util.NestedParameter(
        label="Build Options",
        util.ChoiceStringParameter(
            label = "Build target",
            choices = [ "all" ] + targets
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

def IsTaggingRequested(step):
    """doStepIf helper: true when the force-build 'tag' property holds a
    valid release tag (x.y.z or x.y.z-rcN).
    NOTE(review): the return statements of both branches are not in the
    visible code."""
    val = step.getProperty("tag")
    if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoMasterBuild(step):
    """doStepIf helper: true for builds of any branch other than 'master'."""
    current_branch = step.getProperty("branch")
    return current_branch != "master"
def GetBaseVersion(branch):
    """Map a release branch like 'openwrt-21.02' to its base version
    '21.02'.
    NOTE(review): the fallback branch (presumably returning 'master') is
    not in the visible code."""
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
# NOTE(review): used below as an Interpolate kwarg renderer — presumably
# decorated with @properties.renderer in the full file; confirm.
def GetVersionPrefix(props):
    """Upload directory prefix: '<tag>/' for tagged release builds,
    '<basever>-SNAPSHOT/' for release branches.
    NOTE(review): the final fallback (presumably returning '') is not in
    the visible code."""
    branch = props.getProperty("branch")
    basever = GetBaseVersion(branch)
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
def GetNextBuild(builder, requests):
    """nextBuild helper: prefer build requests carrying a 'tag' property
    (forced release builds) over ordinary requests.
    NOTE(review): the loop header binding 'r' over 'requests', and the
    returns, are not in the visible code."""
        if r.properties and r.properties.hasProperty("tag"):

    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment for build steps. CCC/CCXX always carry the
    detected host compilers; with tryccache=True, CC/CXX point at the
    ccache wrapper scripts instead of the raw compilers.
    NOTE(review): the enclosing dict literal and the if/else scaffolding
    around the assignments below are not in the visible code."""
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
        # ccache path: route CC/CXX through the wrapper scripts placed in
        # the build directory by the dlccachecc/dlccachecxx steps.
        env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
        env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
        env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
        # Plain path: use the detected compilers directly.
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']

    if overrides is not None:
        env.update(overrides)
def NetLockDl(props):
    """Renderer: exclusive access to the worker's configured download
    MasterLock ('dl_lock' worker property), if any.
    NOTE(review): the no-lock fallback (presumably returning []) is not
    in the visible code."""
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        return [lock.access('exclusive')]
def NetLockUl(props):
    """Renderer: exclusive access to the worker's configured upload
    MasterLock ('ul_lock' worker property), if any.
    NOTE(review): the no-lock fallback (presumably returning []) is not
    in the visible code."""
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        return [lock.access('exclusive')]
def TagPropertyValue(props):
    """set_properties callable: extract the 'tag' value from the force
    scheduler's nested 'options' property, when present and well-formed."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
def IsTargetSelected(target):
    """Return a doStepIf callable that is true unless the force-build
    'options' property selects a different specific target."""
    def CheckTargetProperty(step):
        # NOTE(review): error handling (apparently a try/except) and the
        # return statements around this body are not in the visible code.
            options = step.getProperty("options")
            if type(options) is dict:
                selected_target = options.get("target", "all")
                if selected_target != "all" and selected_target != target:

    return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    The decoded secret key is sliced as: bytes 0..2 (algorithm id),
    bytes 32..40 (key number) and bytes 72.. (public key material); their
    concatenation, re-encoded as base64, forms the public key line. The
    comment line has a trailing 'secret key' rewritten to 'public key'.

    Returns None when the input is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # Bug fix: b64encode() returns bytes — without .decode() the
    # formatted string would embed a Python "b'...'" repr, producing an
    # invalid key-build.pub file.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
# Worker-side lock limiting concurrent dl/ downloads per worker host.
dlLock = locks.WorkerLock("worker_dl")

# Collect all registered worker names for builder assignment.
# NOTE(review): 'workerNames' is presumably initialised to [] in an
# elided line; confirm.
for worker in c['workers']:
    workerNames.append(worker.workername)

# Dummy builder whose only job is to trigger the per-target builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# One builder per "target/subtarget" pair.
for target in targets:
    ts = target.split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # find gcc and g++ compilers
    factory.addStep(FileDownload(
        name = "dlfindbinpl",
        mastersrc = scripts_dir + '/findbin.pl',
        workerdest = "../findbin.pl",

    # NOTE(review): the command=[ ... ] brackets of the two findbin.pl
    # steps are not in the visible code.
    factory.addStep(SetPropertyFromCommand(
        property = "cc_command",
        description = "Finding gcc command",
        "../findbin.pl", "gcc", "", "",
        haltOnFailure = True))

    factory.addStep(SetPropertyFromCommand(
        property = "cxx_command",
        description = "Finding g++ command",
        "../findbin.pl", "g++", "", "",
        haltOnFailure = True))

    # see if ccache is available
    factory.addStep(SetPropertyFromCommand(
        property = "ccache_command",
        command = ["which", "ccache"],
        description = "Testing for ccache command",
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    # Workaround bug when switching from a checked out tag back to a branch
    # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        name = "gitcheckout",
        description = "Ensure that Git HEAD is sane",
        command = Interpolate("if [ -d .git ]; then git checkout -f %(prop:branch)s && git branch --set-upstream-to origin/%(prop:branch)s || rm -fr .git; else exit 0; fi"),
        haltOnFailure = True))

    # check out the source
    # if repo doesn't exist: 'git clone repourl'
    # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
    # 'git fetch -t repourl branch; git reset --hard revision'
    # NOTE(review): the enclosing factory.addStep(Git(...)) call of these
    # two argument lines is not in the visible code.
        branch = Interpolate("%(prop:branch)s"),
        haltOnFailure = True,

    # Update the remote-tracking ref so the tag checkout below sees it.
    factory.addStep(ShellCommand(
        description = "Fetching Git remote refs",
        command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],

    # Only runs for forced release builds with a valid 'tag' property.
    factory.addStep(ShellCommand(
        description = "Checking out Git tag",
        command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
        haltOnFailure = True,
        doStepIf = IsTaggingRequested

    # Verify that Git HEAD points to a tag or branch
    # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        description = "Ensure that Git HEAD is pointing to a branch or tag",
        command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Remove tmp folder",
        command=["rm", "-rf", "tmp/"]))

    factory.addStep(ShellCommand(
        name = "rmfeedlinks",
        description = "Remove feed symlinks",
        command=["rm", "-rf", "package/feeds/"]))
    # Install the ccache wrapper scripts used by MakeEnv(tryccache=True).
    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
        workerdest = "../ccache_cc.sh",

    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
        workerdest = "../ccache_cxx.sh",

    # feeds
    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        command=["./scripts/feeds", "update"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        command=["./scripts/feeds", "install", "-a"],
        env = MakeEnv(tryccache=True),

    # seed config
    if config_seed is not None:
        factory.addStep(StringDownload(
            name = "dlconfigseed",
            s = config_seed + '\n',
            workerdest = ".config",

    # Append target selection and signing switch to the seeded .config.
    factory.addStep(ShellCommand(
        description = "Seeding .config",
        command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
    factory.addStep(ShellCommand(
        description = "Removing output directory",
        command = ["rm", "-rf", "bin/"]

    # Expand the seeded .config to a full configuration.
    factory.addStep(ShellCommand(
        description = "Populating .config",
        command = ["make", "defconfig"],

    # Sanity check: defconfig kept the requested target selected.
    factory.addStep(ShellCommand(
        description = "Checking architecture",
        command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],

    # Libc suffix for artifact paths: empty for musl, '-<libc>' otherwise.
    factory.addStep(SetPropertyFromCommand(
        description = "Finding libc suffix",
        command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

    # Install a real public key plus placeholder private key/certificate;
    # actual signing happens on the master (signall.sh) later.
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "key-build.pub",

        factory.addStep(StringDownload(
            s = "# fake private key",
            workerdest = "key-build",

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "key-build.ucert",
    # Share one dl/ cache across builds on this worker host.
    factory.addStep(ShellCommand(
        description = "Preparing dl/",
        command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",

    # GNU tar is needed before 'make download' for reproducible unpacking.
    factory.addStep(ShellCommand(
        description = "Building and installing GNU tar",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
        env = MakeEnv(tryccache=True),

    # Fetch all source tarballs; serialised by the dl and network locks.
    factory.addStep(ShellCommand(
        description = "Populating dl/",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
        locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),

    factory.addStep(ShellCommand(
        description = "Cleaning base-files",
        command=["make", "package/base-files/clean", "V=s"]

    # Main build sequence: tools -> toolchain -> kernel/kmods.
    factory.addStep(ShellCommand(
        description = "Building and installing tools",
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
        env = MakeEnv(tryccache=True),

    factory.addStep(ShellCommand(
        description = "Building and installing toolchain",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],

    factory.addStep(ShellCommand(
        description = "Building kmods",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],

    # find kernel version
    factory.addStep(SetPropertyFromCommand(
        name = "kernelversion",
        property = "kernelversion",
        description = "Finding the effective Kernel version",
        command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
        env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
    factory.addStep(ShellCommand(
        description = "Cleaning up package build",
        command=["make", "package/cleanup", "V=s"]

    factory.addStep(ShellCommand(
        description = "Building packages",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],

    factory.addStep(ShellCommand(
        description = "Installing packages",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],

    # Index without signing — signing is performed on the master later.
    factory.addStep(ShellCommand(
        description = "Indexing packages",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],

    factory.addStep(ShellCommand(
        description = "Building and installing images",
        command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],

    # Metadata generators are best-effort ('|| true'): older branches may
    # lack these make targets.
    factory.addStep(ShellCommand(
        description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
        command = "make -j1 buildinfo V=s || true",

    factory.addStep(ShellCommand(
        name = "json_overview_image_info",
        description = "Generate profiles.json in target folder",
        command = "make -j1 json_overview_image_info V=s || true",

    factory.addStep(ShellCommand(
        description = "Calculating checksums",
        command=["make", "-j1", "checksum", "V=s"],
    # Optional kmod archive: kernel modules grouped by kernel version so
    # users can fetch modules matching an older installed kernel.
    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            description = "Creating kmod directory",
            command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],

        factory.addStep(ShellCommand(
            name = "kmodprepare",
            description = "Preparing kmod archive",
            command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],

        factory.addStep(ShellCommand(
            description = "Indexing kmod archive",
            command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
                Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
    # Signing round-trip: pack index files, upload to the master, sign
    # them there (signall.sh keeps keys off the workers), download and
    # unpack the signed results.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],

        factory.addStep(ShellCommand(
            description = "Packing files to sign",
            command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),

        factory.addStep(FileUpload(
            workersrc = "sign.tar.gz",
            masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),

        factory.addStep(MasterShellCommand(
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

        factory.addStep(FileDownload(
            name = "dlsigntargz",
            mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
            workerdest = "sign.tar.gz",

        factory.addStep(ShellCommand(
            description = "Unpacking signed files",
            command = ["tar", "-xzf", "sign.tar.gz"],
    # Create the remote directory layout locally, then mirror it.
    factory.addStep(ShellCommand(
        description = "Preparing upload directory structure",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],

    # Release branches share one packages-<basever> tree via a symlink;
    # skipped for master builds.
    factory.addStep(ShellCommand(
        name = "linkprepare",
        description = "Preparing repository symlink",
        command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
        doStepIf = IsNoMasterBuild,

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmoddirprepare",
            description = "Preparing kmod archive upload directory",
            command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],

    factory.addStep(ShellCommand(
        description = "Uploading directory structure",
        command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
    # download remote sha256sums to 'target-sha256sums'
    # Best-effort (all failure flags off): the file may not exist yet on
    # a first upload.
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for target",
        command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # build list of files to upload
    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = scripts_dir + '/sha2rsync.pl',
        workerdest = "../sha2rsync.pl",

    # Diff local vs. remote checksums to upload only changed files.
    factory.addStep(ShellCommand(
        description = "Building list of files to upload",
        command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
        haltOnFailure = True,

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/rsync.sh',
        workerdest = "../rsync.sh",

    # upload new files and update existing ones
    factory.addStep(ShellCommand(
        name = "targetupload",
        description = "Uploading target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # delete files which don't exist locally
    factory.addStep(ShellCommand(
        name = "targetprune",
        description = "Pruning target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmodupload",
            description = "Uploading kmod archive",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
                Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = True,

    if rsync_src_url is not None:
        # Only upload sources fetched during this build (-newer .config),
        # skipping hidden/auxiliary files.
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
            haltOnFailure = True

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = True,
    # Informational steps only — none of them may fail the build.
    factory.addStep(ShellCommand(
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # Register the per-target builder and a Triggerable scheduler, and
    # add the corresponding trigger step to the force-build factory.
    c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % target,
        description = "Triggering %s build" % target,
        schedulerNames = [ "trigger_%s" % target ],
        set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
        doStepIf = IsTargetSelected(target)
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# NOTE(review): the enclosing c['www'] = { ... 'plugins': { ... } }
# literal around the first three settings is not in the visible code.
if "status_bind" in inip1:
        'port': inip1.get("status_bind"),
        'waterfall_view': True,
        'console_view': True,

    # Optional basic auth; the configured user gets the admins role,
    # which is required for any control endpoint.
    if "status_user" in inip1 and "status_password" in inip1:
        c['www']['auth'] = util.UserPasswordAuth([
            (inip1.get("status_user"), inip1.get("status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC notifications.
# NOTE(review): 'iniirc' is presumably bound to ini['irc'] in an elided
# line; confirm.
if ini.has_section("irc"):
    irc_host = iniirc.get("host", None)
    irc_port = iniirc.getint("port", 6667)
    irc_chan = iniirc.get("channel", None)
    irc_nick = iniirc.get("nickname", None)
    irc_pass = iniirc.get("password", None)

    # host, nickname and channel are the minimum viable configuration.
    if irc_host and irc_nick and irc_chan:
        irc = reporters.IRC(irc_host, irc_nick,
            password = irc_pass,
            channels = [ irc_chan ],
            notify_events = [ 'exception', 'problem', 'recovery' ]

        c['services'].append(irc)
# Turn commit hashes into links to the OpenWrt gitweb instance.
# NOTE(review): the separator between the pattern list and the
# replacement template is not in the visible code.
c['revlink'] = util.RevlinkMatch([
    r'https://git.openwrt.org/openwrt/(.*).git'
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing c['db'] = { ... } literal around this entry
# is not in the visible code.
'db_url' : "sqlite:///state.sqlite",

# Opt out of buildbot's anonymous usage-data reporting.
c['buildbotNetUsageData'] = None