2 # ex: set syntax=python:
import base64
import configparser
import os
import re
import subprocess
from datetime import datetime, timedelta

from dateutil.tz import tzutc
from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.data import resultspec
from buildbot.plugins import reporters
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import properties
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Property
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import BaseParameter
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.forcesched import ValidationError
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.source.git import Git
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
55 raise ValueError("Fix your configuration")
59 # This is the dictionary that the buildmaster pays attention to. We also use
60 # a shorter alias to save typing.
61 c = BuildmasterConfig = {}
63 ####### PROJECT IDENTITY
65 # the 'title' string will appear at the top of this buildbot
66 # installation's html.WebStatus home page (linked to the
67 # 'titleURL') and is embedded in the title of the waterfall HTML page.
69 c['title'] = ini['general'].get("title")
70 c['titleURL'] = ini['general'].get("title_url")
72 # the 'buildbotURL' string should point to the location where the buildbot's
73 # internal web server (usually the html.WebStatus page) is visible. This
74 # typically uses the port number set in the Waterfall 'status' entry, but
75 # with an externally-visible host name which the buildbot cannot figure out
78 c['buildbotURL'] = inip1.get("buildbot_url")
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
96 if ini.has_option(section, "dl_lock"):
97 lockname = ini.get(section, "dl_lock")
98 sl_props['dl_lock'] = lockname
99 if lockname not in NetLocks:
100 NetLocks[lockname] = locks.MasterLock(lockname)
101 if ini.has_option(section, "ul_lock"):
102 lockname = ini.get(section, "ul_lock")
103 sl_props['ul_lock'] = lockname
104 if lockname not in NetLocks:
105 NetLocks[lockname] = locks.MasterLock(lockname)
106 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
108 # PB port can be either a numeric port or a connection string
109 pb_port = inip1.get("port") or 9989
110 c['protocols'] = {'pb': {'port': pb_port}}
113 c['collapseRequests'] = True
115 # Reduce amount of backlog data
116 c['configurators'] = [util.JanitorConfigurator(
117 logHorizon=timedelta(days=3),
121 @defer.inlineCallbacks
122 def getNewestCompleteTime(bldr):
123 """Returns the complete_at of the latest completed and not SKIPPED
124 build request for this builder, or None if there are no such build
125 requests. We need to filter out SKIPPED requests because we're
126 using collapseRequests=True which is unfortunately marking all
127 previous requests as complete when new buildset is created.
129 @returns: datetime instance or None, via Deferred
132 bldrid = yield bldr.getBuilderId()
133 completed = yield bldr.master.data.get(
134 ('builders', bldrid, 'buildrequests'),
136 resultspec.Filter('complete', 'eq', [True]),
137 resultspec.Filter('results', 'ne', [results.SKIPPED]),
139 order=['-complete_at'], limit=1)
143 complete_at = completed[0]['complete_at']
145 last_build = yield bldr.master.data.get(
148 resultspec.Filter('builderid', 'eq', [bldrid]),
150 order=['-started_at'], limit=1)
152 if last_build and last_build[0]:
153 last_complete_at = last_build[0]['complete_at']
154 if last_complete_at and (last_complete_at > complete_at):
155 return last_complete_at
159 @defer.inlineCallbacks
160 def prioritizeBuilders(master, builders):
161 """Returns sorted list of builders by their last timestamp of completed and
164 @returns: list of sorted builders
167 def is_building(bldr):
168 return bool(bldr.building) or bool(bldr.old_building)
171 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
172 d.addCallback(lambda complete_at: (complete_at, bldr))
176 (complete_at, bldr) = item
180 complete_at = date.replace(tzinfo=tzutc())
182 if is_building(bldr):
184 complete_at = date.replace(tzinfo=tzutc())
186 return (complete_at, bldr.name)
188 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
189 results.sort(key=bldr_sort)
192 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
194 return [r[1] for r in results]
196 c['prioritizeBuilders'] = prioritizeBuilders
198 ####### CHANGESOURCES
200 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
201 scripts_dir = os.path.abspath("../scripts")
203 config_seed = inip1.get("config_seed", "")
205 repo_url = ini['repo'].get("url")
206 repo_branch = ini['repo'].get("branch", "master")
208 rsync_bin_url = ini['rsync'].get("binary_url")
209 rsync_bin_key = ini['rsync'].get("binary_password")
210 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
212 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
213 rsync_bin_defopts += ["--contimeout=20"]
215 rsync_src_url = ini['rsync'].get("source_url")
216 rsync_src_key = ini['rsync'].get("source_password")
217 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
219 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
220 rsync_src_defopts += ["--contimeout=20"]
223 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
225 if ini.has_section("usign"):
226 usign_key = ini['usign'].get("key")
227 usign_comment = ini['usign'].get("comment", usign_comment)
229 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
235 if not os.path.isdir(work_dir+'/source.git'):
236 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
238 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
240 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
241 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
242 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
245 line = findtargets.stdout.readline()
248 ta = line.decode().strip().split(' ')
249 targets.append(ta[0])
252 # the 'change_source' setting tells the buildmaster how it should find out
253 # about source code changes. Here we point to the buildbot clone of pyflakes.
255 c['change_source'] = []
256 c['change_source'].append(GitPoller(
258 workdir=work_dir+'/work.git', branch=repo_branch,
263 # Configure the Schedulers, which decide how to react to incoming changes. In this
264 # case, just kick off a 'basebuild' build
266 class TagChoiceParameter(BaseParameter):
267 spec_attributes = ["strict", "choices"]
271 def __init__(self, name, label=None, **kw):
272 super().__init__(name, label, **kw)
273 self._choice_list = []
278 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
281 findtags = subprocess.Popen(
282 ['git', 'ls-remote', '--tags', repo_url],
283 stdout = subprocess.PIPE)
286 line = findtags.stdout.readline()
291 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
293 if tagver and tagver[1].find(basever[1]) == 0:
294 taglist.append(tagver[1])
296 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
297 taglist.insert(0, '')
299 self._choice_list = taglist
301 return self._choice_list
303 def parse_from_arg(self, s):
304 if self.strict and s not in self._choice_list:
305 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
309 c['schedulers'].append(SingleBranchScheduler(
311 change_filter = filter.ChangeFilter(branch=repo_branch),
312 treeStableTimer = 60,
313 builderNames = targets))
315 c['schedulers'].append(ForceScheduler(
317 buttonName = "Force builds",
318 label = "Force build details",
319 builderNames = [ "00_force_build" ],
322 util.CodebaseParameter(
324 label = "Repository",
325 branch = util.FixedParameter(name = "branch", default = ""),
326 revision = util.FixedParameter(name = "revision", default = ""),
327 repository = util.FixedParameter(name = "repository", default = ""),
328 project = util.FixedParameter(name = "project", default = "")
332 reason = util.StringParameter(
335 default = "Trigger build",
341 util.NestedParameter(
343 label="Build Options",
346 util.ChoiceStringParameter(
348 label = "Build target",
350 choices = [ "all" ] + targets
364 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
365 # what steps, and which workers can execute them. Note that any particular build will
366 # only take place on one worker.
368 def IsTaggingRequested(step):
369 val = step.getProperty("tag")
370 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
375 def IsNoMasterBuild(step):
376 return repo_branch != "master"
378 def GetBaseVersion():
379 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
380 return repo_branch.split('-')[1]
385 def GetVersionPrefix(props):
386 basever = GetBaseVersion()
387 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
388 return "%s/" % props["tag"]
389 elif basever != "master":
390 return "%s-SNAPSHOT/" % basever
394 def GetNextBuild(builder, requests):
396 if r.properties and r.properties.hasProperty("tag"):
400 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
403 def MakeEnv(overrides=None, tryccache=False):
405 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
406 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
409 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
410 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
411 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
413 env['CC'] = env['CCC']
414 env['CXX'] = env['CCXX']
416 if overrides is not None:
417 env.update(overrides)
421 def NetLockDl(props):
423 if props.hasProperty("dl_lock"):
424 lock = NetLocks[props["dl_lock"]]
426 return [lock.access('exclusive')]
431 def NetLockUl(props):
433 if props.hasProperty("ul_lock"):
434 lock = NetLocks[props["ul_lock"]]
436 return [lock.access('exclusive')]
441 def TagPropertyValue(props):
442 if props.hasProperty("options"):
443 options = props.getProperty("options")
444 if type(options) is dict:
445 return options.get("tag")
448 def IsTargetSelected(target):
449 def CheckTargetProperty(step):
451 options = step.getProperty("options")
452 if type(options) is dict:
453 selected_target = options.get("target", "all")
454 if selected_target != "all" and selected_target != target:
461 return CheckTargetProperty
463 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
465 seckey = base64.b64decode(seckey)
469 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
470 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
475 dlLock = locks.WorkerLock("worker_dl")
479 for worker in c['workers']:
480 workerNames.append(worker.workername)
482 force_factory = BuildFactory()
484 c['builders'].append(BuilderConfig(
485 name = "00_force_build",
486 workernames = workerNames,
487 factory = force_factory))
489 for target in targets:
490 ts = target.split('/')
492 factory = BuildFactory()
494 # setup shared work directory if required
495 factory.addStep(ShellCommand(
497 description = "Setting up shared work directory",
498 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
500 haltOnFailure = True))
502 # find number of cores
503 factory.addStep(SetPropertyFromCommand(
506 description = "Finding number of CPUs",
507 command = ["nproc"]))
509 # find gcc and g++ compilers
510 factory.addStep(FileDownload(
511 name = "dlfindbinpl",
512 mastersrc = scripts_dir + '/findbin.pl',
513 workerdest = "../findbin.pl",
516 factory.addStep(SetPropertyFromCommand(
518 property = "cc_command",
519 description = "Finding gcc command",
521 "../findbin.pl", "gcc", "", "",
523 haltOnFailure = True))
525 factory.addStep(SetPropertyFromCommand(
527 property = "cxx_command",
528 description = "Finding g++ command",
530 "../findbin.pl", "g++", "", "",
532 haltOnFailure = True))
534 # see if ccache is available
535 factory.addStep(SetPropertyFromCommand(
536 property = "ccache_command",
537 command = ["which", "ccache"],
538 description = "Testing for ccache command",
539 haltOnFailure = False,
540 flunkOnFailure = False,
541 warnOnFailure = False,
544 # Workaround bug when switching from a checked out tag back to a branch
545 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
546 factory.addStep(ShellCommand(
547 name = "gitcheckout",
548 description = "Ensure that Git HEAD is sane",
549 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
550 haltOnFailure = True))
552 # check out the source
554 # if repo doesn't exist: 'git clone repourl'
555 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
556 # 'git fetch -t repourl branch; git reset --hard revision'
560 branch = repo_branch,
564 haltOnFailure = True,
568 factory.addStep(ShellCommand(
570 description = "Fetching Git remote refs",
571 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
576 factory.addStep(ShellCommand(
578 description = "Checking out Git tag",
579 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
580 haltOnFailure = True,
581 doStepIf = IsTaggingRequested
584 # Verify that Git HEAD points to a tag or branch
585 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
586 factory.addStep(ShellCommand(
588 description = "Ensure that Git HEAD is pointing to a branch or tag",
589 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
590 haltOnFailure = True))
592 factory.addStep(ShellCommand(
594 description = "Remove tmp folder",
595 command=["rm", "-rf", "tmp/"]))
598 factory.addStep(ShellCommand(
599 name = "rmfeedlinks",
600 description = "Remove feed symlinks",
601 command=["rm", "-rf", "package/feeds/"]))
603 factory.addStep(StringDownload(
605 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
606 workerdest = "../ccache_cc.sh",
610 factory.addStep(StringDownload(
612 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
613 workerdest = "../ccache_cxx.sh",
618 factory.addStep(ShellCommand(
619 name = "updatefeeds",
620 description = "Updating feeds",
621 command=["./scripts/feeds", "update"],
622 env = MakeEnv(tryccache=True),
623 haltOnFailure = True,
628 factory.addStep(ShellCommand(
629 name = "installfeeds",
630 description = "Installing feeds",
631 command=["./scripts/feeds", "install", "-a"],
632 env = MakeEnv(tryccache=True),
637 if config_seed is not None:
638 factory.addStep(StringDownload(
639 name = "dlconfigseed",
640 s = config_seed + '\n',
641 workerdest = ".config",
646 factory.addStep(ShellCommand(
648 description = "Seeding .config",
649 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
652 factory.addStep(ShellCommand(
654 description = "Removing output directory",
655 command = ["rm", "-rf", "bin/"]
658 factory.addStep(ShellCommand(
660 description = "Populating .config",
661 command = ["make", "defconfig"],
666 factory.addStep(ShellCommand(
668 description = "Checking architecture",
669 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
677 factory.addStep(SetPropertyFromCommand(
680 description = "Finding libc suffix",
681 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
684 if usign_key is not None:
685 factory.addStep(StringDownload(
686 name = "dlkeybuildpub",
687 s = UsignSec2Pub(usign_key, usign_comment),
688 workerdest = "key-build.pub",
692 factory.addStep(StringDownload(
694 s = "# fake private key",
695 workerdest = "key-build",
699 factory.addStep(StringDownload(
700 name = "dlkeybuilducert",
701 s = "# fake certificate",
702 workerdest = "key-build.ucert",
707 factory.addStep(ShellCommand(
709 description = "Preparing dl/",
710 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
716 factory.addStep(ShellCommand(
718 description = "Building and installing GNU tar",
719 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
720 env = MakeEnv(tryccache=True),
725 factory.addStep(ShellCommand(
727 description = "Populating dl/",
728 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
731 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
734 factory.addStep(ShellCommand(
736 description = "Cleaning base-files",
737 command=["make", "package/base-files/clean", "V=s"]
741 factory.addStep(ShellCommand(
743 description = "Building and installing tools",
744 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
745 env = MakeEnv(tryccache=True),
749 factory.addStep(ShellCommand(
751 description = "Building and installing toolchain",
752 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
757 factory.addStep(ShellCommand(
759 description = "Building kmods",
760 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
765 # find kernel version
766 factory.addStep(SetPropertyFromCommand(
767 name = "kernelversion",
768 property = "kernelversion",
769 description = "Finding the effective Kernel version",
770 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
771 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
774 factory.addStep(ShellCommand(
776 description = "Cleaning up package build",
777 command=["make", "package/cleanup", "V=s"]
780 factory.addStep(ShellCommand(
782 description = "Building packages",
783 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
788 factory.addStep(ShellCommand(
790 description = "Installing packages",
791 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
796 factory.addStep(ShellCommand(
798 description = "Indexing packages",
799 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
804 factory.addStep(ShellCommand(
806 description = "Building and installing images",
807 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
812 factory.addStep(ShellCommand(
814 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
815 command = "make -j1 buildinfo V=s || true",
820 factory.addStep(ShellCommand(
821 name = "json_overview_image_info",
822 description = "Generate profiles.json in target folder",
823 command = "make -j1 json_overview_image_info V=s || true",
828 factory.addStep(ShellCommand(
830 description = "Calculating checksums",
831 command=["make", "-j1", "checksum", "V=s"],
836 if enable_kmod_archive:
837 factory.addStep(ShellCommand(
839 description = "Creating kmod directory",
840 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
844 factory.addStep(ShellCommand(
845 name = "kmodprepare",
846 description = "Preparing kmod archive",
847 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
848 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
849 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
853 factory.addStep(ShellCommand(
855 description = "Indexing kmod archive",
856 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
857 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
863 if ini.has_option("gpg", "key") or usign_key is not None:
864 factory.addStep(MasterShellCommand(
865 name = "signprepare",
866 description = "Preparing temporary signing directory",
867 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
871 factory.addStep(ShellCommand(
873 description = "Packing files to sign",
874 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
878 factory.addStep(FileUpload(
879 workersrc = "sign.tar.gz",
880 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
884 factory.addStep(MasterShellCommand(
886 description = "Signing files",
887 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
888 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
892 factory.addStep(FileDownload(
893 name = "dlsigntargz",
894 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
895 workerdest = "sign.tar.gz",
899 factory.addStep(ShellCommand(
901 description = "Unpacking signed files",
902 command = ["tar", "-xzf", "sign.tar.gz"],
907 factory.addStep(ShellCommand(
909 description = "Preparing upload directory structure",
910 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
914 factory.addStep(ShellCommand(
915 name = "linkprepare",
916 description = "Preparing repository symlink",
917 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
918 doStepIf = IsNoMasterBuild,
922 if enable_kmod_archive:
923 factory.addStep(ShellCommand(
924 name = "kmoddirprepare",
925 description = "Preparing kmod archive upload directory",
926 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
930 factory.addStep(ShellCommand(
932 description = "Uploading directory structure",
933 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
934 env={'RSYNC_PASSWORD': rsync_bin_key},
935 haltOnFailure = True,
940 # download remote sha256sums to 'target-sha256sums'
941 factory.addStep(ShellCommand(
942 name = "target-sha256sums",
943 description = "Fetching remote sha256sums for target",
944 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
945 env={'RSYNC_PASSWORD': rsync_bin_key},
947 haltOnFailure = False,
948 flunkOnFailure = False,
949 warnOnFailure = False,
952 # build list of files to upload
953 factory.addStep(FileDownload(
954 name = "dlsha2rsyncpl",
955 mastersrc = scripts_dir + '/sha2rsync.pl',
956 workerdest = "../sha2rsync.pl",
960 factory.addStep(ShellCommand(
962 description = "Building list of files to upload",
963 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
964 haltOnFailure = True,
967 factory.addStep(FileDownload(
969 mastersrc = scripts_dir + '/rsync.sh',
970 workerdest = "../rsync.sh",
974 # upload new files and update existing ones
975 factory.addStep(ShellCommand(
976 name = "targetupload",
977 description = "Uploading target files",
978 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
979 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
980 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
981 env={'RSYNC_PASSWORD': rsync_bin_key},
982 haltOnFailure = True,
986 # delete files which don't exist locally
987 factory.addStep(ShellCommand(
988 name = "targetprune",
989 description = "Pruning target files",
990 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
991 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
992 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
993 env={'RSYNC_PASSWORD': rsync_bin_key},
994 haltOnFailure = True,
999 if enable_kmod_archive:
1000 factory.addStep(ShellCommand(
1001 name = "kmodupload",
1002 description = "Uploading kmod archive",
1003 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1004 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1005 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1006 env={'RSYNC_PASSWORD': rsync_bin_key},
1007 haltOnFailure = True,
1012 if rsync_src_url is not None:
1013 factory.addStep(ShellCommand(
1014 name = "sourcelist",
1015 description = "Finding source archives to upload",
1016 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1017 haltOnFailure = True
1020 factory.addStep(ShellCommand(
1021 name = "sourceupload",
1022 description = "Uploading source archives",
1023 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1024 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1025 env={'RSYNC_PASSWORD': rsync_src_key},
1026 haltOnFailure = True,
1031 factory.addStep(ShellCommand(
1033 description = "Reporting disk usage",
1034 command=["df", "-h", "."],
1035 env={'LC_ALL': 'C'},
1036 haltOnFailure = False,
1037 flunkOnFailure = False,
1038 warnOnFailure = False,
1042 factory.addStep(ShellCommand(
1044 description = "Reporting estimated file space usage",
1045 command=["du", "-sh", "."],
1046 env={'LC_ALL': 'C'},
1047 haltOnFailure = False,
1048 flunkOnFailure = False,
1049 warnOnFailure = False,
1053 factory.addStep(ShellCommand(
1054 name = "ccachestat",
1055 description = "Reporting ccache stats",
1056 command=["ccache", "-s"],
1057 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1058 want_stderr = False,
1059 haltOnFailure = False,
1060 flunkOnFailure = False,
1061 warnOnFailure = False,
1065 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1067 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1068 force_factory.addStep(steps.Trigger(
1069 name = "trigger_%s" % target,
1070 description = "Triggering %s build" % target,
1071 schedulerNames = [ "trigger_%s" % target ],
1072 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1073 doStepIf = IsTargetSelected(target)
1077 ####### STATUS TARGETS
1079 # 'status' is a list of Status Targets. The results of each build will be
1080 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1081 # including web pages, email senders, and IRC bots.
1083 if "status_bind" in inip1:
1085 'port': inip1.get("status_bind"),
1087 'waterfall_view': True,
1088 'console_view': True,
1093 if "status_user" in inip1 and "status_password" in inip1:
1094 c['www']['auth'] = util.UserPasswordAuth([
1095 (inip1.get("status_user"), inip1.get("status_password"))
1097 c['www']['authz'] = util.Authz(
1098 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1099 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1103 if ini.has_section("irc"):
1105 irc_host = iniirc.get("host", None)
1106 irc_port = iniirc.getint("port", 6667)
1107 irc_chan = iniirc.get("channel", None)
1108 irc_nick = iniirc.get("nickname", None)
1109 irc_pass = iniirc.get("password", None)
1111 if irc_host and irc_nick and irc_chan:
1112 irc = reporters.IRC(irc_host, irc_nick,
1114 password = irc_pass,
1115 channels = [ irc_chan ],
1116 notify_events = [ 'exception', 'problem', 'recovery' ]
1119 c['services'].append(irc)
1121 c['revlink'] = util.RevlinkMatch([
1122 r'https://git.openwrt.org/openwrt/(.*).git'
1124 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1129 # This specifies what database buildbot uses to store its state. You can leave
1130 # this at its default for all but the largest installations.
1131 'db_url' : "sqlite:///state.sqlite",
1134 c['buildbotNetUsageData'] = None