2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Write our own PID file at startup if twistd has not created one yet,
# so helper scripts can find the running master.
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Load the INI configuration; BUILDMASTER_CONFIG overrides the default path.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
# Fail fast when mandatory sections are absent.
54 if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
55 raise ValueError("Fix your configuration")
# NOTE(review): an `inip1 = ini['phase1']` alias is used below but its
# definition falls on a line missing from this listing — confirm.
59 # This is the dictionary that the buildmaster pays attention to. We also use
60 # a shorter alias to save typing.
61 c = BuildmasterConfig = {}
63 ####### PROJECT IDENTITY
65 # the 'title' string will appear at the top of this buildbot
66 # installation's html.WebStatus home page (linked to the
67 # 'titleURL') and is embedded in the title of the waterfall HTML page.
69 c['title'] = ini['general'].get("title")
70 c['titleURL'] = ini['general'].get("title_url")
72 # the 'buildbotURL' string should point to the location where the buildbot's
73 # internal web server (usually the html.WebStatus page) is visible. This
74 # typically uses the port number set in the Waterfall 'status' entry, but
75 # with an externally-visible host name which the buildbot cannot figure out
# `inip1` is presumably the [phase1] section — definition missing here.
78 c['buildbotURL'] = inip1.get("buildbot_url")
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
# Register every [worker *] section that belongs to phase 1 (no "phase"
# option, or phase == 1). Per-worker build properties are collected in
# sl_props and attached to the Worker object below.
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
# Optional per-worker parallel build count (defaults to 1 via sl_props).
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
# NOTE(review): a guard line (likely `if max_builds == 1:`) is missing
# from this listing just above — confirm against the original file.
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
# Optional named master-side download lock shared between workers.
104 if ini.has_option(section, "dl_lock"):
105 lockname = ini.get(section, "dl_lock")
106 sl_props['dl_lock'] = lockname
107 if lockname not in NetLocks:
108 NetLocks[lockname] = locks.MasterLock(lockname)
# Optional named master-side upload lock: workers configured with the
# same ul_lock name serialize their uploads through one MasterLock.
if ini.has_option(section, "ul_lock"):
    # FIX: this previously read the "dl_lock" option here (copy/paste
    # error), so the upload lock silently took the download lock's name
    # and a worker-specific ul_lock setting was ignored.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
# A shared workdir is only safe for strictly serial workers.
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
# NOTE(review): the `c['workers'] = []` initialisation falls on a line
# missing from this listing — confirm it precedes this loop.
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # PB port can be either a numeric port or a connection string
122 pb_port = inip1.get("port") or 9989
123 c['protocols'] = {'pb': {'port': pb_port}}
# Merge pending build requests for the same builder into one build.
126 c['collapseRequests'] = True
128 # Reduce amount of backlog data
# JanitorConfigurator prunes old build logs; closing brackets of this
# call fall on lines missing from this listing.
129 c['configurators'] = [util.JanitorConfigurator(
130 logHorizon=timedelta(days=3),
# Builder-prioritisation helper: most recent completion time for a builder.
134 @defer.inlineCallbacks
135 def getNewestCompleteTime(bldr):
136 """Returns the complete_at of the latest completed and not SKIPPED
137 build request for this builder, or None if there are no such build
138 requests. We need to filter out SKIPPED requests because we're
139 using collapseRequests=True which is unfortunately marking all
140 previous requests as complete when new buildset is created.
142 @returns: datetime instance or None, via Deferred
# NOTE(review): the closing triple-quote of this docstring and several
# guard lines (e.g. an early `return None` when nothing completed) fall
# on lines missing from this listing — confirm before relying on flow.
145 bldrid = yield bldr.getBuilderId()
# Newest completed, non-skipped build request for this builder.
146 completed = yield bldr.master.data.get(
147 ('builders', bldrid, 'buildrequests'),
149 resultspec.Filter('complete', 'eq', [True]),
150 resultspec.Filter('results', 'ne', [results.SKIPPED]),
152 order=['-complete_at'], limit=1)
156 complete_at = completed[0]['complete_at']
# Also consult the newest started build; its completion may postdate the
# request's complete_at (the endpoint tuple line is missing here).
158 last_build = yield bldr.master.data.get(
161 resultspec.Filter('builderid', 'eq', [bldrid]),
163 order=['-started_at'], limit=1)
165 if last_build and last_build[0]:
166 last_complete_at = last_build[0]['complete_at']
167 if last_complete_at and (last_complete_at > complete_at):
168 return last_complete_at
# Schedule builders least-recently-finished first, so slow targets are
# not starved by fast ones.
172 @defer.inlineCallbacks
173 def prioritizeBuilders(master, builders):
174 """Returns sorted list of builders by their last timestamp of completed and
177 @returns: list of sorted builders
# A builder currently running counts as "busy".
180 def is_building(bldr):
181 return bool(bldr.building) or bool(bldr.old_building)
# NOTE(review): the `def bldr_info(bldr):` and `def bldr_sort(item):`
# headers, plus the branches assigning sentinel dates, fall on lines
# missing from this listing — confirm against the original file.
184 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
185 d.addCallback(lambda complete_at: (complete_at, bldr))
189 (complete_at, bldr) = item
193 complete_at = date.replace(tzinfo=tzutc())
195 if is_building(bldr):
197 complete_at = date.replace(tzinfo=tzutc())
199 return (complete_at, bldr.name)
# NOTE(review): this local `results` shadows the imported
# buildbot.process.results module inside this function.
201 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
202 results.sort(key=bldr_sort)
205 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
207 return [r[1] for r in results]
209 c['prioritizeBuilders'] = prioritizeBuilders
211 ####### CHANGESOURCES
# Paths and rsync/upload settings shared by all builders below.
213 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
214 scripts_dir = os.path.abspath("../scripts")
216 tree_expire = inip1.getint("expire", 0)
217 config_seed = inip1.get("config_seed", "")
219 repo_url = ini['repo'].get("url")
220 repo_branch = ini['repo'].get("branch", "master")
222 rsync_bin_url = ini['rsync'].get("binary_url")
223 rsync_bin_key = ini['rsync'].get("binary_password")
224 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
# Only rsync-daemon style URLs understand --contimeout.
# NOTE(review): .find() on a None URL would raise — the config check at
# startup only guarantees the [rsync] section exists, not these keys.
226 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
227 rsync_bin_defopts += ["--contimeout=20"]
229 rsync_src_url = ini['rsync'].get("source_url")
230 rsync_src_key = ini['rsync'].get("source_password")
231 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
233 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
234 rsync_src_defopts += ["--contimeout=20"]
# usign (package signing) defaults; `usign_key = None` presumably
# initialised on a line missing from this listing.
237 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
239 if ini.has_section("usign"):
240 usign_key = ini['usign'].get("key")
241 usign_comment = ini['usign'].get("comment", usign_comment)
243 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
# Maintain a local clone used to enumerate buildable targets.
249 if not os.path.isdir(work_dir+'/source.git'):
250 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
252 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
254 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
255 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
256 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
# NOTE(review): `targets = []` and the read loop's `while` header fall
# on lines missing from this listing.
259 line = findtargets.stdout.readline()
262 ta = line.decode().strip().split(' ')
263 targets.append(ta[0])
266 # the 'change_source' setting tells the buildmaster how it should find out
267 # about source code changes. Here we point to the buildbot clone of pyflakes.
269 c['change_source'] = []
270 c['change_source'].append(GitPoller(
272 workdir=work_dir+'/work.git', branch=repo_branch,
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
# ForceScheduler parameter offering the release tags of the current
# branch (queried live from the remote) as build choices.
280 class TagChoiceParameter(BaseParameter):
281 spec_attributes = ["strict", "choices"]
285 def __init__(self, name, label=None, **kw):
286 super().__init__(name, label, **kw)
287 self._choice_list = []
# NOTE(review): the `@property def choices(self):` header and the
# `taglist = []` initialisation fall on lines missing from this listing.
292 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
295 findtags = subprocess.Popen(
296 ['git', 'ls-remote', '--tags', repo_url],
297 stdout = subprocess.PIPE)
300 line = findtags.stdout.readline()
# Keep only vX.Y.Z[-rcN] tags belonging to this branch's base version.
305 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
307 if tagver and tagver[1].find(basever[1]) == 0:
308 taglist.append(tagver[1])
# '-z' suffix sorts final releases after their rc tags; '' means "no tag".
310 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
311 taglist.insert(0, '')
313 self._choice_list = taglist
315 return self._choice_list
317 def parse_from_arg(self, s):
318 if self.strict and s not in self._choice_list:
319 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# Branch scheduler: build all targets 60s after the tree goes quiet.
# NOTE(review): `c['schedulers'] = []` and several keyword lines (e.g.
# scheduler/parameter names) fall on lines missing from this listing.
323 c['schedulers'].append(SingleBranchScheduler(
325 change_filter = filter.ChangeFilter(branch=repo_branch),
326 treeStableTimer = 60,
327 builderNames = targets))
# Force scheduler: manual builds via the 00_force_build pseudo-builder.
329 c['schedulers'].append(ForceScheduler(
331 buttonName = "Force builds",
332 label = "Force build details",
333 builderNames = [ "00_force_build" ],
336 util.CodebaseParameter(
338 label = "Repository",
# Fixed (non-editable) codebase fields — users may not pick arbitrary refs.
339 branch = util.FixedParameter(name = "branch", default = ""),
340 revision = util.FixedParameter(name = "revision", default = ""),
341 repository = util.FixedParameter(name = "repository", default = ""),
342 project = util.FixedParameter(name = "project", default = "")
346 reason = util.StringParameter(
349 default = "Trigger build",
355 util.NestedParameter(
357 label="Build Options",
360 util.ChoiceStringParameter(
362 label = "Build target",
364 choices = [ "all" ] + targets
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
# (pattern, make-target) pairs driving the user-requested clean steps;
# the `CleanTargetMap = [` opener falls on a line missing from this listing.
383 [ "tools", "tools/clean" ],
384 [ "chain", "toolchain/clean" ],
385 [ "linux", "target/linux/clean" ],
386 [ "dir", "dirclean" ],
387 [ "dist", "distclean" ]
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern* (e.g. "dist" enables the "make distclean"
    step from CleanTargetMap)."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        # NOTE(review): the original return lines are missing from this
        # listing; reconstructed as a plain boolean of the visible test.
        return bool(val and re.match(pattern, val))
    return CheckCleanProperty
def IsSharedWorkdir(step):
    """doStepIf helper: does this build's worker use a shared workdir?"""
    shared = step.getProperty("shared_wd")
    return bool(shared)
def IsCleanupRequested(step):
    """doStepIf helper: run the cleanup.sh steps for this build.

    NOTE(review): this function's return lines are missing from the
    listing; reconstructed from the visible guard and property read.
    """
    if IsSharedWorkdir(step):
        # Shared workdirs are never cleaned here (expiry handles them).
        return False
    do_cleanup = step.getProperty("do_cleanup")
    return bool(do_cleanup)
def IsExpireRequested(step):
    """doStepIf helper: run the expire.sh tree-expiry steps.

    NOTE(review): a line is missing from the listing inside the guard;
    reconstructed so shared-workdir builds skip expiry and all others
    expire exactly when they do not run full cleanup.
    """
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
def IsGitFreshRequested(step):
    """doStepIf helper: use the Git 'fresh' checkout method when the
    worker asked for cleanup (missing return lines reconstructed)."""
    do_cleanup = step.getProperty("do_cleanup")
    return bool(do_cleanup)
def IsGitCleanRequested(step):
    """doStepIf helper: use the Git 'clean' method when no fresh
    checkout was requested (exact inverse of IsGitFreshRequested)."""
    if IsGitFreshRequested(step):
        return False
    return True
def IsTaggingRequested(step):
    """doStepIf helper: is this a release-tag build (tag property set to
    an X.Y.Z or X.Y.Z-rcN version)? Missing return lines reconstructed
    as a boolean of the visible condition."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
def IsNoTaggingRequested(step):
    """doStepIf helper: true for ordinary (non release-tag) builds."""
    tagging = IsTaggingRequested(step)
    return not tagging
def IsNoMasterBuild(step):
    """doStepIf helper: true when building a release branch, false when
    the configured branch is "master" (branch-only steps key off this)."""
    building_master = (repo_branch == "master")
    return not building_master
def GetBaseVersion(branch=None):
    """Return the numeric base version encoded in a release branch name
    ("openwrt-21.02" -> "21.02"), or "master" for any other branch.

    *branch* defaults to the configured repo_branch; the parameter is
    accepted (backward-compatibly) so the helper is testable/reusable.
    NOTE(review): the "master" fallback is reconstructed — callers such
    as GetVersionPrefix compare the result against "master".
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
def GetVersionPrefix(props):
    """Renderer body: directory prefix under which artifacts upload —
    "X.Y.Z/" for tag builds, "X.Y-SNAPSHOT/" for branch builds, and an
    empty string for master.

    NOTE(review): the @properties.renderer decorator line and the empty
    fallback appear to be missing from this listing — both reconstructed;
    confirm against the original file.
    """
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
def GetNumJobs(props):
    """Renderer body: parallel make jobs for this build, computed as
    nproc divided by the worker's max_builds, returned as a string.

    NOTE(review): the decorator line and the fallback are missing from
    this listing; the "1" fallback is reconstructed — confirm.
    """
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        # nproc arrives as a string property; max_builds is an int.
        return str(int(int(props["nproc"]) / props["max_builds"]))
    return "1"
def GetCC(props):
    """Renderer body: the gcc command discovered by the findbin step,
    with "gcc" as fallback.

    NOTE(review): this function's header is missing from the listing;
    the name is reconstructed from MakeEnv's `cc=GetCC` usage and the
    fallback from the findbin "gcc" step — confirm.
    """
    if props.hasProperty("cc_command"):
        return props["cc_command"]
    return "gcc"
def GetCXX(props):
    """Renderer body: the g++ command discovered by the findbin step,
    with "g++" as fallback.

    NOTE(review): header missing from the listing; name reconstructed
    from MakeEnv's `cxx=GetCXX` usage, fallback from the findbin "g++"
    step — confirm.
    """
    if props.hasProperty("cxx_command"):
        return props["cxx_command"]
    return "g++"
def GetCCache(props):
    """Renderer body: the ccache command if one was found on the worker,
    otherwise "" (so the ${CCACHE} expansion in the ccache_cc.sh /
    ccache_cxx.sh wrappers is a no-op).

    NOTE(review): the empty-string fallback is reconstructed from a
    missing line — confirm against the original file.
    """
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
# Build-request picker for BuilderConfig(nextBuild=...): prefers tagged
# (release) requests over ordinary ones.
485 def GetNextBuild(builder, requests):
# NOTE(review): the `for r in requests:` loop header and the return
# statements fall on lines missing from this listing — confirm flow.
487 if r.properties and r.properties.hasProperty("tag"):
491 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
# Build a step environment exposing the discovered compilers; with
# tryccache=True, CC/CXX point at the ccache wrapper scripts instead.
494 def MakeEnv(overrides=None, tryccache=False):
# NOTE(review): the `env = {` opener, closing brace, the
# `if tryccache:` / `else:` branch lines and the final `return env`
# fall on lines missing from this listing — confirm.
496 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
497 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
500 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
501 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
502 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
504 env['CC'] = env['CCC']
505 env['CXX'] = env['CCXX']
507 if overrides is not None:
508 env.update(overrides)
# Renderer bodies producing the master-side lock list for download /
# upload steps, based on the worker's dl_lock / ul_lock properties.
# NOTE(review): the @properties.renderer decorators, `lock = None`
# initialisers and empty-list fallbacks fall on missing lines — confirm.
512 def NetLockDl(props):
514 if props.hasProperty("dl_lock"):
515 lock = NetLocks[props["dl_lock"]]
517 return [lock.access('exclusive')]
522 def NetLockUl(props):
524 if props.hasProperty("ul_lock"):
525 lock = NetLocks[props["ul_lock"]]
527 return [lock.access('exclusive')]
def TagPropertyValue(props):
    """Extract the "tag" value from the force-build scheduler's nested
    "options" property; None when absent (the missing final return is
    reconstructed)."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
    return None
# doStepIf factory for the per-target trigger steps of the force build:
# only fire the trigger whose target matches the user's selection.
539 def IsTargetSelected(target):
540 def CheckTargetProperty(step):
# NOTE(review): lines are missing here (likely a try: around the
# property read) and the return statements below — confirm flow.
542 options = step.getProperty("options")
543 if type(options) is dict:
544 selected_target = options.get("target", "all")
545 if selected_target != "all" and selected_target != target:
552 return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign *public* key file content from a base64 secret key.

    The three slices repack what appears to be the key algorithm, key id
    and public half of the decoded secret key (offsets per the usign /
    signify secret-key layout — confirm). Returns the two-line public
    key file text, or None when the input does not decode.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # NOTE(review): lines are missing around the decode in the
        # original listing; a best-effort None on bad input is
        # reconstructed here.
        return None
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        # FIX: b64encode returns bytes on Python 3 — without .decode()
        # the written key-build.pub would contain a literal "b'...'".
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
# Per-worker lock serialising dl/ population across builds on one worker.
566 dlLock = locks.WorkerLock("worker_dl")
# NOTE(review): `workerNames = []` falls on a line missing from this
# listing — confirm it precedes this loop.
570 for worker in c['workers']:
571 workerNames.append(worker.workername)
# Pseudo-builder holding the per-target Trigger steps added further down.
573 force_factory = BuildFactory()
575 c['builders'].append(BuilderConfig(
576 name = "00_force_build",
577 workernames = workerNames,
578 factory = force_factory))
# One builder per "target/subtarget" pair; ts[0]=target, ts[1]=subtarget.
580 for target in targets:
581 ts = target.split('/')
583 factory = BuildFactory()
585 # setup shared work directory if required
586 factory.addStep(ShellCommand(
588 description = "Setting up shared work directory",
589 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
591 haltOnFailure = True,
592 doStepIf = IsSharedWorkdir))
594 # find number of cores
# Sets the "nproc" property consumed by GetNumJobs.
595 factory.addStep(SetPropertyFromCommand(
598 description = "Finding number of CPUs",
599 command = ["nproc"]))
601 # find gcc and g++ compilers
602 factory.addStep(FileDownload(
603 name = "dlfindbinpl",
604 mastersrc = scripts_dir + '/findbin.pl',
605 workerdest = "../findbin.pl",
608 factory.addStep(SetPropertyFromCommand(
610 property = "cc_command",
611 description = "Finding gcc command",
613 "../findbin.pl", "gcc", "", "",
615 haltOnFailure = True))
617 factory.addStep(SetPropertyFromCommand(
619 property = "cxx_command",
620 description = "Finding g++ command",
622 "../findbin.pl", "g++", "", "",
624 haltOnFailure = True))
626 # see if ccache is available
# Deliberately non-fatal: missing ccache just disables the wrappers.
627 factory.addStep(SetPropertyFromCommand(
628 property = "ccache_command",
629 command = ["which", "ccache"],
630 description = "Testing for ccache command",
631 haltOnFailure = False,
632 flunkOnFailure = False,
633 warnOnFailure = False,
636 # expire tree if needed
638 factory.addStep(FileDownload(
640 doStepIf = IsExpireRequested,
641 mastersrc = scripts_dir + '/expire.sh',
642 workerdest = "../expire.sh",
645 factory.addStep(ShellCommand(
647 description = "Checking for build tree expiry",
648 command = ["./expire.sh", str(tree_expire)],
650 haltOnFailure = True,
651 doStepIf = IsExpireRequested,
654 # cleanup.sh if needed
655 factory.addStep(FileDownload(
656 name = "dlcleanupsh",
657 mastersrc = scripts_dir + '/cleanup.sh',
658 workerdest = "../cleanup.sh",
660 doStepIf = IsCleanupRequested))
# "full" pass removes other builders' directories on the worker; the
# "single" pass below cleans only this builder's work area.
662 factory.addStep(ShellCommand(
664 description = "Cleaning previous builds",
665 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
667 haltOnFailure = True,
668 doStepIf = IsCleanupRequested,
671 factory.addStep(ShellCommand(
673 description = "Cleaning work area",
674 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
676 haltOnFailure = True,
677 doStepIf = IsCleanupRequested,
680 # user-requested clean targets
# NOTE(review): loop variable `tuple` shadows the builtin of that name.
681 for tuple in CleanTargetMap:
682 factory.addStep(ShellCommand(
684 description = 'User-requested "make %s"' % tuple[1],
685 command = ["make", tuple[1], "V=s"],
687 doStepIf = IsMakeCleanRequested(tuple[0])
690 # Workaround bug when switching from a checked out tag back to a branch
691 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
692 factory.addStep(ShellCommand(
693 name = "gitcheckout",
694 description = "Ensure that Git HEAD is sane",
695 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
696 haltOnFailure = True))
698 # check out the source
700 # if repo doesn't exist: 'git clone repourl'
701 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
702 # 'git fetch -t repourl branch; git reset --hard revision'
703 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
704 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
# NOTE(review): the two Git(...) step openers are on lines missing from
# this listing; only their keyword arguments survive below.
708 branch = repo_branch,
712 haltOnFailure = True,
713 doStepIf = IsGitCleanRequested,
719 branch = repo_branch,
723 haltOnFailure = True,
724 doStepIf = IsGitFreshRequested,
728 factory.addStep(ShellCommand(
730 description = "Fetching Git remote refs",
731 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
# For release builds, switch to the requested vX.Y.Z tag.
736 factory.addStep(ShellCommand(
738 description = "Checking out Git tag",
739 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
740 haltOnFailure = True,
741 doStepIf = IsTaggingRequested
744 # Verify that Git HEAD points to a tag or branch
745 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
746 factory.addStep(ShellCommand(
748 description = "Ensure that Git HEAD is pointing to a branch or tag",
749 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
750 haltOnFailure = True))
752 factory.addStep(ShellCommand(
754 description = "Remove tmp folder",
755 command=["rm", "-rf", "tmp/"]))
758 factory.addStep(ShellCommand(
759 name = "rmfeedlinks",
760 description = "Remove feed symlinks",
761 command=["rm", "-rf", "package/feeds/"]))
# Wrapper scripts used by MakeEnv(tryccache=True): run ${CCACHE} in
# front of the real compilers.
763 factory.addStep(StringDownload(
765 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
766 workerdest = "../ccache_cc.sh",
770 factory.addStep(StringDownload(
772 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
773 workerdest = "../ccache_cxx.sh",
778 factory.addStep(ShellCommand(
779 name = "updatefeeds",
780 description = "Updating feeds",
781 command=["./scripts/feeds", "update"],
782 env = MakeEnv(tryccache=True),
783 haltOnFailure = True,
788 factory.addStep(ShellCommand(
789 name = "installfeeds",
790 description = "Installing feeds",
791 command=["./scripts/feeds", "install", "-a"],
792 env = MakeEnv(tryccache=True),
# NOTE(review): config_seed defaults to "" above, so this condition is
# always true as written — possibly intended as `if config_seed:`.
797 if config_seed is not None:
798 factory.addStep(StringDownload(
799 name = "dlconfigseed",
800 s = config_seed + '\n',
801 workerdest = ".config",
# Append the target/subtarget selection and signing flag to .config.
806 factory.addStep(ShellCommand(
808 description = "Seeding .config",
809 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
812 factory.addStep(ShellCommand(
814 description = "Removing output directory",
815 command = ["rm", "-rf", "bin/"]
818 factory.addStep(ShellCommand(
820 description = "Populating .config",
821 command = ["make", "defconfig"],
# Guard against defconfig silently dropping an unknown target.
826 factory.addStep(ShellCommand(
828 description = "Checking architecture",
829 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
# Sets the "libc" property ("" for musl, "-<libc>" otherwise) used in
# the bin/targets/.../<subtarget><libc> upload paths.
837 factory.addStep(SetPropertyFromCommand(
840 description = "Finding libc suffix",
841 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
844 if usign_key is not None:
845 factory.addStep(StringDownload(
846 name = "dlkeybuildpub",
847 s = UsignSec2Pub(usign_key, usign_comment),
848 workerdest = "key-build.pub",
# Placeholder private key/cert: actual signing happens on the master.
852 factory.addStep(StringDownload(
854 s = "# fake private key",
855 workerdest = "key-build",
859 factory.addStep(StringDownload(
860 name = "dlkeybuilducert",
861 s = "# fake certificate",
862 workerdest = "key-build.ucert",
# Share one dl/ directory per worker via $HOME/dl.
867 factory.addStep(ShellCommand(
869 description = "Preparing dl/",
870 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
876 factory.addStep(ShellCommand(
878 description = "Building and installing GNU tar",
879 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
880 env = MakeEnv(tryccache=True),
# Download step holds both the per-worker dlLock and the optional
# network dl_lock rendered by NetLockDl.
885 factory.addStep(ShellCommand(
887 description = "Populating dl/",
888 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
891 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
894 factory.addStep(ShellCommand(
896 description = "Cleaning base-files",
897 command=["make", "package/base-files/clean", "V=s"]
901 factory.addStep(ShellCommand(
903 description = "Building and installing tools",
904 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
905 env = MakeEnv(tryccache=True),
909 factory.addStep(ShellCommand(
911 description = "Building and installing toolchain",
912 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
917 factory.addStep(ShellCommand(
919 description = "Building kmods",
920 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
925 # find kernel version
# Sets the "kernelversion" property used for the kmods/ archive paths.
926 factory.addStep(SetPropertyFromCommand(
927 name = "kernelversion",
928 property = "kernelversion",
929 description = "Finding the effective Kernel version",
930 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
931 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
934 factory.addStep(ShellCommand(
936 description = "Cleaning up package build",
937 command=["make", "package/cleanup", "V=s"]
940 factory.addStep(ShellCommand(
942 description = "Building packages",
943 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
948 factory.addStep(ShellCommand(
950 description = "Installing packages",
951 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
# Index without signing; signing is done centrally on the master.
956 factory.addStep(ShellCommand(
958 description = "Indexing packages",
959 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
964 factory.addStep(ShellCommand(
966 description = "Building and installing images",
967 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
972 factory.addStep(ShellCommand(
974 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
975 command = "make -j1 buildinfo V=s || true",
980 factory.addStep(ShellCommand(
981 name = "json_overview_image_info",
982 description = "Generate profiles.json in target folder",
983 command = "make -j1 json_overview_image_info V=s || true",
988 factory.addStep(ShellCommand(
990 description = "Calculating checksums",
991 command=["make", "-j1", "checksum", "V=s"],
# Optional per-kernel kmod archive: copy kmod-*.ipk packages aside and
# index them separately so users can fetch modules matching a kernel.
996 if enable_kmod_archive:
997 factory.addStep(ShellCommand(
999 description = "Creating kmod directory",
1000 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1001 haltOnFailure = True
1004 factory.addStep(ShellCommand(
1005 name = "kmodprepare",
1006 description = "Preparing kmod archive",
1007 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1008 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1009 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1010 haltOnFailure = True
1013 factory.addStep(ShellCommand(
1015 description = "Indexing kmod archive",
1016 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1017 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1019 haltOnFailure = True
# Central signing: pack sha256sums/Packages files, upload them to the
# master, sign there with signall.sh, then download and unpack again.
1023 if ini.has_option("gpg", "key") or usign_key is not None:
1024 factory.addStep(MasterShellCommand(
1025 name = "signprepare",
1026 description = "Preparing temporary signing directory",
1027 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1028 haltOnFailure = True
1031 factory.addStep(ShellCommand(
1033 description = "Packing files to sign",
1034 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1035 haltOnFailure = True
1038 factory.addStep(FileUpload(
1039 workersrc = "sign.tar.gz",
1040 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1041 haltOnFailure = True
1044 factory.addStep(MasterShellCommand(
1046 description = "Signing files",
1047 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1048 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1049 haltOnFailure = True
1052 factory.addStep(FileDownload(
1053 name = "dlsigntargz",
1054 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1055 workerdest = "sign.tar.gz",
1056 haltOnFailure = True
1059 factory.addStep(ShellCommand(
1060 name = "signunpack",
1061 description = "Unpacking signed files",
1062 command = ["tar", "-xzf", "sign.tar.gz"],
1063 haltOnFailure = True
# Build the remote directory skeleton under the version prefix, then
# push it with rsync (RSYNC_PASSWORD comes from the config file).
1067 factory.addStep(ShellCommand(
1068 name = "dirprepare",
1069 description = "Preparing upload directory structure",
1070 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1071 haltOnFailure = True
# Release branches link their packages dir to the shared per-basever one.
1074 factory.addStep(ShellCommand(
1075 name = "linkprepare",
1076 description = "Preparing repository symlink",
1077 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1078 doStepIf = IsNoMasterBuild,
1079 haltOnFailure = True
1082 if enable_kmod_archive:
1083 factory.addStep(ShellCommand(
1084 name = "kmoddirprepare",
1085 description = "Preparing kmod archive upload directory",
1086 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1087 haltOnFailure = True
1090 factory.addStep(ShellCommand(
1092 description = "Uploading directory structure",
1093 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1094 env={'RSYNC_PASSWORD': rsync_bin_key},
1095 haltOnFailure = True,
1100 # download remote sha256sums to 'target-sha256sums'
# Best-effort fetch (all failure flags off): absence just means a full
# upload via sha2rsync.pl below.
1101 factory.addStep(ShellCommand(
1102 name = "target-sha256sums",
1103 description = "Fetching remote sha256sums for target",
1104 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1105 env={'RSYNC_PASSWORD': rsync_bin_key},
1107 haltOnFailure = False,
1108 flunkOnFailure = False,
1109 warnOnFailure = False,
1112 # build list of files to upload
1113 factory.addStep(FileDownload(
1114 name = "dlsha2rsyncpl",
1115 mastersrc = scripts_dir + '/sha2rsync.pl',
1116 workerdest = "../sha2rsync.pl",
# sha2rsync.pl diffs remote vs local sha256sums into "rsynclist" so only
# changed files are uploaded.
1120 factory.addStep(ShellCommand(
1122 description = "Building list of files to upload",
1123 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1124 haltOnFailure = True,
1127 factory.addStep(FileDownload(
1128 name = "dlrsync.sh",
1129 mastersrc = scripts_dir + '/rsync.sh',
1130 workerdest = "../rsync.sh",
1134 # upload new files and update existing ones
1135 factory.addStep(ShellCommand(
1136 name = "targetupload",
1137 description = "Uploading target files",
1138 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1139 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1140 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1141 env={'RSYNC_PASSWORD': rsync_bin_key},
1142 haltOnFailure = True,
1146 # delete files which don't exist locally
# --existing --ignore-existing + --delete: transfer nothing, only prune.
1147 factory.addStep(ShellCommand(
1148 name = "targetprune",
1149 description = "Pruning target files",
1150 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1151 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1152 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1153 env={'RSYNC_PASSWORD': rsync_bin_key},
1154 haltOnFailure = True,
1159 if enable_kmod_archive:
1160 factory.addStep(ShellCommand(
1161 name = "kmodupload",
1162 description = "Uploading kmod archive",
1163 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1164 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1165 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1166 env={'RSYNC_PASSWORD': rsync_bin_key},
1167 haltOnFailure = True,
# Optionally mirror the source tarballs fetched into dl/ this build.
1172 if rsync_src_url is not None:
1173 factory.addStep(ShellCommand(
1174 name = "sourcelist",
1175 description = "Finding source archives to upload",
1176 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1177 haltOnFailure = True
1180 factory.addStep(ShellCommand(
1181 name = "sourceupload",
1182 description = "Uploading source archives",
1183 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1184 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1185 env={'RSYNC_PASSWORD': rsync_src_key},
1186 haltOnFailure = True,
# Informational-only reporting steps (all failure flags off).
1191 factory.addStep(ShellCommand(
1193 description = "Reporting disk usage",
1194 command=["df", "-h", "."],
1195 env={'LC_ALL': 'C'},
1196 haltOnFailure = False,
1197 flunkOnFailure = False,
1198 warnOnFailure = False,
1202 factory.addStep(ShellCommand(
1204 description = "Reporting estimated file space usage",
1205 command=["du", "-sh", "."],
1206 env={'LC_ALL': 'C'},
1207 haltOnFailure = False,
1208 flunkOnFailure = False,
1209 warnOnFailure = False,
1213 factory.addStep(ShellCommand(
1214 name = "ccachestat",
1215 description = "Reporting ccache stats",
1216 command=["ccache", "-s"],
1217 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1218 want_stderr = False,
1219 haltOnFailure = False,
1220 flunkOnFailure = False,
1221 warnOnFailure = False,
# Register the builder plus a Triggerable scheduler, and give the force
# builder a trigger step gated on the selected target.
1225 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1227 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1228 force_factory.addStep(steps.Trigger(
1229 name = "trigger_%s" % target,
1230 description = "Triggering %s build" % target,
1231 schedulerNames = [ "trigger_%s" % target ],
1232 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1233 doStepIf = IsTargetSelected(target)
1237 ####### STATUS TARGETS
1239 # 'status' is a list of Status Targets. The results of each build will be
1240 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1241 # including web pages, email senders, and IRC bots.
# Web UI: only enabled when a bind address/port is configured.
# NOTE(review): the `c['www'] = dict(` opener falls on a line missing
# from this listing.
1243 if "status_bind" in inip1:
1245 'port': inip1.get("status_bind"),
1247 'waterfall_view': True,
1248 'console_view': True,
# Optional basic auth; the single configured user gets the admin role.
1253 if "status_user" in inip1 and "status_password" in inip1:
1254 c['www']['auth'] = util.UserPasswordAuth([
1255 (inip1.get("status_user"), inip1.get("status_password"))
1257 c['www']['authz'] = util.Authz(
1258 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1259 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC notifier; `iniirc = ini['irc']` presumably on a missing line.
1263 if ini.has_section("irc"):
1265 irc_host = iniirc.get("host", None)
1266 irc_port = iniirc.getint("port", 6667)
1267 irc_chan = iniirc.get("channel", None)
1268 irc_nick = iniirc.get("nickname", None)
1269 irc_pass = iniirc.get("password", None)
1271 if irc_host and irc_nick and irc_chan:
1272 irc = reporters.IRC(irc_host, irc_nick,
1274 password = irc_pass,
1275 channels = [ irc_chan ],
1276 notify_events = [ 'exception', 'problem', 'recovery' ]
1279 c['services'].append(irc)
# Map commit ids to cgit links in the web UI.
1281 c['revlink'] = util.RevlinkMatch([
1282 r'https://git.openwrt.org/openwrt/(.*).git'
1284 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1289 # This specifies what database buildbot uses to store its state. You can leave
1290 # this at its default for all but the largest installations.
1291 'db_url' : "sqlite:///state.sqlite",
# Opt out of buildbot's phone-home usage reporting.
1294 c['buildbotNetUsageData'] = None