2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record this master's PID in twistd.pid (once) so external tooling can find
# the running process; an existing pidfile is left untouched.
pid_path = "twistd.pid"
if not os.path.exists(pid_path):
    with open(pid_path, "w") as handle:
        handle.write(str(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Load deployment-specific settings; BUILDMASTER_CONFIG may override the
# default ./config.ini location.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 # This is the dictionary that the buildmaster pays attention to. We also use
55 # a shorter alias to save typing.
56 c = BuildmasterConfig = {}
58 ####### PROJECT IDENTITY
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
64 c['title'] = ini.get("general", "title")
65 c['titleURL'] = ini.get("general", "title_url")
67 # the 'buildbotURL' string should point to the location where the buildbot's
68 # internal web server (usually the html.WebStatus page) is visible. This
69 # typically uses the port number set in the Waterfall 'status' entry, but
70 # with an externally-visible host name which the buildbot cannot figure out
# Externally visible URL of this (phase 1) master, taken from config.ini.
73 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
77 # The 'workers' list defines the set of recognized buildworkers. Each element is
78 # a Worker object, specifying a unique worker name and password. The same
79 # worker name and password must be configured on the worker.
# Optional override of the worker listening port; the default assignment is
# on a line elided from this chunk.
83 if ini.has_option("phase1", "port"):
84 worker_port = ini.get("phase1", "port")
# Parse every "worker <name>" section of the ini file and register the
# matching phase-1 workers, collecting per-worker build properties
# (locks, cleanup policy, build concurrency, shared workdir flag).
for section in ini.sections():
	if section.startswith("worker "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
				# A worker limited to one concurrent build can safely share
				# its work directory between builders.
				if max_builds == 1:
					sl_props['shared_wd'] = True
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				# BUG FIX: this branch previously read the "dl_lock" option
				# (copy-paste error), so a worker's upload lock silently used
				# its download lock's name.
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
122 # This must match the value configured into the buildworkers (with their
# Workers connect over Buildbot's PB protocol on the configured port.
124 c['protocols'] = {'pb': {'port': worker_port}}
# Merge compatible pending build requests into a single build.
127 c['collapseRequests'] = True
129 # Reduce amount of backlog data
# Janitor prunes build logs older than three days.
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
135 @defer.inlineCallbacks
136 def getNewestCompleteTime(bldr):
137 """Returns the complete_at of the latest completed and not SKIPPED
138 build request for this builder, or None if there are no such build
139 requests. We need to filter out SKIPPED requests because we're
140 using collapseRequests=True which is unfortunately marking all
141 previous requests as complete when new buildset is created.
143 @returns: datetime instance or None, via Deferred
# Query the data API for the newest completed, non-skipped build request.
146 bldrid = yield bldr.getBuilderId()
147 completed = yield bldr.master.data.get(
148 ('builders', bldrid, 'buildrequests'),
150 resultspec.Filter('complete', 'eq', [True]),
151 resultspec.Filter('results', 'ne', [results.SKIPPED]),
153 order=['-complete_at'], limit=1)
157 complete_at = completed[0]['complete_at']
# Also look at the most recently started build: it may have completed later
# than the newest completed request found above.
159 last_build = yield bldr.master.data.get(
162 resultspec.Filter('builderid', 'eq', [bldrid]),
164 order=['-started_at'], limit=1)
166 if last_build and last_build[0]:
167 last_complete_at = last_build[0]['complete_at']
168 if last_complete_at and (last_complete_at > complete_at):
169 return last_complete_at
173 @defer.inlineCallbacks
174 def prioritizeBuilders(master, builders):
175 """Returns sorted list of builders by their last timestamp of completed and
178 @returns: list of sorted builders
# A builder with an active build (or one being reclaimed) counts as busy.
181 def is_building(bldr):
182 return bool(bldr.building) or bool(bldr.old_building)
# Resolve each builder's newest completion timestamp asynchronously.
185 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
186 d.addCallback(lambda complete_at: (complete_at, bldr))
# Sort-key helper (its def line and some branches are elided in this chunk):
# substitutes a timezone-aware fallback timestamp where none is available.
190 (complete_at, bldr) = item
194 complete_at = date.replace(tzinfo=tzutc())
196 if is_building(bldr):
198 complete_at = date.replace(tzinfo=tzutc())
200 return (complete_at, bldr.name)
# Oldest-completed builders first, so idle builders get work soonest.
202 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
203 results.sort(key=bldr_sort)
206 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
208 return [r[1] for r in results]
210 c['prioritizeBuilders'] = prioritizeBuilders
212 ####### CHANGESOURCES
# Master-side working directory and helper-script location.
214 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
215 scripts_dir = os.path.abspath("../scripts")
# Optional build-tree expiry age; unit handled by expire.sh on the worker.
227 if ini.has_option("phase1", "expire"):
228 tree_expire = ini.getint("phase1", "expire")
# Optional compiler version constraint; a single value means "eq <version>".
230 if ini.has_option("phase1", "cc_version"):
231 cc_version = ini.get("phase1", "cc_version").split()
232 if len(cc_version) == 1:
233 cc_version = ["eq", cc_version[0]]
# Optional SSH access for git (feed updates use the key configured below).
235 if ini.has_option("general", "git_ssh"):
236 git_ssh = ini.getboolean("general", "git_ssh")
238 if ini.has_option("general", "git_ssh_key"):
239 git_ssh_key = ini.get("general", "git_ssh_key")
# Optional seed contents for the initial .config.
243 if ini.has_option("phase1", "config_seed"):
244 config_seed = ini.get("phase1", "config_seed")
246 repo_url = ini.get("repo", "url")
247 repo_branch = "master"
249 if ini.has_option("repo", "branch"):
250 repo_branch = ini.get("repo", "branch")
# Rsync endpoint and credentials for uploading built binaries.
252 rsync_bin_url = ini.get("rsync", "binary_url")
253 rsync_bin_key = ini.get("rsync", "binary_password")
254 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
# --contimeout is only valid for rsync-daemon URLs (host::module or rsync://).
256 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
257 rsync_bin_defopts += ["--contimeout=20"]
261 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
263 if ini.has_option("rsync", "source_url"):
264 rsync_src_url = ini.get("rsync", "source_url")
265 rsync_src_key = ini.get("rsync", "source_password")
267 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
268 rsync_src_defopts += ["--contimeout=20"]
# usign signing key and the human-readable comment line for the public key.
271 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
273 if ini.has_option("usign", "key"):
274 usign_key = ini.get("usign", "key")
276 if ini.has_option("usign", "comment"):
277 usign_comment = ini.get("usign", "comment")
279 enable_kmod_archive = False
280 embed_kmod_repository = False
282 if ini.has_option("phase1", "kmod_archive"):
283 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
285 if ini.has_option("phase1", "kmod_repository"):
286 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
# Maintain a shallow master-side clone of the source tree and derive the
# target list from it via dump-target-info.pl.
292 if not os.path.isdir(work_dir+'/source.git'):
293 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
295 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
297 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
298 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
299 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
302 line = findtargets.stdout.readline()
# Each output line starts with "<target>/<subtarget>"; keep the first field.
305 ta = line.decode().strip().split(' ')
306 targets.append(ta[0])
309 # the 'change_source' setting tells the buildmaster how it should find out
310 # about source code changes. Here we point to the buildbot clone of pyflakes.
312 c['change_source'] = []
# Poll the source repository for new commits on the configured branch.
313 c['change_source'].append(GitPoller(
315 workdir=work_dir+'/work.git', branch=repo_branch,
320 # Configure the Schedulers, which decide how to react to incoming changes. In this
321 # case, just kick off a 'basebuild' build
# Force-scheduler parameter that offers the remote release tags matching the
# current branch's base version as a drop-down choice list.
323 class TagChoiceParameter(BaseParameter):
324 spec_attributes = ["strict", "choices"]
328 def __init__(self, name, label=None, **kw):
329 super().__init__(name, label, **kw)
330 self._choice_list = []
# Choice-list computation (its enclosing def line is elided in this chunk):
# extract the "x.y" base version from the branch name, then list matching
# "vx.y.z[-rcN]" tags advertised by the remote repository.
335 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
338 findtags = subprocess.Popen(
339 ['git', 'ls-remote', '--tags', repo_url],
340 stdout = subprocess.PIPE)
343 line = findtags.stdout.readline()
348 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
350 if tagver and tagver[1].find(basever[1]) == 0:
351 taglist.append(tagver[1])
# Suffix final releases with '-z' so they sort after their -rc candidates;
# an empty first entry means "no tag selected".
353 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
354 taglist.insert(0, '')
356 self._choice_list = taglist
358 return self._choice_list
360 def parse_from_arg(self, s):
# In strict mode, only values from the computed choice list are accepted.
361 if self.strict and s not in self._choice_list:
362 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# Automatic scheduler: build all targets when a change lands on the branch
# and the tree has been stable for 60 seconds.
366 c['schedulers'].append(SingleBranchScheduler(
368 change_filter = filter.ChangeFilter(branch=repo_branch),
369 treeStableTimer = 60,
370 builderNames = targets))
# Manual "force build" scheduler with a fixed codebase and a target picker.
372 c['schedulers'].append(ForceScheduler(
374 buttonName = "Force builds",
375 label = "Force build details",
376 builderNames = [ "00_force_build" ],
# Pin all codebase fields so forced builds always use the configured repo.
379 util.CodebaseParameter(
381 label = "Repository",
382 branch = util.FixedParameter(name = "branch", default = ""),
383 revision = util.FixedParameter(name = "revision", default = ""),
384 repository = util.FixedParameter(name = "repository", default = ""),
385 project = util.FixedParameter(name = "project", default = "")
389 reason = util.StringParameter(
392 default = "Trigger build",
398 util.NestedParameter(
400 label="Build Options",
# "all" or one specific target from the discovered target list.
403 util.ChoiceStringParameter(
405 label = "Build target",
407 choices = [ "all" ] + targets
421 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
422 # what steps, and which workers can execute them. Note that any particular build will
423 # only take place on one worker.
# (pattern, make-target) pairs selectable through the "clean" build property;
# the list's opening line is elided in this chunk.
426 [ "tools", "tools/clean" ],
427 [ "chain", "toolchain/clean" ],
428 [ "linux", "target/linux/clean" ],
429 [ "dir", "dirclean" ],
430 [ "dist", "distclean" ]
# doStepIf factory: run the clean step only when the user-supplied "clean"
# property matches *pattern*.
433 def IsMakeCleanRequested(pattern):
434 def CheckCleanProperty(step):
435 val = step.getProperty("clean")
436 if val and re.match(pattern, val):
441 return CheckCleanProperty
def IsSharedWorkdir(step):
    # True when this build's worker was registered with the 'shared_wd'
    # property (single-build workers sharing one work directory).
    shared = step.getProperty("shared_wd")
    return bool(shared)
# Cleanup only applies to workers with a private (non-shared) workdir and
# the 'do_cleanup' worker property set; some branches are elided here.
446 def IsCleanupRequested(step):
447 if IsSharedWorkdir(step):
449 do_cleanup = step.getProperty("do_cleanup")
# Tree expiry runs only on non-shared workdirs when full cleanup was not
# requested (the shared-workdir branch's body is elided here).
455 def IsExpireRequested(step):
456 if IsSharedWorkdir(step):
459 return not IsCleanupRequested(step)
# A "fresh" git checkout is tied to the worker's 'do_cleanup' property;
# the remaining branches of this function are elided in this chunk.
461 def IsGitFreshRequested(step):
462 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    # The plain 'clean' checkout method is used whenever a 'fresh'
    # checkout was not requested for this build.
    fresh = IsGitFreshRequested(step)
    return not fresh
# Tag builds are requested by setting the 'tag' property to a full
# "x.y.z" or "x.y.z-rcN" version; the return lines are elided here.
471 def IsTaggingRequested(step):
472 val = step.getProperty("tag")
473 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    # Inverse of IsTaggingRequested, for steps that must only run on
    # untagged (snapshot) builds.
    tagging = IsTaggingRequested(step)
    return not tagging
def IsNoMasterBuild(step):
    # True on release branches, i.e. whenever this master is not tracking
    # the development branch "master".
    on_master = (repo_branch == "master")
    return not on_master
# Extract the numeric base version ("x.y") from a "name-x.y" branch name;
# the fallback return for other branch names is elided in this chunk.
484 def GetBaseVersion():
485 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
486 return repo_branch.split('-')[1]
# Upload-path version prefix: "<tag>/" for tagged builds,
# "<basever>-SNAPSHOT/" on release branches (fallback lines elided).
491 def GetVersionPrefix(props):
492 basever = GetBaseVersion()
493 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
494 return "%s/" % props["tag"]
495 elif basever != "master":
496 return "%s-SNAPSHOT/" % basever
# Parallel job count: worker CPU count divided among its concurrent builds.
501 def GetNumJobs(props):
502 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
503 return str(int(int(props["nproc"]) / props["max_builds"]))
# C compiler command discovered on the worker (its def line is elided).
509 if props.hasProperty("cc_command"):
510 return props["cc_command"]
# C++ compiler command discovered on the worker (its def line is elided).
516 if props.hasProperty("cxx_command"):
517 return props["cxx_command"]
# Effective build directory: explicit 'builddir' wins over 'workdir'
# (enclosing def line elided).
523 if props.hasProperty("builddir"):
524 return props["builddir"]
525 elif props.hasProperty("workdir"):
526 return props["workdir"]
# ccache wrapper command, when a usable ccache was detected on the worker.
531 def GetCCache(props):
532 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
533 return props["ccache_command"]
# Build-request chooser: prefer requests carrying a 'tag' property so
# release builds jump the queue (surrounding lines elided).
537 def GetNextBuild(builder, requests):
539 if r.properties and r.properties.hasProperty("tag"):
543 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
# Assemble the step environment; with tryccache, CC/CXX are routed through
# the ccache wrapper scripts downloaded to the worker.
546 def MakeEnv(overrides=None, tryccache=False):
548 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
549 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
552 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
553 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
554 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
556 env['CC'] = env['CCC']
557 env['CXX'] = env['CCXX']
559 if overrides is not None:
560 env.update(overrides)
# Renderable lock list: the worker's exclusive download lock, if configured
# (the empty-list fallback is elided in this chunk).
564 def NetLockDl(props):
566 if props.hasProperty("dl_lock"):
567 lock = NetLocks[props["dl_lock"]]
569 return [lock.access('exclusive')]
# Renderable lock list: the worker's exclusive upload lock, if configured.
574 def NetLockUl(props):
576 if props.hasProperty("ul_lock"):
577 lock = NetLocks[props["ul_lock"]]
579 return [lock.access('exclusive')]
# Extract the 'tag' entry from the force-build nested 'options' parameter.
584 def TagPropertyValue(props):
585 if props.hasProperty("options"):
586 options = props.getProperty("options")
587 if type(options) is dict:
588 return options.get("tag")
# doStepIf factory: run the step only when the forced-build target selection
# is "all" or matches this builder's *target*.
591 def IsTargetSelected(target):
592 def CheckTargetProperty(step):
594 options = step.getProperty("options")
595 if type(options) is dict:
596 selected_target = options.get("target", "all")
597 if selected_target != "all" and selected_target != target:
604 return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key file contents from a base64 secret key.

	Returns "<comment>\n<base64 public blob>" with a trailing "secret key"
	in the comment rewritten to "public key", or None when *seckey* is not
	valid base64. The public blob is assembled from bytes 0-2 (algorithm),
	32-40 (key id) and 72+ (public key) of the decoded secret key --
	assumed usign on-disk layout, TODO confirm against usign docs.
	"""
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		return None

	# BUG FIX: base64.b64encode() returns bytes on Python 3; formatting it
	# straight into a str embedded a literal "b'...'" in the generated
	# key-build.pub. Decode to ASCII so the file contains plain base64.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode('ascii'))
# Per-worker lock serialising download-heavy steps.
618 dlLock = locks.WorkerLock("worker_dl")
# Shell function (collapsed to one line) that returns success when any file
# in $CHANGED_FILES belongs to a package enabled (=y) in the current .config;
# several lines of the shell text are elided in this chunk.
620 checkBuiltin = re.sub('[\t\n ]+', ' ', """
622 local symbol op path file;
623 for file in $CHANGED_FILES; do
629 while read symbol op path; do
630 case "$symbol" in package-*)
631 symbol="${symbol##*(}";
632 symbol="${symbol%)}";
633 for file in $CHANGED_FILES; do
634 case "$file" in "package/$path/"*)
635 grep -qsx "$symbol=y" .config && return 0
639 done < tmp/.packagedeps;
# ShellCommand variant that only runs its command when the checkBuiltin
# shell helper reports that a changed file affects a built-in package.
645 class IfBuiltinShellCommand(ShellCommand):
# Shell-quote one argument, but only when it contains unsafe characters
# (the plain-return branch is elided in this chunk).
646 def _quote(self, str):
647 if re.search("[^a-zA-Z0-9/_.-]", str):
648 return "'%s'" %(re.sub("'", "'\"'\"'", str))
651 def setCommand(self, command):
652 if not isinstance(command, (str, unicode)):
653 command = ' '.join(map(self._quote, command))
656 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
# Inject the set of files touched by this build's changes into the step
# environment so the checkBuiltin shell helper can read $CHANGED_FILES.
659 def setupEnvironment(self, cmd):
660 workerEnv = self.workerEnvironment
661 if workerEnv is None:
# Collect every changed file across all requests/sources, de-duplicated
# via dict keys (the dict initialisation line is elided in this chunk).
664 for request in self.build.requests:
665 for source in request.sources:
666 for change in source.changes:
667 for file in change.files:
668 changedFiles[file] = True
669 fullSlaveEnv = workerEnv.copy()
670 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
671 cmd.args['env'] = fullSlaveEnv
# Collect the registered worker names for builder assignment
# (the workerNames list initialisation is elided in this chunk).
675 for worker in c['workers']:
676 workerNames.append(worker.workername)
678 force_factory = BuildFactory()
# Pseudo-builder targeted by the ForceScheduler; it triggers the real
# per-target builders.
680 c['builders'].append(BuilderConfig(
681 name = "00_force_build",
682 workernames = workerNames,
683 factory = force_factory))
685 for target in targets:
686 ts = target.split('/')
688 factory = BuildFactory()
690 # setup shared work directory if required
691 factory.addStep(ShellCommand(
693 description = "Setting up shared work directory",
694 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
696 haltOnFailure = True,
697 doStepIf = IsSharedWorkdir))
699 # find number of cores
700 factory.addStep(SetPropertyFromCommand(
703 description = "Finding number of CPUs",
704 command = ["nproc"]))
706 # find gcc and g++ compilers
707 factory.addStep(FileDownload(
708 name = "dlfindbinpl",
709 mastersrc = scripts_dir + '/findbin.pl',
710 workerdest = "../findbin.pl",
713 factory.addStep(SetPropertyFromCommand(
715 property = "cc_command",
716 description = "Finding gcc command",
718 "../findbin.pl", "gcc",
719 cc_version[0] if cc_version is not None else '',
720 cc_version[1] if cc_version is not None else ''
722 haltOnFailure = True))
724 factory.addStep(SetPropertyFromCommand(
726 property = "cxx_command",
727 description = "Finding g++ command",
729 "../findbin.pl", "g++",
730 cc_version[0] if cc_version is not None else '',
731 cc_version[1] if cc_version is not None else ''
733 haltOnFailure = True))
735 # see if ccache is available
736 factory.addStep(SetPropertyFromCommand(
737 property = "ccache_command",
738 command = ["which", "ccache"],
739 description = "Testing for ccache command",
740 haltOnFailure = False,
741 flunkOnFailure = False,
742 warnOnFailure = False,
745 # expire tree if needed
747 factory.addStep(FileDownload(
749 doStepIf = IsExpireRequested,
750 mastersrc = scripts_dir + '/expire.sh',
751 workerdest = "../expire.sh",
754 factory.addStep(ShellCommand(
756 description = "Checking for build tree expiry",
757 command = ["./expire.sh", str(tree_expire)],
759 haltOnFailure = True,
760 doStepIf = IsExpireRequested,
763 # cleanup.sh if needed
764 factory.addStep(FileDownload(
765 name = "dlcleanupsh",
766 mastersrc = scripts_dir + '/cleanup.sh',
767 workerdest = "../cleanup.sh",
769 doStepIf = IsCleanupRequested))
771 factory.addStep(ShellCommand(
773 description = "Cleaning previous builds",
774 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
776 haltOnFailure = True,
777 doStepIf = IsCleanupRequested,
780 factory.addStep(ShellCommand(
782 description = "Cleaning work area",
783 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
785 haltOnFailure = True,
786 doStepIf = IsCleanupRequested,
789 # user-requested clean targets
790 for tuple in CleanTargetMap:
791 factory.addStep(ShellCommand(
793 description = 'User-requested "make %s"' % tuple[1],
794 command = ["make", tuple[1], "V=s"],
796 doStepIf = IsMakeCleanRequested(tuple[0])
799 # Workaround bug when switching from a checked out tag back to a branch
800 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
801 factory.addStep(ShellCommand(
802 name = "gitcheckout",
803 description = "Ensure that Git HEAD is sane",
804 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
805 haltOnFailure = True))
807 # check out the source
809 # if repo doesn't exist: 'git clone repourl'
810 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
811 # 'git fetch -t repourl branch; git reset --hard revision'
812 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
813 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
817 branch = repo_branch,
821 haltOnFailure = True,
822 doStepIf = IsGitCleanRequested,
828 branch = repo_branch,
832 haltOnFailure = True,
833 doStepIf = IsGitFreshRequested,
837 factory.addStep(ShellCommand(
839 description = "Fetching Git remote refs",
840 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
845 factory.addStep(ShellCommand(
847 description = "Checking out Git tag",
848 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
849 haltOnFailure = True,
850 doStepIf = IsTaggingRequested
853 # Verify that Git HEAD points to a tag or branch
854 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
855 factory.addStep(ShellCommand(
857 description = "Ensure that Git HEAD is pointing to a branch or tag",
858 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
859 haltOnFailure = True))
861 factory.addStep(ShellCommand(
863 description = "Remove tmp folder",
864 command=["rm", "-rf", "tmp/"]))
867 # factory.addStep(ShellCommand(
868 # name = "feedsconf",
869 # description = "Copy the feeds.conf",
870 # command='''cp ~/feeds.conf ./feeds.conf''' ))
873 factory.addStep(ShellCommand(
874 name = "rmfeedlinks",
875 description = "Remove feed symlinks",
876 command=["rm", "-rf", "package/feeds/"]))
878 factory.addStep(StringDownload(
880 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
881 workerdest = "../ccache_cc.sh",
885 factory.addStep(StringDownload(
887 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
888 workerdest = "../ccache_cxx.sh",
894 factory.addStep(StringDownload(
895 name = "dlgitclonekey",
897 workerdest = "../git-clone.key",
901 factory.addStep(ShellCommand(
902 name = "patchfeedsconf",
903 description = "Patching feeds.conf",
904 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
909 factory.addStep(ShellCommand(
910 name = "updatefeeds",
911 description = "Updating feeds",
912 command=["./scripts/feeds", "update"],
913 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
914 haltOnFailure = True,
920 factory.addStep(ShellCommand(
921 name = "rmfeedsconf",
922 description = "Removing feeds.conf",
923 command=["rm", "feeds.conf"],
928 factory.addStep(ShellCommand(
929 name = "installfeeds",
930 description = "Installing feeds",
931 command=["./scripts/feeds", "install", "-a"],
932 env = MakeEnv(tryccache=True),
937 if config_seed is not None:
938 factory.addStep(StringDownload(
939 name = "dlconfigseed",
940 s = config_seed + '\n',
941 workerdest = ".config",
946 factory.addStep(ShellCommand(
948 description = "Seeding .config",
949 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
952 factory.addStep(ShellCommand(
954 description = "Removing output directory",
955 command = ["rm", "-rf", "bin/"]
958 factory.addStep(ShellCommand(
960 description = "Populating .config",
961 command = ["make", "defconfig"],
966 factory.addStep(ShellCommand(
968 description = "Checking architecture",
969 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
977 factory.addStep(SetPropertyFromCommand(
980 description = "Finding libc suffix",
981 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
984 if usign_key is not None:
985 factory.addStep(StringDownload(
986 name = "dlkeybuildpub",
987 s = UsignSec2Pub(usign_key, usign_comment),
988 workerdest = "key-build.pub",
992 factory.addStep(StringDownload(
994 s = "# fake private key",
995 workerdest = "key-build",
999 factory.addStep(StringDownload(
1000 name = "dlkeybuilducert",
1001 s = "# fake certificate",
1002 workerdest = "key-build.ucert",
1007 factory.addStep(ShellCommand(
1009 description = "Preparing dl/",
1010 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1016 factory.addStep(ShellCommand(
1018 description = "Building and installing GNU tar",
1019 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1020 env = MakeEnv(tryccache=True),
1021 haltOnFailure = True
1025 factory.addStep(ShellCommand(
1027 description = "Populating dl/",
1028 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1031 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
1034 factory.addStep(ShellCommand(
1036 description = "Cleaning base-files",
1037 command=["make", "package/base-files/clean", "V=s"]
1041 factory.addStep(ShellCommand(
1043 description = "Building and installing tools",
1044 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
1045 env = MakeEnv(tryccache=True),
1046 haltOnFailure = True
1049 factory.addStep(ShellCommand(
1051 description = "Building and installing toolchain",
1052 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1054 haltOnFailure = True
1057 factory.addStep(ShellCommand(
1059 description = "Building kmods",
1060 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1062 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1063 haltOnFailure = True
1066 # find kernel version
1067 factory.addStep(SetPropertyFromCommand(
1068 name = "kernelversion",
1069 property = "kernelversion",
1070 description = "Finding the effective Kernel version",
1071 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1072 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1075 factory.addStep(ShellCommand(
1077 description = "Cleaning up package build",
1078 command=["make", "package/cleanup", "V=s"]
1081 factory.addStep(ShellCommand(
1083 description = "Building packages",
1084 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1086 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1087 haltOnFailure = True
1090 # factory.addStep(IfBuiltinShellCommand(
1091 factory.addStep(ShellCommand(
1092 name = "pkginstall",
1093 description = "Installing packages",
1094 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1096 haltOnFailure = True
1099 factory.addStep(ShellCommand(
1101 description = "Indexing packages",
1102 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1104 haltOnFailure = True
1107 if enable_kmod_archive and embed_kmod_repository:
1108 # embed kmod repository. Must happen before 'images'
1110 # find rootfs staging directory
1111 factory.addStep(SetPropertyFromCommand(
1113 property = "stageroot",
1114 description = "Finding the rootfs staging directory",
1115 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1116 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1120 factory.addStep(ShellCommand(
1122 description = "Creating file overlay directory",
1123 command=["mkdir", "-p", "files/etc/opkg"],
1124 haltOnFailure = True
1127 factory.addStep(ShellCommand(
1128 name = "kmodconfig",
1129 description = "Embedding kmod repository configuration",
1130 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1131 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1132 haltOnFailure = True
1135 #factory.addStep(IfBuiltinShellCommand(
1136 factory.addStep(ShellCommand(
1138 description = "Building and installing images",
1139 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1141 haltOnFailure = True
1144 factory.addStep(ShellCommand(
1146 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1147 command = "make -j1 buildinfo V=s || true",
1149 haltOnFailure = True
1152 factory.addStep(ShellCommand(
1153 name = "json_overview_image_info",
1154 description = "Generate profiles.json in target folder",
1155 command = "make -j1 json_overview_image_info V=s || true",
1157 haltOnFailure = True
1160 factory.addStep(ShellCommand(
1162 description = "Calculating checksums",
1163 command=["make", "-j1", "checksum", "V=s"],
1165 haltOnFailure = True
1168 if enable_kmod_archive:
1169 factory.addStep(ShellCommand(
1171 description = "Creating kmod directory",
1172 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1173 haltOnFailure = True
1176 factory.addStep(ShellCommand(
1177 name = "kmodprepare",
1178 description = "Preparing kmod archive",
1179 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1180 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1181 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1182 haltOnFailure = True
1185 factory.addStep(ShellCommand(
1187 description = "Indexing kmod archive",
1188 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1189 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1191 haltOnFailure = True
1195 if ini.has_option("gpg", "key") or usign_key is not None:
1196 factory.addStep(MasterShellCommand(
1197 name = "signprepare",
1198 description = "Preparing temporary signing directory",
1199 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1200 haltOnFailure = True
1203 factory.addStep(ShellCommand(
1205 description = "Packing files to sign",
1206 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1207 haltOnFailure = True
1210 factory.addStep(FileUpload(
1211 workersrc = "sign.tar.gz",
1212 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1213 haltOnFailure = True
1216 factory.addStep(MasterShellCommand(
1218 description = "Signing files",
1219 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1220 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1221 haltOnFailure = True
1224 factory.addStep(FileDownload(
1225 name = "dlsigntargz",
1226 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1227 workerdest = "sign.tar.gz",
1228 haltOnFailure = True
1231 factory.addStep(ShellCommand(
1232 name = "signunpack",
1233 description = "Unpacking signed files",
1234 command = ["tar", "-xzf", "sign.tar.gz"],
1235 haltOnFailure = True
# Build the local tmp/upload/ skeleton that mirrors the remote layout,
# then push the (empty) directory structure to the remote rsync target.
# GetVersionPrefix presumably yields e.g. "releases/<ver>/" or "" for
# snapshot builds — confirm against its definition earlier in the file.
1239 factory.addStep(ShellCommand(
1240 name = "dirprepare",
1241 description = "Preparing upload directory structure",
1242 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1243 haltOnFailure = True
# For non-master (release) builds, point <prefix>packages at the shared
# per-base-version package tree via a relative symlink. -f makes the
# step idempotent across rebuilds.
1246 factory.addStep(ShellCommand(
1247 name = "linkprepare",
1248 description = "Preparing repository symlink",
1249 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1250 doStepIf = IsNoMasterBuild,
1251 haltOnFailure = True
# Mirror the kmods/<kernelversion> directory in the skeleton as well,
# when the kmod archive feature is enabled.
1254 if enable_kmod_archive:
1255 factory.addStep(ShellCommand(
1256 name = "kmoddirprepare",
1257 description = "Preparing kmod archive upload directory",
1258 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1259 haltOnFailure = True
# Sync the directory skeleton (and symlink) to the remote bin URL.
# Credentials go via RSYNC_PASSWORD in the environment, not the URL.
1262 factory.addStep(ShellCommand(
1264 description = "Uploading directory structure",
1265 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1266 env={'RSYNC_PASSWORD': rsync_bin_key},
1267 haltOnFailure = True,
# Incremental-upload preparation: fetch the remote sha256sums, diff them
# against the local ones with sha2rsync.pl, and produce "rsynclist" — the
# set of files that actually changed and need uploading.
1272 # download remote sha256sums to 'target-sha256sums'
# Best effort: a missing remote file (first upload of a target) must not
# fail the build, hence all three failure flags are off.
1273 factory.addStep(ShellCommand(
1274 name = "target-sha256sums",
1275 description = "Fetching remote sha256sums for target",
1276 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1277 env={'RSYNC_PASSWORD': rsync_bin_key},
1279 haltOnFailure = False,
1280 flunkOnFailure = False,
1281 warnOnFailure = False,
1284 # build list of files to upload
# Helper scripts are fetched from the master's scripts dir into the
# worker's parent directory (outside the build tree, so a clean build
# does not remove them mid-run, presumably).
1285 factory.addStep(FileDownload(
1286 name = "dlsha2rsyncpl",
1287 mastersrc = scripts_dir + '/sha2rsync.pl',
1288 workerdest = "../sha2rsync.pl",
# Compare remote vs. local checksums; writes the changed-file list to
# "rsynclist" for the --files-from uploads below.
1292 factory.addStep(ShellCommand(
1294 description = "Building list of files to upload",
1295 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1296 haltOnFailure = True,
# rsync wrapper script used by all subsequent upload steps.
1299 factory.addStep(FileDownload(
1300 name = "dlrsync.sh",
1301 mastersrc = scripts_dir + '/rsync.sh',
1302 workerdest = "../rsync.sh",
# Main artifact uploads. All use the rsync.sh wrapper with a per-target
# --partial-dir (".~tmp~<target>~<subtarget>") so interrupted transfers
# resume without clashing between builders, and --delay-updates so the
# remote tree flips near-atomically at the end of the transfer.
1306 # upload new files and update existing ones
# Upload only the changed files (rsynclist from sha2rsync.pl); /kmods/
# is excluded here because it is synced separately below.
1307 factory.addStep(ShellCommand(
1308 name = "targetupload",
1309 description = "Uploading target files",
1310 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1311 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1312 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1313 env={'RSYNC_PASSWORD': rsync_bin_key},
1314 haltOnFailure = True,
1318 # delete files which don't exist locally
# Second pass: --delete --existing --ignore-existing transfers nothing
# new and only removes remote files absent locally (stale artifacts).
1319 factory.addStep(ShellCommand(
1320 name = "targetprune",
1321 description = "Pruning target files",
1322 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1323 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1324 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1325 env={'RSYNC_PASSWORD': rsync_bin_key},
1326 haltOnFailure = True,
# Full sync (--delete) of the per-kernel kmod archive, when enabled.
1331 if enable_kmod_archive:
1332 factory.addStep(ShellCommand(
1333 name = "kmodupload",
1334 description = "Uploading kmod archive",
1335 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1336 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1337 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1338 env={'RSYNC_PASSWORD': rsync_bin_key},
1339 haltOnFailure = True,
# Optional source-archive mirror: only when a source rsync URL is
# configured. Selects plain files in dl/ newer than .config, skipping
# hidden files, *.hash and *.dl partials, and zero-byte downloads.
1344 if rsync_src_url is not None:
1345 factory.addStep(ShellCommand(
1346 name = "sourcelist",
1347 description = "Finding source archives to upload",
1348 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1349 haltOnFailure = True
# --size-only: tarballs are immutable per filename, so size comparison
# is a sufficient (and cheap) change test. The partial-dir also embeds
# the worker name since many workers share the one source mirror.
1352 factory.addStep(ShellCommand(
1353 name = "sourceupload",
1354 description = "Uploading source archives",
1355 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1356 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1357 env={'RSYNC_PASSWORD': rsync_src_key},
1358 haltOnFailure = True,
# Best-effort uploads: packages and logs. Both warn (warnOnFailure=True)
# but never fail or flunk the build — a mirror hiccup here should not
# invalidate an otherwise successful build.
1364 factory.addStep(ShellCommand(
1365 name = "packageupload",
1366 description = "Uploading package files",
1367 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1368 env={'RSYNC_PASSWORD': rsync_bin_key},
1369 haltOnFailure = False,
1370 flunkOnFailure = False,
1371 warnOnFailure = True,
# Build logs go to a per-target/subtarget path under logs/ on the
# remote; -z added since logs compress well.
1378 factory.addStep(ShellCommand(
1380 description = "Uploading logs",
1381 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1382 env={'RSYNC_PASSWORD': rsync_bin_key},
1383 haltOnFailure = False,
1384 flunkOnFailure = False,
1385 warnOnFailure = True,
# Purely informational tail steps: disk usage, tree size, and ccache
# statistics. All are allowed to fail silently (no halt/flunk/warn).
# LC_ALL=C pins the df/du output format for anything parsing the logs.
1391 factory.addStep(ShellCommand(
1393 description = "Reporting disk usage",
1394 command=["df", "-h", "."],
1395 env={'LC_ALL': 'C'},
1396 haltOnFailure = False,
1397 flunkOnFailure = False,
1398 warnOnFailure = False,
1402 factory.addStep(ShellCommand(
1404 description = "Reporting estimated file space usage",
1405 command=["du", "-sh", "."],
1406 env={'LC_ALL': 'C'},
1407 haltOnFailure = False,
1408 flunkOnFailure = False,
1409 warnOnFailure = False,
# ccache may live in the buildroot's host staging dir, hence PATH is
# extended via MakeEnv rather than relying on the system ccache.
1413 factory.addStep(ShellCommand(
1414 name = "ccachestat",
1415 description = "Reporting ccache stats",
1416 command=["ccache", "-s"],
1417 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1418 want_stderr = False,
1419 haltOnFailure = False,
1420 flunkOnFailure = False,
1421 warnOnFailure = False,
# Per-target registration (end of the targets loop, presumably): one
# builder using the factory assembled above, one Triggerable scheduler
# named trigger_<target>, and a Trigger step on the shared force-build
# factory so a force build can fan out to selected targets only
# (gated by IsTargetSelected(target)).
1425 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1427 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1428 force_factory.addStep(steps.Trigger(
1429 name = "trigger_%s" % target,
1430 description = "Triggering %s build" % target,
1431 schedulerNames = [ "trigger_%s" % target ],
# Propagate the force-build reason and tag into the triggered build.
1432 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1433 doStepIf = IsTargetSelected(target)
1437 ####### STATUS TARGETS
1439 # 'status' is a list of Status Targets. The results of each build will be
1440 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1441 # including web pages, email senders, and IRC bots.
# Web UI: only configured when [phase1] status_bind is set. The elided
# lines here (gaps in the embedded numbering) presumably assign
# c['www'] = { 'port': ..., 'plugins': { ... } } — restore from the
# original file.
1443 if ini.has_option("phase1", "status_bind"):
1445 'port': ini.get("phase1", "status_bind"),
1447 'waterfall_view': True,
1448 'console_view': True,
# Optional single-admin basic auth; the same username gets the "admins"
# role, and only admins may hit control endpoints (force, stop, ...).
1453 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1454 c['www']['auth'] = util.UserPasswordAuth([
1455 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1457 c['www']['authz'] = util.Authz(
1458 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1459 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# IRC reporter: requires host, nickname and channel; port and password
# are optional (defaults are set on elided lines, presumably).
1463 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1464 irc_host = ini.get("irc", "host")
1466 irc_chan = ini.get("irc", "channel")
1467 irc_nick = ini.get("irc", "nickname")
1470 if ini.has_option("irc", "port"):
1471 irc_port = ini.getint("irc", "port")
1473 if ini.has_option("irc", "password"):
1474 irc_pass = ini.get("irc", "password")
# Only exceptional events are announced, to keep channel noise low.
1476 irc = reporters.IRC(irc_host, irc_nick,
1478 password = irc_pass,
1479 channels = [ irc_chan ],
1480 notify_events = [ 'exception', 'problem', 'recovery' ]
1483 c['services'].append(irc)
# Turn git.openwrt.org clone URLs into gitweb commit links in the UI.
1485 c['revlink'] = util.RevlinkMatch([
1486 r'https://git.openwrt.org/openwrt/(.*).git'
1488 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
# Persistent state lives in a local SQLite file next to the master
# (the enclosing c['db'] = { ... } assignment is on elided lines).
1493 # This specifies what database buildbot uses to store its state. You can leave
1494 # this at its default for all but the largest installations.
1495 'db_url' : "sqlite:///state.sqlite",
# Opt out of buildbot's phone-home usage reporting entirely.
1498 c['buildbotNetUsageData'] = None