2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record our own PID for external management, but never clobber a pidfile
# that twistd (or a previous master) already left behind.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as handle:
        handle.write(str(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Master settings live in an ini file; BUILDMASTER_CONFIG may point at an
# alternate location, otherwise ./config.ini is used.
ini = configparser.ConfigParser()
ini.read(os.environ.get("BUILDMASTER_CONFIG", "./config.ini"))
# This is the dictionary that the buildmaster pays attention to; 'c' is the
# conventional short alias used throughout the rest of this file.
BuildmasterConfig = {}
c = BuildmasterConfig
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# TCP port the workers connect to, overridable via [phase1] "port".
# NOTE(review): the default assignment for worker_port is outside this view.
if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")
# Register every "worker <name>" ini section that carries credentials and
# belongs to phase 1 (no "phase" option, or phase == 1).
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker properties consumed by doStepIf helpers below.
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Bind a default so max_builds is always defined even when the
            # section has no "builds" option (it is read again further down).
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    # A single-build worker can safely share its workdir.
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
109 if ini.has_option(section, "ul_lock"):
110 lockname = ini.get(section, "dl_lock")
111 sl_props['ul_lock'] = lockname
112 if lockname not in NetLocks:
113 NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                # Sharing one work directory is only safe when the worker
                # runs a single build at a time.
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option).
c['protocols'] = {'pb': {'port': worker_port}}
# Coalesce queued build requests for the same builder into a single build.
c['collapseRequests'] = True
129 # Reduce amount of backlog data
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    # NOTE(review): several interior lines of this function were elided in
    # this view; the bracketing/early-return glue below is reconstructed.
    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also look at the most recently started build of this builder: it may
    # have finished later than the newest completed build request.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    # NOTE(review): helper 'def' lines and the datetime.min/max sentinels were
    # elided in this view and are reconstructed below.

    def is_building(bldr):
        # True while the builder has running (or interrupted) builds.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible timestamp).
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders sort last (newest possible timestamp).
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: the local name shadows the imported buildbot.process.results
    # module inside this function.
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
# Install the hook so the longest-idle builder gets the next build slot.
c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

# Local checkout and helper-script locations used by the master itself.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
# Optional tuning knobs; NOTE(review): the fallback defaults for these
# variables are assigned outside this view.
if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
    other_builds = ini.getint("phase1", "other_builds")

if ini.has_option("phase1", "cc_version"):
    cc_version = ini.get("phase1", "cc_version").split()
    if len(cc_version) == 1:
        # A single value means "exactly this compiler version".
        cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")

# Optional initial .config content pushed to workers before defconfig.
if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")
# Source repository to build, defaulting to its master branch.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# Destination for built binaries plus the rsync options used everywhere.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout is only valid when talking to an rsync daemon
# ("host::module" or rsync:// URLs), not over a remote shell.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]
# Optional mirror for source tarballs (dl/), same option logic as above.
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    # --contimeout only applies to rsync-daemon style URLs.
    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]
# usign package-signing key: default comment derived from the branch name,
# both overridable from the [usign] section.
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

# Whether to archive kernel-module packages per kernel version, and whether
# to embed that kmod feed into the generated images' opkg configuration.
enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
# Maintain the master's local source tree (clone on first run, pull after)
# and ask it which build targets exist.
# NOTE(review): the else-branch and loop-termination lines were elided in
# this view and are reconstructed.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

targets = []
while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    # Each output line is "<target/subtarget> ..."; keep the first token.
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes.
# NOTE(review): the poller's positional URL argument and poll interval were
# elided in this view and are reconstructed.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
324 # Configure the Schedulers, which decide how to react to incoming changes. In this
325 # case, just kick off a 'basebuild' build
class TagChoiceParameter(BaseParameter):
    """ForceScheduler parameter that offers the release tags of the current
    branch as choices, refreshed from the remote on each form render.

    NOTE(review): the class attributes between spec_attributes and __init__
    and the property/def lines around the choices body were elided in this
    view; they are reconstructed here.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        # Release branches are named like "openwrt-21.02"; only their
        # matching "v21.02.*" tags are offered.
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()
                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

        # Sort newest-first; the "-z" suffix makes final releases sort ahead
        # of their own -rc tags. The leading empty choice means "no tag".
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist
        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
# Build every target when the tracked branch receives commits, after the
# tree has been stable for 60 seconds.
# NOTE(review): the scheduler's name argument was elided in this view.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))
# Manual "force build" form: fixed codebase fields, a reason, and nested
# options selecting a target (or "all") and an optional release tag.
# NOTE(review): several keyword-argument and bracket lines were elided in
# this view; the reconstruction below is hedged.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name = "options",
            label = "Build Options",
            layout = "vertical",
            fields = [
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
425 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
426 # what steps, and which workers can execute them. Note that any particular build will
427 # only take place on one worker.
430 [ "tools", "tools/clean" ],
431 [ "chain", "toolchain/clean" ],
432 [ "linux", "target/linux/clean" ],
433 [ "dir", "dirclean" ],
434 [ "dist", "distclean" ]
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern* (a regular expression)."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        # bool() collapses both "property unset" (None) and "no match".
        return bool(val and re.match(pattern, val))

    return CheckCleanProperty
def IsSharedWorkdir(step):
    """doStepIf helper: true when the worker advertises a shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
def IsCleanupRequested(step):
    """doStepIf helper: run cleanup.sh only on workers with a dedicated
    work directory that explicitly requested cleanup."""
    if IsSharedWorkdir(step):
        # Shared workdirs are maintained via the expiry mechanism instead.
        return False
    return bool(step.getProperty("do_cleanup"))
def IsExpireRequested(step):
    """doStepIf helper: run expire.sh when the worker neither shares its
    workdir nor performs an explicit cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
def IsGitFreshRequested(step):
    """doStepIf helper: use Git "fresh" mode when cleanup was requested."""
    return bool(step.getProperty("do_cleanup"))
def IsGitCleanRequested(step):
    # The "clean" and "fresh" Git checkout steps are mutually exclusive.
    return not IsGitFreshRequested(step)
def IsTaggingRequested(step):
    """doStepIf helper: true when the "tag" property names a valid release
    tag such as 1.2.3 or 1.2.3-rc4."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
def IsNoTaggingRequested(step):
    # Complement of IsTaggingRequested for snapshot-only steps.
    return not IsTaggingRequested(step)
def IsNoMasterBuild(step):
    """doStepIf helper: true for every branch except master."""
    return not (repo_branch == "master")
def GetBaseVersion():
    """Numeric base version from a release branch name like "openwrt-21.02",
    or the sentinel "master" otherwise.

    Returning "master" explicitly (rather than falling through to None)
    matches the comparison GetVersionPrefix() performs on the result.
    """
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
    return "master"
def GetVersionPrefix(props):
    """Upload-directory prefix: "<tag>/" for tagged release builds,
    "<base>-SNAPSHOT/" on release branches, "" on master.

    Returns "" explicitly for the master case instead of falling through
    to None, since the result is concatenated into rsync paths.
    """
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
def GetNumJobs(props):
    """Number of parallel make jobs as a string (it is interpolated into
    "-jN"): the worker's CPU count divided among its concurrent builds plus
    the configured other_builds headroom; "1" when properties are missing."""
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
    return "1"
# NOTE(review): the def lines and fallback returns of these two renderers
# were elided in this view and are reconstructed.
def GetCC(props):
    # Compiler discovered by the findbin step, falling back to plain gcc.
    if props.hasProperty("cc_command"):
        return props["cc_command"]
    return "gcc"

def GetCXX(props):
    # Compiler discovered by the findbin step, falling back to plain g++.
    if props.hasProperty("cxx_command"):
        return props["cxx_command"]
    return "g++"
# NOTE(review): the def line and fallback return were elided in this view
# and are reconstructed.
def GetCwd(props):
    # Build directory on the worker; used for the ccache wrapper scripts
    # and TOPDIR-relative make invocations.
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "."
def GetCCache(props):
    # ccache binary detected on the worker, or "" when unavailable.
    # NOTE(review): the fallback return was elided in this view.
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
def GetNextBuild(builder, requests):
    # Serve tagged (release) build requests ahead of ordinary ones.
    # NOTE(review): the loop header and fallback selection were elided in
    # this view and are reconstructed.
    for r in requests:
        if r.properties and r.properties.hasProperty("tag"):
            return r

    # Otherwise take the oldest pending request.
    r = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
    return r
def MakeEnv(overrides=None, tryccache=False):
    # Build-step environment: CCC/CCXX always carry the real compilers; CC
    # and CXX point at the ccache wrapper scripts when tryccache is set.
    # NOTE(review): the dict delimiters, branch keywords and return were
    # elided in this view and are reconstructed.
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']
    if overrides is not None:
        env.update(overrides)
    return env
def NetLockDl(props):
    # Lock list for download rsync steps: the worker's dl_lock held
    # exclusively, or no locks when none is configured.
    # NOTE(review): interior glue lines were elided and are reconstructed.
    lock = None
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    return []
def NetLockUl(props):
    # Lock list for upload rsync steps: the worker's ul_lock held
    # exclusively, or no locks when none is configured.
    # NOTE(review): interior glue lines were elided and are reconstructed.
    lock = None
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    return []
def TagPropertyValue(props):
    """Extract the "tag" entry from the force-build "options" dict property,
    or None when the property is absent or not a dict."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # isinstance is the idiomatic type check (type(...) is dict rejects
        # dict subclasses for no benefit here).
        if isinstance(options, dict):
            return options.get("tag")
    return None
def IsTargetSelected(target):
    """Return a doStepIf predicate that is true unless the force-build
    options explicitly select a different target."""
    def CheckTargetProperty(step):
        # NOTE(review): the guard around getProperty was elided in this
        # view; a KeyError guard is assumed — confirm against the original.
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_target = options.get("target", "all")
                if selected_target != "all" and selected_target != target:
                    return False
        except KeyError:
            pass
        return True

    return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive usign public key material from a base64-encoded secret key.

    The public portion consists of the two-byte algorithm marker, the
    eight-byte key id and the trailing public key bytes. Returns the
    comment line (with "secret key" rewritten to "public key") followed by
    the base64 public key on the next line.
    """
    seckey = base64.b64decode(seckey)

    # b64encode() returns bytes; decode() it so the generated key-build.pub
    # does not end up containing a literal "b'...'" repr.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
# Per-worker lock so only one build at a time populates the shared dl/ dir.
dlLock = locks.WorkerLock("worker_dl")
# Shell helper embedded into IfBuiltinShellCommand's sh -c argument: exits 0
# when any changed file belongs to a package that the current .config builds
# in (or when a non-package file changed). Whitespace is collapsed so the
# whole function fits on one command line.
# NOTE(review): several interior shell lines were elided in this view and
# are reconstructed — verify against the original before relying on them.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
    checkBuiltin() {
        local symbol op path file;
        for file in $CHANGED_FILES; do
            case "$file" in
                package/*/*) : ;;
                *) return 0 ;;
            esac;
        done;
        while read symbol op path; do
            case "$symbol" in package-*)
                symbol="${symbol##*(}";
                symbol="${symbol%)}";
                for file in $CHANGED_FILES; do
                    case "$file" in "package/$path/"*)
                        grep -qsx "$symbol=y" .config && return 0
                    ;; esac;
                done;
            ;; esac;
        done < tmp/.packagedeps;
        return 1;
    }
""")
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only runs its payload when checkBuiltin (fed with
    CHANGED_FILES) reports that a changed package is built into .config."""

    def _quote(self, text):
        # Single-quote any argument containing shell-special characters.
        if re.search("[^a-zA-Z0-9/_.-]", text):
            return "'%s'" %(re.sub("'", "'\"'\"'", text))
        return text

    def setCommand(self, command):
        # Fix: the original tested isinstance(command, (str, unicode));
        # 'unicode' does not exist in Python 3 and raises NameError.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Export the list of files touched by the triggering changes so the
        # embedded checkBuiltin shell function can inspect them.
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullSlaveEnv = workerEnv.copy()
        fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullSlaveEnv
# Names of all phase-1 workers; builders that may run anywhere use this
# list. The comprehension also guarantees workerNames is always bound (the
# visible code appended to a name with no initialization in view).
workerNames = [worker.workername for worker in c['workers']]
# Placeholder builder targeted by the ForceScheduler; per-target trigger
# steps are appended to force_factory further below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
689 for target in targets:
690 ts = target.split('/')
692 factory = BuildFactory()
694 # setup shared work directory if required
695 factory.addStep(ShellCommand(
697 description = "Setting up shared work directory",
698 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
700 haltOnFailure = True,
701 doStepIf = IsSharedWorkdir))
703 # find number of cores
704 factory.addStep(SetPropertyFromCommand(
707 description = "Finding number of CPUs",
708 command = ["nproc"]))
710 # find gcc and g++ compilers
711 factory.addStep(FileDownload(
712 name = "dlfindbinpl",
713 mastersrc = scripts_dir + '/findbin.pl',
714 workerdest = "../findbin.pl",
717 factory.addStep(SetPropertyFromCommand(
719 property = "cc_command",
720 description = "Finding gcc command",
722 "../findbin.pl", "gcc",
723 cc_version[0] if cc_version is not None else '',
724 cc_version[1] if cc_version is not None else ''
726 haltOnFailure = True))
728 factory.addStep(SetPropertyFromCommand(
730 property = "cxx_command",
731 description = "Finding g++ command",
733 "../findbin.pl", "g++",
734 cc_version[0] if cc_version is not None else '',
735 cc_version[1] if cc_version is not None else ''
737 haltOnFailure = True))
739 # see if ccache is available
740 factory.addStep(SetPropertyFromCommand(
741 property = "ccache_command",
742 command = ["which", "ccache"],
743 description = "Testing for ccache command",
744 haltOnFailure = False,
745 flunkOnFailure = False,
746 warnOnFailure = False,
749 # expire tree if needed
751 factory.addStep(FileDownload(
753 doStepIf = IsExpireRequested,
754 mastersrc = scripts_dir + '/expire.sh',
755 workerdest = "../expire.sh",
758 factory.addStep(ShellCommand(
760 description = "Checking for build tree expiry",
761 command = ["./expire.sh", str(tree_expire)],
763 haltOnFailure = True,
764 doStepIf = IsExpireRequested,
767 # cleanup.sh if needed
768 factory.addStep(FileDownload(
769 name = "dlcleanupsh",
770 mastersrc = scripts_dir + '/cleanup.sh',
771 workerdest = "../cleanup.sh",
773 doStepIf = IsCleanupRequested))
775 factory.addStep(ShellCommand(
777 description = "Cleaning previous builds",
778 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
780 haltOnFailure = True,
781 doStepIf = IsCleanupRequested,
784 factory.addStep(ShellCommand(
786 description = "Cleaning work area",
787 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
789 haltOnFailure = True,
790 doStepIf = IsCleanupRequested,
793 # user-requested clean targets
794 for tuple in CleanTargetMap:
795 factory.addStep(ShellCommand(
797 description = 'User-requested "make %s"' % tuple[1],
798 command = ["make", tuple[1], "V=s"],
800 doStepIf = IsMakeCleanRequested(tuple[0])
803 # Workaround bug when switching from a checked out tag back to a branch
804 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
805 factory.addStep(ShellCommand(
806 name = "gitcheckout",
807 description = "Ensure that Git HEAD is sane",
808 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
809 haltOnFailure = True))
811 # check out the source
813 # if repo doesn't exist: 'git clone repourl'
814 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
815 # 'git fetch -t repourl branch; git reset --hard revision'
816 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
817 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
821 branch = repo_branch,
824 haltOnFailure = True,
825 doStepIf = IsGitCleanRequested,
831 branch = repo_branch,
834 haltOnFailure = True,
835 doStepIf = IsGitFreshRequested,
839 factory.addStep(ShellCommand(
841 description = "Fetching Git remote refs",
842 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
847 factory.addStep(ShellCommand(
849 description = "Checking out Git tag",
850 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
851 haltOnFailure = True,
852 doStepIf = IsTaggingRequested
855 # Verify that Git HEAD points to a tag or branch
856 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
857 factory.addStep(ShellCommand(
859 description = "Ensure that Git HEAD is pointing to a branch or tag",
860 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
861 haltOnFailure = True))
863 factory.addStep(ShellCommand(
865 description = "Remove tmp folder",
866 command=["rm", "-rf", "tmp/"]))
869 # factory.addStep(ShellCommand(
870 # name = "feedsconf",
871 # description = "Copy the feeds.conf",
872 # command='''cp ~/feeds.conf ./feeds.conf''' ))
875 factory.addStep(ShellCommand(
876 name = "rmfeedlinks",
877 description = "Remove feed symlinks",
878 command=["rm", "-rf", "package/feeds/"]))
880 factory.addStep(StringDownload(
882 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
883 workerdest = "../ccache_cc.sh",
887 factory.addStep(StringDownload(
889 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
890 workerdest = "../ccache_cxx.sh",
896 factory.addStep(StringDownload(
897 name = "dlgitclonekey",
899 workerdest = "../git-clone.key",
903 factory.addStep(ShellCommand(
904 name = "patchfeedsconf",
905 description = "Patching feeds.conf",
906 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
911 factory.addStep(ShellCommand(
912 name = "updatefeeds",
913 description = "Updating feeds",
914 command=["./scripts/feeds", "update"],
915 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
921 factory.addStep(ShellCommand(
922 name = "rmfeedsconf",
923 description = "Removing feeds.conf",
924 command=["rm", "feeds.conf"],
929 factory.addStep(ShellCommand(
930 name = "installfeeds",
931 description = "Installing feeds",
932 command=["./scripts/feeds", "install", "-a"],
933 env = MakeEnv(tryccache=True),
938 if config_seed is not None:
939 factory.addStep(StringDownload(
940 name = "dlconfigseed",
941 s = config_seed + '\n',
942 workerdest = ".config",
947 factory.addStep(ShellCommand(
949 description = "Seeding .config",
950 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
953 factory.addStep(ShellCommand(
955 description = "Removing output directory",
956 command = ["rm", "-rf", "bin/"]
959 factory.addStep(ShellCommand(
961 description = "Populating .config",
962 command = ["make", "defconfig"],
967 factory.addStep(ShellCommand(
969 description = "Checking architecture",
970 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
978 factory.addStep(SetPropertyFromCommand(
981 description = "Finding libc suffix",
982 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
985 if usign_key is not None:
986 factory.addStep(StringDownload(
987 name = "dlkeybuildpub",
988 s = UsignSec2Pub(usign_key, usign_comment),
989 workerdest = "key-build.pub",
993 factory.addStep(StringDownload(
995 s = "# fake private key",
996 workerdest = "key-build",
1000 factory.addStep(StringDownload(
1001 name = "dlkeybuilducert",
1002 s = "# fake certificate",
1003 workerdest = "key-build.ucert",
1008 factory.addStep(ShellCommand(
1010 description = "Preparing dl/",
1011 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1017 factory.addStep(ShellCommand(
1019 description = "Building and installing GNU tar",
1020 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1021 env = MakeEnv(tryccache=True),
1022 haltOnFailure = True
1026 factory.addStep(ShellCommand(
1028 description = "Populating dl/",
1029 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1032 locks = [dlLock.access('exclusive')],
1035 factory.addStep(ShellCommand(
1037 description = "Cleaning base-files",
1038 command=["make", "package/base-files/clean", "V=s"]
1042 factory.addStep(ShellCommand(
1044 description = "Building and installing tools",
1045 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
1046 env = MakeEnv(tryccache=True),
1047 haltOnFailure = True
1050 factory.addStep(ShellCommand(
1052 description = "Building and installing toolchain",
1053 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1055 haltOnFailure = True
1058 factory.addStep(ShellCommand(
1060 description = "Building kmods",
1061 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1063 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1064 haltOnFailure = True
1067 # find kernel version
1068 factory.addStep(SetPropertyFromCommand(
1069 name = "kernelversion",
1070 property = "kernelversion",
1071 description = "Finding the effective Kernel version",
1072 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1073 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1076 factory.addStep(ShellCommand(
1078 description = "Cleaning up package build",
1079 command=["make", "package/cleanup", "V=s"]
1082 factory.addStep(ShellCommand(
1084 description = "Building packages",
1085 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1087 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1088 haltOnFailure = True
1091 # factory.addStep(IfBuiltinShellCommand(
1092 factory.addStep(ShellCommand(
1093 name = "pkginstall",
1094 description = "Installing packages",
1095 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1097 haltOnFailure = True
1100 factory.addStep(ShellCommand(
1102 description = "Indexing packages",
1103 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1105 haltOnFailure = True
1108 if enable_kmod_archive and embed_kmod_repository:
1109 # embed kmod repository. Must happen before 'images'
1111 # find rootfs staging directory
1112 factory.addStep(SetPropertyFromCommand(
1114 property = "stageroot",
1115 description = "Finding the rootfs staging directory",
1116 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1117 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1121 factory.addStep(ShellCommand(
1123 description = "Creating file overlay directory",
1124 command=["mkdir", "-p", "files/etc/opkg"],
1125 haltOnFailure = True
1128 factory.addStep(ShellCommand(
1129 name = "kmodconfig",
1130 description = "Embedding kmod repository configuration",
1131 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1132 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1133 haltOnFailure = True
1136 #factory.addStep(IfBuiltinShellCommand(
1137 factory.addStep(ShellCommand(
1139 description = "Building and installing images",
1140 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1142 haltOnFailure = True
1145 factory.addStep(ShellCommand(
1147 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1148 command = "make -j1 buildinfo V=s || true",
1150 haltOnFailure = True
1153 factory.addStep(ShellCommand(
1154 name = "json_overview_image_info",
1155 description = "Generate profiles.json in target folder",
1156 command = "make -j1 json_overview_image_info V=s || true",
1158 haltOnFailure = True
1161 factory.addStep(ShellCommand(
1163 description = "Calculating checksums",
1164 command=["make", "-j1", "checksum", "V=s"],
1166 haltOnFailure = True
1169 if enable_kmod_archive:
1170 factory.addStep(ShellCommand(
1172 description = "Creating kmod directory",
1173 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1174 haltOnFailure = True
1177 factory.addStep(ShellCommand(
1178 name = "kmodprepare",
1179 description = "Preparing kmod archive",
1180 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1181 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1182 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1183 haltOnFailure = True
1186 factory.addStep(ShellCommand(
1188 description = "Indexing kmod archive",
1189 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1190 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1192 haltOnFailure = True
# Artifact signing round-trip, enabled when a GPG key is configured in the
# ini or a usign key is present. Flow: worker packs the files to sign into
# sign.tar.gz, uploads it to the master, the master runs signall.sh over it,
# the signed tarball is downloaded back and unpacked in place.
1196 if ini.has_option("gpg", "key") or usign_key is not None:
# Master-side scratch directory; one tarball per (target, subtarget) pair.
1197 factory.addStep(MasterShellCommand(
1198 name = "signprepare",
1199 description = "Preparing temporary signing directory",
1200 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1201 haltOnFailure = True
# Collect only sha256sums and Packages files (depth 1-2) from the target
# dir and its kmods/ subtree; -print0 | xargs -0 keeps odd filenames safe.
1204 factory.addStep(ShellCommand(
1206 description = "Packing files to sign",
1207 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1208 haltOnFailure = True
1211 factory.addStep(FileUpload(
1212 workersrc = "sign.tar.gz",
1213 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1214 haltOnFailure = True
# signall.sh signs the tarball in place on the master; it reads its key
# material via the CONFIG_INI environment variable.
1217 factory.addStep(MasterShellCommand(
1219 description = "Signing files",
1220 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1221 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1222 haltOnFailure = True
1225 factory.addStep(FileDownload(
1226 name = "dlsigntargz",
1227 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1228 workerdest = "sign.tar.gz",
1229 haltOnFailure = True
# Unpack over the originals: relative paths inside the tarball land the
# signed files back where they were collected from.
1232 factory.addStep(ShellCommand(
1233 name = "signunpack",
1234 description = "Unpacking signed files",
1235 command = ["tar", "-xzf", "sign.tar.gz"],
1236 haltOnFailure = True
# Build the local tmp/upload/ staging tree that mirrors the remote layout
# (<prefix>targets/<target>/<subtarget>).
1240 factory.addStep(ShellCommand(
1241 name = "dirprepare",
1242 description = "Preparing upload directory structure",
1243 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1244 haltOnFailure = True
# For non-master builds only (doStepIf=IsNoMasterBuild): point
# <prefix>packages at the shared ../packages-<basever> repository.
# ln -s -f replaces any stale symlink from a previous run.
1247 factory.addStep(ShellCommand(
1248 name = "linkprepare",
1249 description = "Preparing repository symlink",
1250 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1251 doStepIf = IsNoMasterBuild,
1252 haltOnFailure = True
# When the kmod archive is enabled, also pre-create its per-kernel-version
# subdirectory in the staging tree.
1255 if enable_kmod_archive:
1256 factory.addStep(ShellCommand(
1257 name = "kmoddirprepare",
1258 description = "Preparing kmod archive upload directory",
1259 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1260 haltOnFailure = True
# Push the (mostly empty) staging tree to the remote so the directory
# structure and symlink exist before file uploads start. rsync auth comes
# from RSYNC_PASSWORD in the environment.
1263 factory.addStep(ShellCommand(
1265 description = "Uploading directory structure",
1266 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1267 env={'RSYNC_PASSWORD': rsync_bin_key},
1268 haltOnFailure = True,
1272 # download remote sha256sums to 'target-sha256sums'
# Deliberately best-effort (all three failure flags off): presumably the
# remote sha256sums does not exist on a first upload — TODO confirm that
# sha2rsync.pl below tolerates a missing local copy.
1273 factory.addStep(ShellCommand(
1274 name = "target-sha256sums",
1275 description = "Fetching remote sha256sums for target",
1276 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1277 env={'RSYNC_PASSWORD': rsync_bin_key},
1279 haltOnFailure = False,
1280 flunkOnFailure = False,
1281 warnOnFailure = False,
1284 # build list of files to upload
# Fetch the helper scripts from the master into the parent of the build
# directory ("../") so they survive/avoid the build tree itself.
1285 factory.addStep(FileDownload(
1286 name = "dlsha2rsyncpl",
1287 mastersrc = scripts_dir + '/sha2rsync.pl',
1288 workerdest = "../sha2rsync.pl",
# sha2rsync.pl diffs the remote checksums (target-sha256sums) against the
# freshly built local sha256sums and writes the changed files to 'rsynclist'.
1292 factory.addStep(ShellCommand(
1294 description = "Building list of files to upload",
1295 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1296 haltOnFailure = True,
# rsync.sh is the wrapper used by all upload/prune steps below.
1299 factory.addStep(FileDownload(
1300 name = "dlrsync.sh",
1301 mastersrc = scripts_dir + '/rsync.sh',
1302 workerdest = "../rsync.sh",
1306 # upload new files and update existing ones
# Upload pass: only files named in rsynclist, excluding /kmods/ (handled
# separately below). --delay-updates + a per-(target,subtarget) partial dir
# keeps the remote tree consistent while the transfer is in flight.
1307 factory.addStep(ShellCommand(
1308 name = "targetupload",
1309 description = "Uploading target files",
1310 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1311 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1312 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1313 env={'RSYNC_PASSWORD': rsync_bin_key},
1314 haltOnFailure = True,
1318 # delete files which don't exist locally
# Prune pass: --delete --existing --ignore-existing transfers nothing and
# only removes remote files absent locally (a standard rsync delete-only
# idiom), again leaving /kmods/ alone.
1319 factory.addStep(ShellCommand(
1320 name = "targetprune",
1321 description = "Pruning target files",
1322 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1323 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1324 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1325 env={'RSYNC_PASSWORD': rsync_bin_key},
1326 haltOnFailure = True,
# Mirror the per-kernel kmod archive to the remote; --delete keeps the
# remote kmods/<kernelversion>/ an exact copy of the local one.
1330 if enable_kmod_archive:
1331 factory.addStep(ShellCommand(
1332 name = "kmodupload",
1333 description = "Uploading kmod archive",
1334 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1335 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1336 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1337 env={'RSYNC_PASSWORD': rsync_bin_key},
1338 haltOnFailure = True,
# Optional source-archive mirroring (separate rsync target/credentials from
# the binary uploads).
1342 if rsync_src_url is not None:
# Select real downloads from dl/: top level only, non-empty, skipping
# dotfiles, *.hash and in-progress *.dl, and only files newer than .config
# (i.e. presumably touched by this build — TODO confirm the .config mtime
# assumption holds). Writes bare filenames, one per line, to 'sourcelist'.
1343 factory.addStep(ShellCommand(
1344 name = "sourcelist",
1345 description = "Finding source archives to upload",
1346 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1347 haltOnFailure = True
# --size-only: source tarballs are immutable by name, so size is a
# sufficient change test. Partial dir includes the worker name since
# multiple workers may upload into the same shared source tree.
1350 factory.addStep(ShellCommand(
1351 name = "sourceupload",
1352 description = "Uploading source archives",
1353 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1354 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1355 env={'RSYNC_PASSWORD': rsync_src_key},
1356 haltOnFailure = True,
# Mirror bin/packages/ to the remote packages/ tree. Best-effort: a failure
# only warns (warnOnFailure=True) and neither halts nor fails the build.
1361 factory.addStep(ShellCommand(
1362 name = "packageupload",
1363 description = "Uploading package files",
1364 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1365 env={'RSYNC_PASSWORD': rsync_bin_key},
1366 haltOnFailure = False,
1367 flunkOnFailure = False,
1368 warnOnFailure = True,
# Mirror build logs to logs/<target>/<subtarget>/ on the remote; -z added
# for compressible text. Best-effort like packageupload above.
1374 factory.addStep(ShellCommand(
1376 description = "Uploading logs",
1377 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1378 env={'RSYNC_PASSWORD': rsync_bin_key},
1379 haltOnFailure = False,
1380 flunkOnFailure = False,
1381 warnOnFailure = True,
# Purely informational diagnostics at the end of the build; LC_ALL=C keeps
# the output format locale-independent, and no failure mode is fatal.
1386 factory.addStep(ShellCommand(
1388 description = "Reporting disk usage",
1389 command=["df", "-h", "."],
1390 env={'LC_ALL': 'C'},
1391 haltOnFailure = False,
1392 flunkOnFailure = False,
1393 warnOnFailure = False,
# du of the whole build directory — can be slow on large trees, but is
# equally non-fatal.
1397 factory.addStep(ShellCommand(
1399 description = "Reporting estimated file space usage",
1400 command=["du", "-sh", "."],
1401 env={'LC_ALL': 'C'},
1402 haltOnFailure = False,
1403 flunkOnFailure = False,
1404 warnOnFailure = False,
# Report ccache hit/miss statistics; PATH is extended with the build's
# staging_dir host bin so a tree-local ccache is found if present.
1408 factory.addStep(ShellCommand(
1409 name = "ccachestat",
1410 description = "Reporting ccache stats",
1411 command=["ccache", "-s"],
1412 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1413 want_stderr = False,
1414 haltOnFailure = False,
1415 flunkOnFailure = False,
1416 warnOnFailure = False,
# End of the per-target loop body: register the builder, a Triggerable
# scheduler for it, and a Trigger step on the shared force-build factory
# gated by IsTargetSelected(target).
1420 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1422 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
# Forward the force-build 'reason' property and the computed tag into the
# triggered build.
1423 force_factory.addStep(steps.Trigger(
1424 name = "trigger_%s" % target,
1425 description = "Triggering %s build" % target,
1426 schedulerNames = [ "trigger_%s" % target ],
1427 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1428 doStepIf = IsTargetSelected(target)
1432 ####### STATUS TARGETS
1434 # 'status' is a list of Status Targets. The results of each build will be
1435 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1436 # including web pages, email senders, and IRC bots.
# Web UI: bound to the address/port from [phase1] status_bind.
# NOTE(review): the c['www'] = dict( opening line (original line ~1439) is
# elided from this extract.
1438 if ini.has_option("phase1", "status_bind"):
1440 'port': ini.get("phase1", "status_bind"),
1442 'waterfall_view': True,
1443 'console_view': True,
# Optional single-user auth; that user gets the "admins" role, and only
# admins may hit control endpoints.
1448 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1449 c['www']['auth'] = util.UserPasswordAuth([
1450 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1452 c['www']['authz'] = util.Authz(
1453 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1454 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# Optional IRC reporter, configured from the [irc] ini section; host,
# nickname and channel are all required before it is enabled.
1458 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1459 irc_host = ini.get("irc", "host")
1461 irc_chan = ini.get("irc", "channel")
1462 irc_nick = ini.get("irc", "nickname")
# Port and password are optional; their defaults are presumably set on the
# elided lines (original ~1460, ~1463-1464) — TODO confirm.
1465 if ini.has_option("irc", "port"):
1466 irc_port = ini.getint("irc", "port")
1468 if ini.has_option("irc", "password"):
1469 irc_pass = ini.get("irc", "password")
# Only exception/problem/recovery events are announced to keep the channel
# low-noise.
1471 irc = reporters.IRC(irc_host, irc_nick,
1473 password = irc_pass,
1474 channels = [ irc_chan ],
1475 notify_events = [ 'exception', 'problem', 'recovery' ]
1478 c['services'].append(irc)
# Map openwrt.org git clone URLs to gitweb commit links in the UI; \1 is the
# repository name captured by the pattern, %s the commit hash.
1480 c['revlink'] = util.RevlinkMatch([
1481 r'https://git.openwrt.org/openwrt/(.*).git'
1483 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1488 # This specifies what database buildbot uses to store its state. You can leave
1489 # this at its default for all but the largest installations.
# NOTE(review): the enclosing c['db'] = { opening line is elided from this
# extract.
1490 'db_url' : "sqlite:///state.sqlite",
# Explicitly opt out of Buildbot's anonymous usage reporting.
1493 c['buildbotNetUsageData'] = None