2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 # This is the dictionary that the buildmaster pays attention to. We also use
55 # a shorter alias to save typing.
56 c = BuildmasterConfig = {}
58 ####### PROJECT IDENTITY
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
64 c['title'] = ini.get("general", "title")
65 c['titleURL'] = ini.get("general", "title_url")
67 # the 'buildbotURL' string should point to the location where the buildbot's
68 # internal web server (usually the html.WebStatus page) is visible. This
69 # typically uses the port number set in the Waterfall 'status' entry, but
70 # with an externally-visible host name which the buildbot cannot figure out
73 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
77 # The 'workers' list defines the set of recognized buildworkers. Each element is
78 # a Worker object, specifying a unique worker name and password. The same
79 # worker name and password must be configured on the worker.
83 if ini.has_option("phase1", "port"):
84 worker_port = ini.get("phase1", "port")
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
# Per-worker download/upload lock configuration: each named lock is a
# master-side lock shared by every worker that references the same name.
if ini.has_option(section, "dl_lock"):
    lockname = ini.get(section, "dl_lock")
    sl_props['dl_lock'] = lockname
    # Create the master lock on first use so identical names share one lock.
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
if ini.has_option(section, "ul_lock"):
    # BUG FIX: this branch previously read ini.get(section, "dl_lock")
    # (copy-paste error), silently reusing the download lock's name as
    # the upload lock. Read the "ul_lock" option instead.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
122 # This must match the value configured into the buildworkers (with their
124 c['protocols'] = {'pb': {'port': worker_port}}
127 c['collapseRequests'] = True
129 # Reduce amount of backlog data
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
135 @defer.inlineCallbacks
136 def getNewestCompleteTime(bldr):
137 """Returns the complete_at of the latest completed and not SKIPPED
138 build request for this builder, or None if there are no such build
139 requests. We need to filter out SKIPPED requests because we're
140 using collapseRequests=True which is unfortunately marking all
141 previous requests as complete when new buildset is created.
143 @returns: datetime instance or None, via Deferred
146 bldrid = yield bldr.getBuilderId()
147 completed = yield bldr.master.data.get(
148 ('builders', bldrid, 'buildrequests'),
150 resultspec.Filter('complete', 'eq', [True]),
151 resultspec.Filter('results', 'ne', [results.SKIPPED]),
153 order=['-complete_at'], limit=1)
157 complete_at = completed[0]['complete_at']
159 last_build = yield bldr.master.data.get(
162 resultspec.Filter('builderid', 'eq', [bldrid]),
164 order=['-started_at'], limit=1)
166 if last_build and last_build[0]:
167 last_complete_at = last_build[0]['complete_at']
168 if last_complete_at and (last_complete_at > complete_at):
169 return last_complete_at
173 @defer.inlineCallbacks
174 def prioritizeBuilders(master, builders):
175 """Returns sorted list of builders by their last timestamp of completed and
178 @returns: list of sorted builders
def is_building(bldr):
    """Return True when the builder has any current or stale in-progress builds."""
    return bool(bldr.building or bldr.old_building)
185 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
186 d.addCallback(lambda complete_at: (complete_at, bldr))
190 (complete_at, bldr) = item
194 complete_at = date.replace(tzinfo=tzutc())
196 if is_building(bldr):
198 complete_at = date.replace(tzinfo=tzutc())
200 return (complete_at, bldr.name)
202 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
203 results.sort(key=bldr_sort)
206 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
208 return [r[1] for r in results]
210 c['prioritizeBuilders'] = prioritizeBuilders
212 ####### CHANGESOURCES
214 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
215 scripts_dir = os.path.abspath("../scripts")
226 if ini.has_option("phase1", "expire"):
227 tree_expire = ini.getint("phase1", "expire")
229 if ini.has_option("general", "git_ssh"):
230 git_ssh = ini.getboolean("general", "git_ssh")
232 if ini.has_option("general", "git_ssh_key"):
233 git_ssh_key = ini.get("general", "git_ssh_key")
237 if ini.has_option("phase1", "config_seed"):
238 config_seed = ini.get("phase1", "config_seed")
240 repo_url = ini.get("repo", "url")
241 repo_branch = "master"
243 if ini.has_option("repo", "branch"):
244 repo_branch = ini.get("repo", "branch")
246 rsync_bin_url = ini.get("rsync", "binary_url")
247 rsync_bin_key = ini.get("rsync", "binary_password")
248 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
250 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
251 rsync_bin_defopts += ["--contimeout=20"]
255 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
257 if ini.has_option("rsync", "source_url"):
258 rsync_src_url = ini.get("rsync", "source_url")
259 rsync_src_key = ini.get("rsync", "source_password")
261 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
262 rsync_src_defopts += ["--contimeout=20"]
265 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
267 if ini.has_option("usign", "key"):
268 usign_key = ini.get("usign", "key")
270 if ini.has_option("usign", "comment"):
271 usign_comment = ini.get("usign", "comment")
273 enable_kmod_archive = False
274 embed_kmod_repository = False
276 if ini.has_option("phase1", "kmod_archive"):
277 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
279 if ini.has_option("phase1", "kmod_repository"):
280 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
286 if not os.path.isdir(work_dir+'/source.git'):
287 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
289 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
291 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
292 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
293 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
296 line = findtargets.stdout.readline()
299 ta = line.decode().strip().split(' ')
300 targets.append(ta[0])
303 # the 'change_source' setting tells the buildmaster how it should find out
304 # about source code changes. Here we point to the buildbot clone of pyflakes.
306 c['change_source'] = []
307 c['change_source'].append(GitPoller(
309 workdir=work_dir+'/work.git', branch=repo_branch,
314 # Configure the Schedulers, which decide how to react to incoming changes. In this
315 # case, just kick off a 'basebuild' build
317 class TagChoiceParameter(BaseParameter):
318 spec_attributes = ["strict", "choices"]
322 def __init__(self, name, label=None, **kw):
323 super().__init__(name, label, **kw)
324 self._choice_list = []
329 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
332 findtags = subprocess.Popen(
333 ['git', 'ls-remote', '--tags', repo_url],
334 stdout = subprocess.PIPE)
337 line = findtags.stdout.readline()
342 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
344 if tagver and tagver[1].find(basever[1]) == 0:
345 taglist.append(tagver[1])
347 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
348 taglist.insert(0, '')
350 self._choice_list = taglist
352 return self._choice_list
354 def parse_from_arg(self, s):
355 if self.strict and s not in self._choice_list:
356 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
360 c['schedulers'].append(SingleBranchScheduler(
362 change_filter = filter.ChangeFilter(branch=repo_branch),
363 treeStableTimer = 60,
364 builderNames = targets))
366 c['schedulers'].append(ForceScheduler(
368 buttonName = "Force builds",
369 label = "Force build details",
370 builderNames = [ "00_force_build" ],
373 util.CodebaseParameter(
375 label = "Repository",
376 branch = util.FixedParameter(name = "branch", default = ""),
377 revision = util.FixedParameter(name = "revision", default = ""),
378 repository = util.FixedParameter(name = "repository", default = ""),
379 project = util.FixedParameter(name = "project", default = "")
383 reason = util.StringParameter(
386 default = "Trigger build",
392 util.NestedParameter(
394 label="Build Options",
397 util.ChoiceStringParameter(
399 label = "Build target",
401 choices = [ "all" ] + targets
415 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
416 # what steps, and which workers can execute them. Note that any particular build will
417 # only take place on one worker.
420 [ "tools", "tools/clean" ],
421 [ "chain", "toolchain/clean" ],
422 [ "linux", "target/linux/clean" ],
423 [ "dir", "dirclean" ],
424 [ "dist", "distclean" ]
427 def IsMakeCleanRequested(pattern):
428 def CheckCleanProperty(step):
429 val = step.getProperty("clean")
430 if val and re.match(pattern, val):
435 return CheckCleanProperty
def IsSharedWorkdir(step):
    """Return True when the worker's 'shared_wd' property is set and truthy."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
440 def IsCleanupRequested(step):
441 if IsSharedWorkdir(step):
443 do_cleanup = step.getProperty("do_cleanup")
449 def IsExpireRequested(step):
450 if IsSharedWorkdir(step):
453 return not IsCleanupRequested(step)
455 def IsGitFreshRequested(step):
456 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested: do a plain 'git clean' checkout
    whenever a fresh checkout was not requested."""
    if IsGitFreshRequested(step):
        return False
    return True
465 def IsTaggingRequested(step):
466 val = step.getProperty("tag")
467 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested: True for ordinary (non-tag) builds."""
    if IsTaggingRequested(step):
        return False
    return True
def IsNoMasterBuild(step):
    """Return True when building a release branch, i.e. the configured
    repo_branch is anything other than 'master'. The step argument is
    unused; it is required by the doStepIf callback signature."""
    return not (repo_branch == "master")
478 def GetBaseVersion():
479 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
480 return repo_branch.split('-')[1]
485 def GetVersionPrefix(props):
486 basever = GetBaseVersion()
487 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
488 return "%s/" % props["tag"]
489 elif basever != "master":
490 return "%s-SNAPSHOT/" % basever
495 def GetNumJobs(props):
496 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
497 return str(int(int(props["nproc"]) / props["max_builds"]))
503 if props.hasProperty("cc_command"):
504 return props["cc_command"]
510 if props.hasProperty("cxx_command"):
511 return props["cxx_command"]
517 if props.hasProperty("builddir"):
518 return props["builddir"]
519 elif props.hasProperty("workdir"):
520 return props["workdir"]
525 def GetCCache(props):
526 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
527 return props["ccache_command"]
531 def GetNextBuild(builder, requests):
533 if r.properties and r.properties.hasProperty("tag"):
537 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
540 def MakeEnv(overrides=None, tryccache=False):
542 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
543 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
546 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
547 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
548 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
550 env['CC'] = env['CCC']
551 env['CXX'] = env['CCXX']
553 if overrides is not None:
554 env.update(overrides)
558 def NetLockDl(props):
560 if props.hasProperty("dl_lock"):
561 lock = NetLocks[props["dl_lock"]]
563 return [lock.access('exclusive')]
568 def NetLockUl(props):
570 if props.hasProperty("ul_lock"):
571 lock = NetLocks[props["ul_lock"]]
573 return [lock.access('exclusive')]
578 def TagPropertyValue(props):
579 if props.hasProperty("options"):
580 options = props.getProperty("options")
581 if type(options) is dict:
582 return options.get("tag")
585 def IsTargetSelected(target):
586 def CheckTargetProperty(step):
588 options = step.getProperty("options")
589 if type(options) is dict:
590 selected_target = options.get("target", "all")
591 if selected_target != "all" and selected_target != target:
598 return CheckTargetProperty
600 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
602 seckey = base64.b64decode(seckey)
606 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
607 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
612 dlLock = locks.WorkerLock("worker_dl")
614 checkBuiltin = re.sub('[\t\n ]+', ' ', """
616 local symbol op path file;
617 for file in $CHANGED_FILES; do
623 while read symbol op path; do
624 case "$symbol" in package-*)
625 symbol="${symbol##*(}";
626 symbol="${symbol%)}";
627 for file in $CHANGED_FILES; do
628 case "$file" in "package/$path/"*)
629 grep -qsx "$symbol=y" .config && return 0
633 done < tmp/.packagedeps;
639 class IfBuiltinShellCommand(ShellCommand):
640 def _quote(self, str):
641 if re.search("[^a-zA-Z0-9/_.-]", str):
642 return "'%s'" %(re.sub("'", "'\"'\"'", str))
645 def setCommand(self, command):
646 if not isinstance(command, (str, unicode)):
647 command = ' '.join(map(self._quote, command))
650 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
653 def setupEnvironment(self, cmd):
654 workerEnv = self.workerEnvironment
655 if workerEnv is None:
658 for request in self.build.requests:
659 for source in request.sources:
660 for change in source.changes:
661 for file in change.files:
662 changedFiles[file] = True
663 fullSlaveEnv = workerEnv.copy()
664 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
665 cmd.args['env'] = fullSlaveEnv
669 for worker in c['workers']:
670 workerNames.append(worker.workername)
672 force_factory = BuildFactory()
674 c['builders'].append(BuilderConfig(
675 name = "00_force_build",
676 workernames = workerNames,
677 factory = force_factory))
679 for target in targets:
680 ts = target.split('/')
682 factory = BuildFactory()
684 # setup shared work directory if required
685 factory.addStep(ShellCommand(
687 description = "Setting up shared work directory",
688 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
690 haltOnFailure = True,
691 doStepIf = IsSharedWorkdir))
693 # find number of cores
694 factory.addStep(SetPropertyFromCommand(
697 description = "Finding number of CPUs",
698 command = ["nproc"]))
700 # find gcc and g++ compilers
701 factory.addStep(FileDownload(
702 name = "dlfindbinpl",
703 mastersrc = scripts_dir + '/findbin.pl',
704 workerdest = "../findbin.pl",
707 factory.addStep(SetPropertyFromCommand(
709 property = "cc_command",
710 description = "Finding gcc command",
712 "../findbin.pl", "gcc", "", "",
714 haltOnFailure = True))
716 factory.addStep(SetPropertyFromCommand(
718 property = "cxx_command",
719 description = "Finding g++ command",
721 "../findbin.pl", "g++", "", "",
723 haltOnFailure = True))
725 # see if ccache is available
726 factory.addStep(SetPropertyFromCommand(
727 property = "ccache_command",
728 command = ["which", "ccache"],
729 description = "Testing for ccache command",
730 haltOnFailure = False,
731 flunkOnFailure = False,
732 warnOnFailure = False,
735 # expire tree if needed
737 factory.addStep(FileDownload(
739 doStepIf = IsExpireRequested,
740 mastersrc = scripts_dir + '/expire.sh',
741 workerdest = "../expire.sh",
744 factory.addStep(ShellCommand(
746 description = "Checking for build tree expiry",
747 command = ["./expire.sh", str(tree_expire)],
749 haltOnFailure = True,
750 doStepIf = IsExpireRequested,
753 # cleanup.sh if needed
754 factory.addStep(FileDownload(
755 name = "dlcleanupsh",
756 mastersrc = scripts_dir + '/cleanup.sh',
757 workerdest = "../cleanup.sh",
759 doStepIf = IsCleanupRequested))
761 factory.addStep(ShellCommand(
763 description = "Cleaning previous builds",
764 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
766 haltOnFailure = True,
767 doStepIf = IsCleanupRequested,
770 factory.addStep(ShellCommand(
772 description = "Cleaning work area",
773 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
775 haltOnFailure = True,
776 doStepIf = IsCleanupRequested,
779 # user-requested clean targets
780 for tuple in CleanTargetMap:
781 factory.addStep(ShellCommand(
783 description = 'User-requested "make %s"' % tuple[1],
784 command = ["make", tuple[1], "V=s"],
786 doStepIf = IsMakeCleanRequested(tuple[0])
789 # Workaround bug when switching from a checked out tag back to a branch
790 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
791 factory.addStep(ShellCommand(
792 name = "gitcheckout",
793 description = "Ensure that Git HEAD is sane",
794 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
795 haltOnFailure = True))
797 # check out the source
799 # if repo doesn't exist: 'git clone repourl'
800 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
801 # 'git fetch -t repourl branch; git reset --hard revision'
802 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
803 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
807 branch = repo_branch,
811 haltOnFailure = True,
812 doStepIf = IsGitCleanRequested,
818 branch = repo_branch,
822 haltOnFailure = True,
823 doStepIf = IsGitFreshRequested,
827 factory.addStep(ShellCommand(
829 description = "Fetching Git remote refs",
830 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
835 factory.addStep(ShellCommand(
837 description = "Checking out Git tag",
838 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
839 haltOnFailure = True,
840 doStepIf = IsTaggingRequested
843 # Verify that Git HEAD points to a tag or branch
844 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
845 factory.addStep(ShellCommand(
847 description = "Ensure that Git HEAD is pointing to a branch or tag",
848 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
849 haltOnFailure = True))
851 factory.addStep(ShellCommand(
853 description = "Remove tmp folder",
854 command=["rm", "-rf", "tmp/"]))
857 # factory.addStep(ShellCommand(
858 # name = "feedsconf",
859 # description = "Copy the feeds.conf",
860 # command='''cp ~/feeds.conf ./feeds.conf''' ))
863 factory.addStep(ShellCommand(
864 name = "rmfeedlinks",
865 description = "Remove feed symlinks",
866 command=["rm", "-rf", "package/feeds/"]))
868 factory.addStep(StringDownload(
870 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
871 workerdest = "../ccache_cc.sh",
875 factory.addStep(StringDownload(
877 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
878 workerdest = "../ccache_cxx.sh",
884 factory.addStep(StringDownload(
885 name = "dlgitclonekey",
887 workerdest = "../git-clone.key",
891 factory.addStep(ShellCommand(
892 name = "patchfeedsconf",
893 description = "Patching feeds.conf",
894 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
899 factory.addStep(ShellCommand(
900 name = "updatefeeds",
901 description = "Updating feeds",
902 command=["./scripts/feeds", "update"],
903 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
904 haltOnFailure = True,
910 factory.addStep(ShellCommand(
911 name = "rmfeedsconf",
912 description = "Removing feeds.conf",
913 command=["rm", "feeds.conf"],
918 factory.addStep(ShellCommand(
919 name = "installfeeds",
920 description = "Installing feeds",
921 command=["./scripts/feeds", "install", "-a"],
922 env = MakeEnv(tryccache=True),
927 if config_seed is not None:
928 factory.addStep(StringDownload(
929 name = "dlconfigseed",
930 s = config_seed + '\n',
931 workerdest = ".config",
936 factory.addStep(ShellCommand(
938 description = "Seeding .config",
939 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
942 factory.addStep(ShellCommand(
944 description = "Removing output directory",
945 command = ["rm", "-rf", "bin/"]
948 factory.addStep(ShellCommand(
950 description = "Populating .config",
951 command = ["make", "defconfig"],
956 factory.addStep(ShellCommand(
958 description = "Checking architecture",
959 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
967 factory.addStep(SetPropertyFromCommand(
970 description = "Finding libc suffix",
971 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
974 if usign_key is not None:
975 factory.addStep(StringDownload(
976 name = "dlkeybuildpub",
977 s = UsignSec2Pub(usign_key, usign_comment),
978 workerdest = "key-build.pub",
982 factory.addStep(StringDownload(
984 s = "# fake private key",
985 workerdest = "key-build",
989 factory.addStep(StringDownload(
990 name = "dlkeybuilducert",
991 s = "# fake certificate",
992 workerdest = "key-build.ucert",
997 factory.addStep(ShellCommand(
999 description = "Preparing dl/",
1000 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1006 factory.addStep(ShellCommand(
1008 description = "Building and installing GNU tar",
1009 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1010 env = MakeEnv(tryccache=True),
1011 haltOnFailure = True
1015 factory.addStep(ShellCommand(
1017 description = "Populating dl/",
1018 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1021 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
1024 factory.addStep(ShellCommand(
1026 description = "Cleaning base-files",
1027 command=["make", "package/base-files/clean", "V=s"]
1031 factory.addStep(ShellCommand(
1033 description = "Building and installing tools",
1034 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
1035 env = MakeEnv(tryccache=True),
1036 haltOnFailure = True
1039 factory.addStep(ShellCommand(
1041 description = "Building and installing toolchain",
1042 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1044 haltOnFailure = True
1047 factory.addStep(ShellCommand(
1049 description = "Building kmods",
1050 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1052 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1053 haltOnFailure = True
1056 # find kernel version
1057 factory.addStep(SetPropertyFromCommand(
1058 name = "kernelversion",
1059 property = "kernelversion",
1060 description = "Finding the effective Kernel version",
1061 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1062 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1065 factory.addStep(ShellCommand(
1067 description = "Cleaning up package build",
1068 command=["make", "package/cleanup", "V=s"]
1071 factory.addStep(ShellCommand(
1073 description = "Building packages",
1074 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1076 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1077 haltOnFailure = True
1080 # factory.addStep(IfBuiltinShellCommand(
1081 factory.addStep(ShellCommand(
1082 name = "pkginstall",
1083 description = "Installing packages",
1084 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1086 haltOnFailure = True
1089 factory.addStep(ShellCommand(
1091 description = "Indexing packages",
1092 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1094 haltOnFailure = True
1097 if enable_kmod_archive and embed_kmod_repository:
1098 # embed kmod repository. Must happen before 'images'
1100 # find rootfs staging directory
1101 factory.addStep(SetPropertyFromCommand(
1103 property = "stageroot",
1104 description = "Finding the rootfs staging directory",
1105 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1106 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1110 factory.addStep(ShellCommand(
1112 description = "Creating file overlay directory",
1113 command=["mkdir", "-p", "files/etc/opkg"],
1114 haltOnFailure = True
1117 factory.addStep(ShellCommand(
1118 name = "kmodconfig",
1119 description = "Embedding kmod repository configuration",
1120 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1121 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1122 haltOnFailure = True
1125 #factory.addStep(IfBuiltinShellCommand(
1126 factory.addStep(ShellCommand(
1128 description = "Building and installing images",
1129 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1131 haltOnFailure = True
1134 factory.addStep(ShellCommand(
1136 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1137 command = "make -j1 buildinfo V=s || true",
1139 haltOnFailure = True
1142 factory.addStep(ShellCommand(
1143 name = "json_overview_image_info",
1144 description = "Generate profiles.json in target folder",
1145 command = "make -j1 json_overview_image_info V=s || true",
1147 haltOnFailure = True
1150 factory.addStep(ShellCommand(
1152 description = "Calculating checksums",
1153 command=["make", "-j1", "checksum", "V=s"],
1155 haltOnFailure = True
1158 if enable_kmod_archive:
1159 factory.addStep(ShellCommand(
1161 description = "Creating kmod directory",
1162 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1163 haltOnFailure = True
1166 factory.addStep(ShellCommand(
1167 name = "kmodprepare",
1168 description = "Preparing kmod archive",
1169 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1170 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1171 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1172 haltOnFailure = True
1175 factory.addStep(ShellCommand(
1177 description = "Indexing kmod archive",
1178 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1179 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1181 haltOnFailure = True
1185 if ini.has_option("gpg", "key") or usign_key is not None:
1186 factory.addStep(MasterShellCommand(
1187 name = "signprepare",
1188 description = "Preparing temporary signing directory",
1189 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1190 haltOnFailure = True
1193 factory.addStep(ShellCommand(
1195 description = "Packing files to sign",
1196 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1197 haltOnFailure = True
1200 factory.addStep(FileUpload(
1201 workersrc = "sign.tar.gz",
1202 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1203 haltOnFailure = True
1206 factory.addStep(MasterShellCommand(
1208 description = "Signing files",
1209 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1210 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1211 haltOnFailure = True
1214 factory.addStep(FileDownload(
1215 name = "dlsigntargz",
1216 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1217 workerdest = "sign.tar.gz",
1218 haltOnFailure = True
1221 factory.addStep(ShellCommand(
1222 name = "signunpack",
1223 description = "Unpacking signed files",
1224 command = ["tar", "-xzf", "sign.tar.gz"],
1225 haltOnFailure = True
# Build the local staging tree tmp/upload/<prefix>targets/<target>/<subtarget>
# that mirrors the remote layout before rsyncing it up.
1229 factory.addStep(ShellCommand(
1230 name = "dirprepare",
1231 description = "Preparing upload directory structure",
1232 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1233 haltOnFailure = True
# For non-master builds, point <prefix>packages at the shared
# ../packages-<basever> tree via a relative symlink (-f overwrites stale ones).
1236 factory.addStep(ShellCommand(
1237 name = "linkprepare",
1238 description = "Preparing repository symlink",
1239 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1240 doStepIf = IsNoMasterBuild,
1241 haltOnFailure = True
# When kmod archiving is enabled, also stage the per-kernel-version
# kmods/ directory inside the upload tree.
1244 if enable_kmod_archive:
1245 factory.addStep(ShellCommand(
1246 name = "kmoddirprepare",
1247 description = "Preparing kmod archive upload directory",
1248 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1249 haltOnFailure = True
# Push the (mostly empty) staging tree to the remote so the directory
# structure and symlinks exist before file uploads begin.
# RSYNC_PASSWORD is passed via the environment to avoid it appearing in argv.
1252 factory.addStep(ShellCommand(
1254 description = "Uploading directory structure",
1255 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1256 env={'RSYNC_PASSWORD': rsync_bin_key},
1257 haltOnFailure = True,
1262 # download remote sha256sums to 'target-sha256sums'
1263 factory.addStep(ShellCommand(
1264 name = "target-sha256sums",
1265 description = "Fetching remote sha256sums for target",
1266 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1267 env={'RSYNC_PASSWORD': rsync_bin_key},
1269 haltOnFailure = False,
1270 flunkOnFailure = False,
1271 warnOnFailure = False,
1274 # build list of files to upload
# Deploy the helper script from the master's scripts dir into the worker's
# parent directory (outside the build tree, so a clean doesn't remove it).
1275 factory.addStep(FileDownload(
1276 name = "dlsha2rsyncpl",
1277 mastersrc = scripts_dir + '/sha2rsync.pl',
1278 workerdest = "../sha2rsync.pl",
# Diff remote checksums (target-sha256sums) against the freshly built local
# sha256sums and emit 'rsynclist' — only changed/new files get uploaded.
1282 factory.addStep(ShellCommand(
1284 description = "Building list of files to upload",
1285 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1286 haltOnFailure = True,
# rsync.sh: wrapper used by all subsequent upload steps.
1289 factory.addStep(FileDownload(
1290 name = "dlrsync.sh",
1291 mastersrc = scripts_dir + '/rsync.sh',
1292 workerdest = "../rsync.sh",
1296 # upload new files and update existing ones
# Upload pass: only files named in rsynclist, excluding kmods/ (uploaded
# separately below). --delay-updates + a per-target/subtarget partial dir
# makes the remote update near-atomic and resumable.
1297 factory.addStep(ShellCommand(
1298 name = "targetupload",
1299 description = "Uploading target files",
1300 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1301 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1302 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1303 env={'RSYNC_PASSWORD': rsync_bin_key},
1304 haltOnFailure = True,
1308 # delete files which don't exist locally
# Prune pass: --delete --existing --ignore-existing transfers nothing and
# only removes remote files absent from the local build output.
1309 factory.addStep(ShellCommand(
1310 name = "targetprune",
1311 description = "Pruning target files",
1312 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1313 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1314 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1315 env={'RSYNC_PASSWORD': rsync_bin_key},
1316 haltOnFailure = True,
# Mirror the per-kernel-version kmod archive to the remote kmods/ dir.
# --delete makes the remote an exact mirror of the local kmods tree.
1321 if enable_kmod_archive:
1322 factory.addStep(ShellCommand(
1323 name = "kmodupload",
1324 description = "Uploading kmod archive",
1325 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1326 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1327 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1328 env={'RSYNC_PASSWORD': rsync_bin_key},
1329 haltOnFailure = True,
# Optionally mirror downloaded source tarballs to the source mirror.
1334 if rsync_src_url is not None:
# List regular, non-empty files in dl/ newer than .config, skipping hidden
# files and rsync/hash bookkeeping (*.hash, *.dl); %f prints basenames only.
1335 factory.addStep(ShellCommand(
1336 name = "sourcelist",
1337 description = "Finding source archives to upload",
1338 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1339 haltOnFailure = True
# --size-only suffices here because source tarballs are immutable once
# published; the partial dir also includes the worker name since multiple
# workers may upload sources concurrently.
1342 factory.addStep(ShellCommand(
1343 name = "sourceupload",
1344 description = "Uploading source archives",
1345 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1346 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1347 env={'RSYNC_PASSWORD': rsync_src_key},
1348 haltOnFailure = True,
# Mirror bin/packages/ to the remote packages/ tree. Best-effort for the
# build result (halt/flunk off) but surfaces problems as warnings.
1354 factory.addStep(ShellCommand(
1355 name = "packageupload",
1356 description = "Uploading package files",
1357 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1358 env={'RSYNC_PASSWORD': rsync_bin_key},
1359 haltOnFailure = False,
1360 flunkOnFailure = False,
1361 warnOnFailure = True,
# Upload build logs into a per-target/per-subtarget directory on the remote.
# Non-fatal: missing logs should never fail an otherwise-good build.
1368 factory.addStep(ShellCommand(
1370 description = "Uploading logs",
1371 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1372 env={'RSYNC_PASSWORD': rsync_bin_key},
1373 haltOnFailure = False,
1374 flunkOnFailure = False,
1375 warnOnFailure = True,
# Purely informational: record filesystem free space and build-dir size in
# the build log. LC_ALL=C keeps the output format locale-independent; all
# failure flags are off so these can never affect the build result.
1381 factory.addStep(ShellCommand(
1383 description = "Reporting disk usage",
1384 command=["df", "-h", "."],
1385 env={'LC_ALL': 'C'},
1386 haltOnFailure = False,
1387 flunkOnFailure = False,
1388 warnOnFailure = False,
1392 factory.addStep(ShellCommand(
1394 description = "Reporting estimated file space usage",
1395 command=["du", "-sh", "."],
1396 env={'LC_ALL': 'C'},
1397 haltOnFailure = False,
1398 flunkOnFailure = False,
1399 warnOnFailure = False,
# Informational ccache hit/miss statistics. PATH is extended with the
# build's host tools dir so the in-tree ccache binary is found.
1403 factory.addStep(ShellCommand(
1404 name = "ccachestat",
1405 description = "Reporting ccache stats",
1406 command=["ccache", "-s"],
1407 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1408 want_stderr = False,
1409 haltOnFailure = False,
1410 flunkOnFailure = False,
1411 warnOnFailure = False,
# Register the per-target builder, a Triggerable scheduler for it, and a
# corresponding Trigger step on the force-build factory so a forced build
# can fan out to selected targets only (gated by IsTargetSelected).
1415 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1417 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1418 force_factory.addStep(steps.Trigger(
1419 name = "trigger_%s" % target,
1420 description = "Triggering %s build" % target,
1421 schedulerNames = [ "trigger_%s" % target ],
# Forward the force-build reason and tag into the triggered build.
1422 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1423 doStepIf = IsTargetSelected(target)
1427 ####### STATUS TARGETS
1429 # 'status' is a list of Status Targets. The results of each build will be
1430 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1431 # including web pages, email senders, and IRC bots.
# Web UI: enabled only when [phase1] status_bind is configured.
# NOTE(review): the line opening the c['www'] dict (between 1433 and 1435)
# is missing from this chunk — presumably c['www'] = { ... }; verify against
# the full file.
1433 if ini.has_option("phase1", "status_bind"):
1435 'port': ini.get("phase1", "status_bind"),
1437 'waterfall_view': True,
1438 'console_view': True,
# Optional basic auth: a single admin user/password from config.ini; that
# user gets the "admins" role, which is required for all control endpoints.
1443 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1444 c['www']['auth'] = util.UserPasswordAuth([
1445 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1447 c['www']['authz'] = util.Authz(
1448 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1449 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# IRC reporter: requires host, nickname and channel in the [irc] section;
# port and password are optional overrides.
1453 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1454 irc_host = ini.get("irc", "host")
1456 irc_chan = ini.get("irc", "channel")
1457 irc_nick = ini.get("irc", "nickname")
1460 if ini.has_option("irc", "port"):
1461 irc_port = ini.getint("irc", "port")
1463 if ini.has_option("irc", "password"):
1464 irc_pass = ini.get("irc", "password")
# Only exceptional events are announced to keep channel noise low.
1466 irc = reporters.IRC(irc_host, irc_nick,
1468 password = irc_pass,
1469 channels = [ irc_chan ],
1470 notify_events = [ 'exception', 'problem', 'recovery' ]
1473 c['services'].append(irc)
# Map openwrt.org git clone URLs to gitweb commit links in the UI;
# %s is replaced with the revision hash.
1475 c['revlink'] = util.RevlinkMatch([
1476 r'https://git.openwrt.org/openwrt/(.*).git'
1478 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1483 # This specifies what database buildbot uses to store its state. You can leave
1484 # this at its default for all but the largest installations.
# NOTE(review): the line opening the containing dict (presumably
# c['db'] = {) is missing from this chunk — confirm against the full file.
1485 'db_url' : "sqlite:///state.sqlite",
# Opt out of buildbot's anonymous usage reporting.
1488 c['buildbotNetUsageData'] = None