2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 # This is the dictionary that the buildmaster pays attention to. We also use
55 # a shorter alias to save typing.
56 c = BuildmasterConfig = {}
58 ####### PROJECT IDENTITY
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
64 c['title'] = ini.get("general", "title")
65 c['titleURL'] = ini.get("general", "title_url")
67 # the 'buildbotURL' string should point to the location where the buildbot's
68 # internal web server (usually the html.WebStatus page) is visible. This
69 # typically uses the port number set in the Waterfall 'status' entry, but
70 # with an externally-visible host name which the buildbot cannot figure out
73 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
77 # The 'workers' list defines the set of recognized buildworkers. Each element is
78 # a Worker object, specifying a unique worker name and password. The same
79 # worker name and password must be configured on the worker.
83 if ini.has_option("phase1", "port"):
84 worker_port = ini.get("phase1", "port")
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
104 if ini.has_option(section, "dl_lock"):
105 lockname = ini.get(section, "dl_lock")
106 sl_props['dl_lock'] = lockname
107 if lockname not in NetLocks:
108 NetLocks[lockname] = locks.MasterLock(lockname)
109 if ini.has_option(section, "ul_lock"):
110 lockname = ini.get(section, "dl_lock")
111 sl_props['ul_lock'] = lockname
112 if lockname not in NetLocks:
113 NetLocks[lockname] = locks.MasterLock(lockname)
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
122 # This must match the value configured into the buildworkers (with their
124 c['protocols'] = {'pb': {'port': worker_port}}
127 c['collapseRequests'] = True
129 # Reduce amount of backlog data
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
135 @defer.inlineCallbacks
136 def getNewestCompleteTime(bldr):
137 """Returns the complete_at of the latest completed and not SKIPPED
138 build request for this builder, or None if there are no such build
139 requests. We need to filter out SKIPPED requests because we're
140 using collapseRequests=True which is unfortunately marking all
141 previous requests as complete when new buildset is created.
143 @returns: datetime instance or None, via Deferred
146 bldrid = yield bldr.getBuilderId()
147 completed = yield bldr.master.data.get(
148 ('builders', bldrid, 'buildrequests'),
150 resultspec.Filter('complete', 'eq', [True]),
151 resultspec.Filter('results', 'ne', [results.SKIPPED]),
153 order=['-complete_at'], limit=1)
157 return completed[0]['complete_at']
159 @defer.inlineCallbacks
160 def prioritizeBuilders(master, builders):
161 """Returns sorted list of builders by their last timestamp of completed and
164 @returns: list of sorted builders
167 def is_building(bldr):
168 return bool(bldr.building) or bool(bldr.old_building)
171 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
172 d.addCallback(lambda complete_at: (complete_at, bldr))
176 (complete_at, bldr) = item
180 complete_at = date.replace(tzinfo=tzutc())
182 if is_building(bldr):
184 complete_at = date.replace(tzinfo=tzutc())
186 return (complete_at, bldr.name)
188 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
189 results.sort(key=bldr_sort)
192 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
194 return [r[1] for r in results]
196 c['prioritizeBuilders'] = prioritizeBuilders
198 ####### CHANGESOURCES
200 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
201 scripts_dir = os.path.abspath("../scripts")
214 if ini.has_option("phase1", "expire"):
215 tree_expire = ini.getint("phase1", "expire")
217 if ini.has_option("phase1", "other_builds"):
218 other_builds = ini.getint("phase1", "other_builds")
220 if ini.has_option("phase1", "cc_version"):
221 cc_version = ini.get("phase1", "cc_version").split()
222 if len(cc_version) == 1:
223 cc_version = ["eq", cc_version[0]]
225 if ini.has_option("general", "git_ssh"):
226 git_ssh = ini.getboolean("general", "git_ssh")
228 if ini.has_option("general", "git_ssh_key"):
229 git_ssh_key = ini.get("general", "git_ssh_key")
233 if ini.has_option("phase1", "config_seed"):
234 config_seed = ini.get("phase1", "config_seed")
236 repo_url = ini.get("repo", "url")
237 repo_branch = "master"
239 if ini.has_option("repo", "branch"):
240 repo_branch = ini.get("repo", "branch")
242 rsync_bin_url = ini.get("rsync", "binary_url")
243 rsync_bin_key = ini.get("rsync", "binary_password")
244 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
246 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
247 rsync_bin_defopts += ["--contimeout=20"]
251 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
253 if ini.has_option("rsync", "source_url"):
254 rsync_src_url = ini.get("rsync", "source_url")
255 rsync_src_key = ini.get("rsync", "source_password")
257 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
258 rsync_src_defopts += ["--contimeout=20"]
261 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
263 if ini.has_option("usign", "key"):
264 usign_key = ini.get("usign", "key")
266 if ini.has_option("usign", "comment"):
267 usign_comment = ini.get("usign", "comment")
269 enable_kmod_archive = False
270 embed_kmod_repository = False
272 if ini.has_option("phase1", "kmod_archive"):
273 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
275 if ini.has_option("phase1", "kmod_repository"):
276 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
282 if not os.path.isdir(work_dir+'/source.git'):
283 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
285 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
287 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
288 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
289 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
292 line = findtargets.stdout.readline()
295 ta = line.decode().strip().split(' ')
296 targets.append(ta[0])
299 # the 'change_source' setting tells the buildmaster how it should find out
300 # about source code changes. Here we point to the buildbot clone of pyflakes.
302 c['change_source'] = []
303 c['change_source'].append(GitPoller(
305 workdir=work_dir+'/work.git', branch=repo_branch,
310 # Configure the Schedulers, which decide how to react to incoming changes. In this
311 # case, just kick off a 'basebuild' build
313 class TagChoiceParameter(BaseParameter):
314 spec_attributes = ["strict", "choices"]
318 def __init__(self, name, label=None, **kw):
319 super().__init__(name, label, **kw)
320 self._choice_list = []
325 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
328 findtags = subprocess.Popen(
329 ['git', 'ls-remote', '--tags', repo_url],
330 stdout = subprocess.PIPE)
333 line = findtags.stdout.readline()
338 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
340 if tagver and tagver[1].find(basever[1]) == 0:
341 taglist.append(tagver[1])
343 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
344 taglist.insert(0, '')
346 self._choice_list = taglist
348 return self._choice_list
350 def parse_from_arg(self, s):
351 if self.strict and s not in self._choice_list:
352 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
356 c['schedulers'].append(SingleBranchScheduler(
358 change_filter = filter.ChangeFilter(branch=repo_branch),
359 treeStableTimer = 60,
360 builderNames = targets))
362 c['schedulers'].append(ForceScheduler(
364 buttonName = "Force builds",
365 label = "Force build details",
366 builderNames = [ "00_force_build" ],
369 util.CodebaseParameter(
371 label = "Repository",
372 branch = util.FixedParameter(name = "branch", default = ""),
373 revision = util.FixedParameter(name = "revision", default = ""),
374 repository = util.FixedParameter(name = "repository", default = ""),
375 project = util.FixedParameter(name = "project", default = "")
379 reason = util.StringParameter(
382 default = "Trigger build",
388 util.NestedParameter(
390 label="Build Options",
393 util.ChoiceStringParameter(
395 label = "Build target",
397 choices = [ "all" ] + targets
411 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
412 # what steps, and which workers can execute them. Note that any particular build will
413 # only take place on one worker.
416 [ "tools", "tools/clean" ],
417 [ "chain", "toolchain/clean" ],
418 [ "linux", "target/linux/clean" ],
419 [ "dir", "dirclean" ],
420 [ "dist", "distclean" ]
423 def IsMakeCleanRequested(pattern):
424 def CheckCleanProperty(step):
425 val = step.getProperty("clean")
426 if val and re.match(pattern, val):
431 return CheckCleanProperty
def IsSharedWorkdir(step):
	"""Return True when this build's worker is configured with a shared
	work directory (the per-worker 'shared_wd' property is truthy)."""
	return True if step.getProperty("shared_wd") else False
436 def IsCleanupRequested(step):
437 if IsSharedWorkdir(step):
439 do_cleanup = step.getProperty("do_cleanup")
445 def IsExpireRequested(step):
446 if IsSharedWorkdir(step):
449 return not IsCleanupRequested(step)
451 def IsGitFreshRequested(step):
452 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
	"""Return True when a plain 'clean' git checkout should be used,
	i.e. whenever a 'fresh' checkout was not requested for this build."""
	fresh = IsGitFreshRequested(step)
	return not fresh
461 def IsTaggingRequested(step):
462 val = step.getProperty("tag")
463 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
	"""Return True for ordinary (non-tag) builds — the logical inverse
	of IsTaggingRequested."""
	tagging = IsTaggingRequested(step)
	return not tagging
def IsNoMasterBuild(step):
	"""Return True when the configured repository branch is anything
	other than "master" (used to skip master-only/branch-only steps)."""
	return not (repo_branch == "master")
474 def GetBaseVersion():
475 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
476 return repo_branch.split('-')[1]
481 def GetVersionPrefix(props):
482 basever = GetBaseVersion()
483 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
484 return "%s/" % props["tag"]
485 elif basever != "master":
486 return "%s-SNAPSHOT/" % basever
491 def GetNumJobs(props):
492 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
493 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
499 if props.hasProperty("cc_command"):
500 return props["cc_command"]
506 if props.hasProperty("cxx_command"):
507 return props["cxx_command"]
513 if props.hasProperty("builddir"):
514 return props["builddir"]
515 elif props.hasProperty("workdir"):
516 return props["workdir"]
521 def GetCCache(props):
522 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
523 return props["ccache_command"]
527 def GetNextBuild(builder, requests):
529 if r.properties and r.properties.hasProperty("tag"):
533 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
536 def MakeEnv(overrides=None, tryccache=False):
538 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
539 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
542 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
543 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
544 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
546 env['CC'] = env['CCC']
547 env['CXX'] = env['CCXX']
549 if overrides is not None:
550 env.update(overrides)
554 def NetLockDl(props):
556 if props.hasProperty("dl_lock"):
557 lock = NetLocks[props["dl_lock"]]
559 return [lock.access('exclusive')]
564 def NetLockUl(props):
566 if props.hasProperty("ul_lock"):
567 lock = NetLocks[props["ul_lock"]]
569 return [lock.access('exclusive')]
574 def TagPropertyValue(props):
575 if props.hasProperty("options"):
576 options = props.getProperty("options")
577 if type(options) is dict:
578 return options.get("tag")
581 def IsTargetSelected(target):
582 def CheckTargetProperty(step):
584 options = step.getProperty("options")
585 if type(options) is dict:
586 selected_target = options.get("target", "all")
587 if selected_target != "all" and selected_target != target:
594 return CheckTargetProperty
596 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
598 seckey = base64.b64decode(seckey)
602 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
603 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
608 dlLock = locks.WorkerLock("worker_dl")
610 checkBuiltin = re.sub('[\t\n ]+', ' ', """
612 local symbol op path file;
613 for file in $CHANGED_FILES; do
619 while read symbol op path; do
620 case "$symbol" in package-*)
621 symbol="${symbol##*(}";
622 symbol="${symbol%)}";
623 for file in $CHANGED_FILES; do
624 case "$file" in "package/$path/"*)
625 grep -qsx "$symbol=y" .config && return 0
629 done < tmp/.packagedeps;
635 class IfBuiltinShellCommand(ShellCommand):
636 def _quote(self, str):
637 if re.search("[^a-zA-Z0-9/_.-]", str):
638 return "'%s'" %(re.sub("'", "'\"'\"'", str))
641 def setCommand(self, command):
642 if not isinstance(command, (str, unicode)):
643 command = ' '.join(map(self._quote, command))
646 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
649 def setupEnvironment(self, cmd):
650 workerEnv = self.workerEnvironment
651 if workerEnv is None:
654 for request in self.build.requests:
655 for source in request.sources:
656 for change in source.changes:
657 for file in change.files:
658 changedFiles[file] = True
659 fullSlaveEnv = workerEnv.copy()
660 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
661 cmd.args['env'] = fullSlaveEnv
665 for worker in c['workers']:
666 workerNames.append(worker.workername)
668 force_factory = BuildFactory()
670 c['builders'].append(BuilderConfig(
671 name = "00_force_build",
672 workernames = workerNames,
673 factory = force_factory))
675 for target in targets:
676 ts = target.split('/')
678 factory = BuildFactory()
680 # setup shared work directory if required
681 factory.addStep(ShellCommand(
683 description = "Setting up shared work directory",
684 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
686 haltOnFailure = True,
687 doStepIf = IsSharedWorkdir))
689 # find number of cores
690 factory.addStep(SetPropertyFromCommand(
693 description = "Finding number of CPUs",
694 command = ["nproc"]))
696 # find gcc and g++ compilers
697 factory.addStep(FileDownload(
698 name = "dlfindbinpl",
699 mastersrc = scripts_dir + '/findbin.pl',
700 workerdest = "../findbin.pl",
703 factory.addStep(SetPropertyFromCommand(
705 property = "cc_command",
706 description = "Finding gcc command",
708 "../findbin.pl", "gcc",
709 cc_version[0] if cc_version is not None else '',
710 cc_version[1] if cc_version is not None else ''
712 haltOnFailure = True))
714 factory.addStep(SetPropertyFromCommand(
716 property = "cxx_command",
717 description = "Finding g++ command",
719 "../findbin.pl", "g++",
720 cc_version[0] if cc_version is not None else '',
721 cc_version[1] if cc_version is not None else ''
723 haltOnFailure = True))
725 # see if ccache is available
726 factory.addStep(SetPropertyFromCommand(
727 property = "ccache_command",
728 command = ["which", "ccache"],
729 description = "Testing for ccache command",
730 haltOnFailure = False,
731 flunkOnFailure = False,
732 warnOnFailure = False,
735 # expire tree if needed
737 factory.addStep(FileDownload(
739 doStepIf = IsExpireRequested,
740 mastersrc = scripts_dir + '/expire.sh',
741 workerdest = "../expire.sh",
744 factory.addStep(ShellCommand(
746 description = "Checking for build tree expiry",
747 command = ["./expire.sh", str(tree_expire)],
749 haltOnFailure = True,
750 doStepIf = IsExpireRequested,
753 # cleanup.sh if needed
754 factory.addStep(FileDownload(
755 name = "dlcleanupsh",
756 mastersrc = scripts_dir + '/cleanup.sh',
757 workerdest = "../cleanup.sh",
759 doStepIf = IsCleanupRequested))
761 factory.addStep(ShellCommand(
763 description = "Cleaning previous builds",
764 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
766 haltOnFailure = True,
767 doStepIf = IsCleanupRequested,
770 factory.addStep(ShellCommand(
772 description = "Cleaning work area",
773 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
775 haltOnFailure = True,
776 doStepIf = IsCleanupRequested,
779 # user-requested clean targets
780 for tuple in CleanTargetMap:
781 factory.addStep(ShellCommand(
783 description = 'User-requested "make %s"' % tuple[1],
784 command = ["make", tuple[1], "V=s"],
786 doStepIf = IsMakeCleanRequested(tuple[0])
789 # Workaround bug when switching from a checked out tag back to a branch
790 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
791 factory.addStep(ShellCommand(
792 name = "gitcheckout",
793 description = "Ensure that Git HEAD is sane",
794 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
795 haltOnFailure = True))
797 # check out the source
799 # if repo doesn't exist: 'git clone repourl'
800 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
801 # 'git fetch -t repourl branch; git reset --hard revision'
802 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
803 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
807 branch = repo_branch,
810 haltOnFailure = True,
811 doStepIf = IsGitCleanRequested,
817 branch = repo_branch,
820 haltOnFailure = True,
821 doStepIf = IsGitFreshRequested,
825 factory.addStep(ShellCommand(
827 description = "Fetching Git remote refs",
828 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
833 factory.addStep(ShellCommand(
835 description = "Checking out Git tag",
836 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
837 haltOnFailure = True,
838 doStepIf = IsTaggingRequested
841 # Verify that Git HEAD points to a tag or branch
842 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
843 factory.addStep(ShellCommand(
845 description = "Ensure that Git HEAD is pointing to a branch or tag",
846 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
847 haltOnFailure = True))
849 factory.addStep(ShellCommand(
851 description = "Remove tmp folder",
852 command=["rm", "-rf", "tmp/"]))
855 # factory.addStep(ShellCommand(
856 # name = "feedsconf",
857 # description = "Copy the feeds.conf",
858 # command='''cp ~/feeds.conf ./feeds.conf''' ))
861 factory.addStep(ShellCommand(
862 name = "rmfeedlinks",
863 description = "Remove feed symlinks",
864 command=["rm", "-rf", "package/feeds/"]))
866 factory.addStep(StringDownload(
868 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
869 workerdest = "../ccache_cc.sh",
873 factory.addStep(StringDownload(
875 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
876 workerdest = "../ccache_cxx.sh",
882 factory.addStep(StringDownload(
883 name = "dlgitclonekey",
885 workerdest = "../git-clone.key",
889 factory.addStep(ShellCommand(
890 name = "patchfeedsconf",
891 description = "Patching feeds.conf",
892 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
897 factory.addStep(ShellCommand(
898 name = "updatefeeds",
899 description = "Updating feeds",
900 command=["./scripts/feeds", "update"],
901 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
907 factory.addStep(ShellCommand(
908 name = "rmfeedsconf",
909 description = "Removing feeds.conf",
910 command=["rm", "feeds.conf"],
915 factory.addStep(ShellCommand(
916 name = "installfeeds",
917 description = "Installing feeds",
918 command=["./scripts/feeds", "install", "-a"],
919 env = MakeEnv(tryccache=True),
924 if config_seed is not None:
925 factory.addStep(StringDownload(
926 name = "dlconfigseed",
927 s = config_seed + '\n',
928 workerdest = ".config",
933 factory.addStep(ShellCommand(
935 description = "Seeding .config",
936 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
939 factory.addStep(ShellCommand(
941 description = "Removing output directory",
942 command = ["rm", "-rf", "bin/"]
945 factory.addStep(ShellCommand(
947 description = "Populating .config",
948 command = ["make", "defconfig"],
953 factory.addStep(ShellCommand(
955 description = "Checking architecture",
956 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
964 factory.addStep(SetPropertyFromCommand(
967 description = "Finding libc suffix",
968 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
971 if usign_key is not None:
972 factory.addStep(StringDownload(
973 name = "dlkeybuildpub",
974 s = UsignSec2Pub(usign_key, usign_comment),
975 workerdest = "key-build.pub",
979 factory.addStep(StringDownload(
981 s = "# fake private key",
982 workerdest = "key-build",
986 factory.addStep(StringDownload(
987 name = "dlkeybuilducert",
988 s = "# fake certificate",
989 workerdest = "key-build.ucert",
994 factory.addStep(ShellCommand(
996 description = "Preparing dl/",
997 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1003 factory.addStep(ShellCommand(
1005 description = "Building and installing GNU tar",
1006 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1007 env = MakeEnv(tryccache=True),
1008 haltOnFailure = True
1012 factory.addStep(ShellCommand(
1014 description = "Populating dl/",
1015 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1018 locks = [dlLock.access('exclusive')],
1021 factory.addStep(ShellCommand(
1023 description = "Cleaning base-files",
1024 command=["make", "package/base-files/clean", "V=s"]
1028 factory.addStep(ShellCommand(
1030 description = "Building and installing tools",
1031 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
1032 env = MakeEnv(tryccache=True),
1033 haltOnFailure = True
1036 factory.addStep(ShellCommand(
1038 description = "Building and installing toolchain",
1039 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1041 haltOnFailure = True
1044 factory.addStep(ShellCommand(
1046 description = "Building kmods",
1047 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1049 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1050 haltOnFailure = True
1053 # find kernel version
1054 factory.addStep(SetPropertyFromCommand(
1055 name = "kernelversion",
1056 property = "kernelversion",
1057 description = "Finding the effective Kernel version",
1058 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1059 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1062 factory.addStep(ShellCommand(
1064 description = "Cleaning up package build",
1065 command=["make", "package/cleanup", "V=s"]
1068 factory.addStep(ShellCommand(
1070 description = "Building packages",
1071 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1073 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1074 haltOnFailure = True
1077 # factory.addStep(IfBuiltinShellCommand(
1078 factory.addStep(ShellCommand(
1079 name = "pkginstall",
1080 description = "Installing packages",
1081 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1083 haltOnFailure = True
1086 factory.addStep(ShellCommand(
1088 description = "Indexing packages",
1089 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1091 haltOnFailure = True
1094 if enable_kmod_archive and embed_kmod_repository:
1095 # embed kmod repository. Must happen before 'images'
1097 # find rootfs staging directory
1098 factory.addStep(SetPropertyFromCommand(
1100 property = "stageroot",
1101 description = "Finding the rootfs staging directory",
1102 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1103 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1107 factory.addStep(ShellCommand(
1109 description = "Creating file overlay directory",
1110 command=["mkdir", "-p", "files/etc/opkg"],
1111 haltOnFailure = True
1114 factory.addStep(ShellCommand(
1115 name = "kmodconfig",
1116 description = "Embedding kmod repository configuration",
1117 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1118 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1119 haltOnFailure = True
1122 #factory.addStep(IfBuiltinShellCommand(
1123 factory.addStep(ShellCommand(
1125 description = "Building and installing images",
1126 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1128 haltOnFailure = True
1131 factory.addStep(ShellCommand(
1133 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1134 command = "make -j1 buildinfo V=s || true",
1136 haltOnFailure = True
1139 factory.addStep(ShellCommand(
1140 name = "json_overview_image_info",
1141 description = "Generate profiles.json in target folder",
1142 command = "make -j1 json_overview_image_info V=s || true",
1144 haltOnFailure = True
1147 factory.addStep(ShellCommand(
1149 description = "Calculating checksums",
1150 command=["make", "-j1", "checksum", "V=s"],
1152 haltOnFailure = True
1155 if enable_kmod_archive:
1156 factory.addStep(ShellCommand(
1158 description = "Creating kmod directory",
1159 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1160 haltOnFailure = True
1163 factory.addStep(ShellCommand(
1164 name = "kmodprepare",
1165 description = "Preparing kmod archive",
1166 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1167 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1168 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1169 haltOnFailure = True
1172 factory.addStep(ShellCommand(
1174 description = "Indexing kmod archive",
1175 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1176 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1178 haltOnFailure = True
1182 if ini.has_option("gpg", "key") or usign_key is not None:
1183 factory.addStep(MasterShellCommand(
1184 name = "signprepare",
1185 description = "Preparing temporary signing directory",
1186 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1187 haltOnFailure = True
1190 factory.addStep(ShellCommand(
1192 description = "Packing files to sign",
1193 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1194 haltOnFailure = True
1197 factory.addStep(FileUpload(
1198 workersrc = "sign.tar.gz",
1199 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1200 haltOnFailure = True
1203 factory.addStep(MasterShellCommand(
1205 description = "Signing files",
1206 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1207 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1208 haltOnFailure = True
1211 factory.addStep(FileDownload(
1212 name = "dlsigntargz",
1213 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1214 workerdest = "sign.tar.gz",
1215 haltOnFailure = True
1218 factory.addStep(ShellCommand(
1219 name = "signunpack",
1220 description = "Unpacking signed files",
1221 command = ["tar", "-xzf", "sign.tar.gz"],
1222 haltOnFailure = True
# Build the local tmp/upload/ skeleton that mirrors the remote layout before
# rsyncing; GetVersionPrefix presumably yields "<version>/" for release builds
# and "" for master builds — confirm against its definition above this view.
1226 factory.addStep(ShellCommand(
1227 name = "dirprepare",
1228 description = "Preparing upload directory structure",
1229 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1230 haltOnFailure = True
# Versioned (non-master) builds get a "packages" symlink pointing at the
# shared packages-<basever> tree; -f makes the step idempotent across runs.
1233 factory.addStep(ShellCommand(
1234 name = "linkprepare",
1235 description = "Preparing repository symlink",
1236 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1237 doStepIf = IsNoMasterBuild,
1238 haltOnFailure = True
# Optional kmod archive: per-kernel-version directory under the target dir.
1241 if enable_kmod_archive:
1242 factory.addStep(ShellCommand(
1243 name = "kmoddirprepare",
1244 description = "Preparing kmod archive upload directory",
1245 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1246 haltOnFailure = True
# Push the (empty) directory skeleton to the remote bin server so later
# per-file uploads have their destination directories in place.
1249 factory.addStep(ShellCommand(
1251 description = "Uploading directory structure",
1252 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1253 env={'RSYNC_PASSWORD': rsync_bin_key},
1254 haltOnFailure = True,
1258 # download remote sha256sums to 'target-sha256sums'
# Best-effort fetch: the remote sha256sums feeds sha2rsync.pl below to compute
# a minimal upload list. All failure flags are off because the file legitimately
# does not exist on the very first upload of a target.
1259 factory.addStep(ShellCommand(
1260 name = "target-sha256sums",
1261 description = "Fetching remote sha256sums for target",
1262 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1263 env={'RSYNC_PASSWORD': rsync_bin_key},
1265 haltOnFailure = False,
1266 flunkOnFailure = False,
1267 warnOnFailure = False,
1270 # build list of files to upload
# sha2rsync.pl diffs the remote sha256sums (fetched above) against the local
# one and writes only changed/new file names to "rsynclist". Helpers live in
# the parent dir ("../") so they are not swept up by the upload itself.
1271 factory.addStep(FileDownload(
1272 name = "dlsha2rsyncpl",
1273 mastersrc = scripts_dir + '/sha2rsync.pl',
1274 workerdest = "../sha2rsync.pl",
1278 factory.addStep(ShellCommand(
1280 description = "Building list of files to upload",
1281 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1282 haltOnFailure = True,
# rsync.sh: wrapper script around rsync used by all upload steps below.
# NOTE(review): wrapper semantics (retries?) not visible — see scripts dir.
1285 factory.addStep(FileDownload(
1286 name = "dlrsync.sh",
1287 mastersrc = scripts_dir + '/rsync.sh',
1288 workerdest = "../rsync.sh",
1292 # upload new files and update existing ones
# Incremental upload: only the files named in rsynclist; kmods/ is excluded
# here because it gets its own dedicated step. --delay-updates plus a builder-
# unique --partial-dir keeps the remote tree consistent while transferring.
1293 factory.addStep(ShellCommand(
1294 name = "targetupload",
1295 description = "Uploading target files",
1296 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1297 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1298 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1299 env={'RSYNC_PASSWORD': rsync_bin_key},
1300 haltOnFailure = True,
1304 # delete files which don't exist locally
# --delete --existing --ignore-existing together transfer nothing and only
# remove remote files that are absent locally (pure prune pass).
1305 factory.addStep(ShellCommand(
1306 name = "targetprune",
1307 description = "Pruning target files",
1308 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1309 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1310 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1311 env={'RSYNC_PASSWORD': rsync_bin_key},
1312 haltOnFailure = True,
# Mirror the per-kernel-version kmods/ archive; --delete makes the remote
# kmod dir an exact copy of the local one for this kernelversion.
1316 if enable_kmod_archive:
1317 factory.addStep(ShellCommand(
1318 name = "kmodupload",
1319 description = "Uploading kmod archive",
1320 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1321 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1322 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1323 env={'RSYNC_PASSWORD': rsync_bin_key},
1324 haltOnFailure = True,
# Optional source-archive mirror: collect downloaded tarballs from dl/ that
# are newer than .config (i.e. touched by this build), excluding hidden,
# hash and partial (.dl) files, then upload them size-only (source tarballs
# are immutable, so size comparison is sufficient and cheap).
1328 if rsync_src_url is not None:
1329 factory.addStep(ShellCommand(
1330 name = "sourcelist",
1331 description = "Finding source archives to upload",
1332 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1333 haltOnFailure = True
# --partial-dir additionally includes the worker name since all builders of
# all targets share the single remote source tree.
1336 factory.addStep(ShellCommand(
1337 name = "sourceupload",
1338 description = "Uploading source archives",
1339 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1340 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1341 env={'RSYNC_PASSWORD': rsync_src_key},
1342 haltOnFailure = True,
# Upload arch packages from bin/packages/; only warns on failure (guard
# condition, e.g. a doStepIf, is presumably on the lines omitted from this
# view — confirm).
1347 factory.addStep(ShellCommand(
1348 name = "packageupload",
1349 description = "Uploading package files",
1350 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1351 env={'RSYNC_PASSWORD': rsync_bin_key},
1352 haltOnFailure = False,
1353 flunkOnFailure = False,
1354 warnOnFailure = True,
# Mirror build logs to a per-target/subtarget path on the bin server;
# best-effort (warn only) so a log-upload hiccup never fails the build.
1360 factory.addStep(ShellCommand(
1362 description = "Uploading logs",
1363 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1364 env={'RSYNC_PASSWORD': rsync_bin_key},
1365 haltOnFailure = False,
1366 flunkOnFailure = False,
1367 warnOnFailure = True,
# Purely informational trailing steps: disk usage and ccache statistics.
# All failure flags are off — these must never affect the build result.
# LC_ALL=C keeps df output locale-independent for the logs.
1372 factory.addStep(ShellCommand(
1374 description = "Reporting disk usage",
1375 command=["df", "-h", "."],
1376 env={'LC_ALL': 'C'},
1377 haltOnFailure = False,
1378 flunkOnFailure = False,
1379 warnOnFailure = False,
# ccache lives in staging_dir/host/bin, hence the PATH override via MakeEnv.
1383 factory.addStep(ShellCommand(
1384 name = "ccachestat",
1385 description = "Reporting ccache stats",
1386 command=["ccache", "-s"],
1387 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1388 want_stderr = False,
1389 haltOnFailure = False,
1390 flunkOnFailure = False,
1391 warnOnFailure = False,
# Register the per-target builder, a Triggerable scheduler for it, and a
# Trigger step on the shared force scheduler's factory so that a single
# forced build can fan out to any selected subset of targets.
1395 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1397 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
# Each trigger step fires only when its target was selected in the force
# form (IsTargetSelected) and forwards the reason/tag properties downstream.
1398 force_factory.addStep(steps.Trigger(
1399 name = "trigger_%s" % target,
1400 description = "Triggering %s build" % target,
1401 schedulerNames = [ "trigger_%s" % target ],
1402 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1403 doStepIf = IsTargetSelected(target)
1407 ####### STATUS TARGETS
1409 # Status reporting: build results are delivered through the reporters and
1410 # web UI configured below (c['www'] and c['services']) — web dashboard,
1411 # authentication, and an optional IRC bot.
# Web UI: bind the dashboard to the configured address/port and enable the
# waterfall and console views. (The c['www'] assignment opening this dict is
# on lines omitted from this view.)
1413 if ini.has_option("phase1", "status_bind"):
1415 'port': ini.get("phase1", "status_bind"),
1417 'waterfall_view': True,
1418 'console_view': True,
# Optional single-user basic auth; that user gets the "admins" role, and
# only admins may reach any control endpoint (force/stop/etc.).
1423 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1424 c['www']['auth'] = util.UserPasswordAuth([
1425 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1427 c['www']['authz'] = util.Authz(
1428 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1429 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# Optional IRC notifier: requires host, nickname and channel; port and
# password are optional overrides. NOTE(review): the defaults for irc_port /
# irc_pass are presumably set on lines omitted from this view — confirm they
# are initialized before use here.
1433 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1434 irc_host = ini.get("irc", "host")
1436 irc_chan = ini.get("irc", "channel")
1437 irc_nick = ini.get("irc", "nickname")
1440 if ini.has_option("irc", "port"):
1441 irc_port = ini.getint("irc", "port")
1443 if ini.has_option("irc", "password"):
1444 irc_pass = ini.get("irc", "password")
# Announce only state changes worth acting on: exceptions, new breakage,
# and recovery — not every build.
1446 irc = reporters.IRC(irc_host, irc_nick,
1448 password = irc_pass,
1449 channels = [ irc_chan ],
1450 notify_events = [ 'exception', 'problem', 'recovery' ]
1453 c['services'].append(irc)
# Map openwrt.org git clone URLs to browsable gitweb commit links in the UI;
# \1 carries the repo name through, %s receives the revision hash.
1455 c['revlink'] = util.RevlinkMatch([
1456 r'https://git.openwrt.org/openwrt/(.*).git'
1458 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1463 # This specifies what database buildbot uses to store its state. You can leave
1464 # this at its default for all but the largest installations.
# NOTE(review): this key is inside a dict literal (presumably c['db'] = {...})
# whose braces are on lines omitted from this view.
1465 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymous usage statistics to the buildbot project.
1468 c['buildbotNetUsageData'] = None