2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record our own PID for twistd, but never clobber an existing pid file.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load master settings from an INI file; the BUILDMASTER_CONFIG environment
# variable may point at an alternative path (defaults to ./config.ini).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 # This is the dictionary that the buildmaster pays attention to. We also use
55 # a shorter alias to save typing.
56 c = BuildmasterConfig = {}
58 ####### PROJECT IDENTITY
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
64 c['title'] = ini.get("general", "title")
65 c['titleURL'] = ini.get("general", "title_url")
67 # the 'buildbotURL' string should point to the location where the buildbot's
68 # internal web server (usually the html.WebStatus page) is visible. This
69 # typically uses the port number set in the Waterfall 'status' entry, but
70 # with an externally-visible host name which the buildbot cannot figure out
73 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
77 # The 'workers' list defines the set of recognized buildworkers. Each element is
78 # a Worker object, specifying a unique worker name and password. The same
79 # worker name and password must be configured on the worker.
83 if ini.has_option("phase1", "port"):
84 worker_port = ini.get("phase1", "port")
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
104 if ini.has_option(section, "dl_lock"):
105 lockname = ini.get(section, "dl_lock")
106 sl_props['dl_lock'] = lockname
107 if lockname not in NetLocks:
108 NetLocks[lockname] = locks.MasterLock(lockname)
if ini.has_option(section, "ul_lock"):
    # Fix: this branch previously read the "dl_lock" option by copy-paste
    # mistake, so a worker's upload lock silently aliased its download
    # lock name. Read the correct "ul_lock" option instead.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option).
124 c['protocols'] = {'pb': {'port': worker_port}}
127 c['collapseRequests'] = True
129 # Reduce amount of backlog data
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
135 @defer.inlineCallbacks
136 def getNewestCompleteTime(bldr):
137 """Returns the complete_at of the latest completed and not SKIPPED
138 build request for this builder, or None if there are no such build
139 requests. We need to filter out SKIPPED requests because we're
140 using collapseRequests=True which is unfortunately marking all
141 previous requests as complete when new buildset is created.
143 @returns: datetime instance or None, via Deferred
146 bldrid = yield bldr.getBuilderId()
147 completed = yield bldr.master.data.get(
148 ('builders', bldrid, 'buildrequests'),
150 resultspec.Filter('complete', 'eq', [True]),
151 resultspec.Filter('results', 'ne', [results.SKIPPED]),
153 order=['-complete_at'], limit=1)
157 complete_at = completed[0]['complete_at']
159 last_build = yield bldr.master.data.get(
162 resultspec.Filter('builderid', 'eq', [bldrid]),
164 order=['-started_at'], limit=1)
166 if last_build and last_build[0]:
167 last_complete_at = last_build[0]['complete_at']
168 if last_complete_at and (last_complete_at > complete_at):
169 return last_complete_at
173 @defer.inlineCallbacks
174 def prioritizeBuilders(master, builders):
175 """Returns sorted list of builders by their last timestamp of completed and
178 @returns: list of sorted builders
def is_building(bldr):
    # A builder counts as busy when it has any active or stale builds.
    return bool(bldr.building or bldr.old_building)
185 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
186 d.addCallback(lambda complete_at: (complete_at, bldr))
190 (complete_at, bldr) = item
194 complete_at = date.replace(tzinfo=tzutc())
196 if is_building(bldr):
198 complete_at = date.replace(tzinfo=tzutc())
200 return (complete_at, bldr.name)
202 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
203 results.sort(key=bldr_sort)
206 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
208 return [r[1] for r in results]
210 c['prioritizeBuilders'] = prioritizeBuilders
212 ####### CHANGESOURCES
214 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
215 scripts_dir = os.path.abspath("../scripts")
223 if ini.has_option("phase1", "expire"):
224 tree_expire = ini.getint("phase1", "expire")
226 if ini.has_option("phase1", "config_seed"):
227 config_seed = ini.get("phase1", "config_seed")
229 repo_url = ini.get("repo", "url")
230 repo_branch = "master"
232 if ini.has_option("repo", "branch"):
233 repo_branch = ini.get("repo", "branch")
235 rsync_bin_url = ini.get("rsync", "binary_url")
236 rsync_bin_key = ini.get("rsync", "binary_password")
237 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
239 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
240 rsync_bin_defopts += ["--contimeout=20"]
244 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
246 if ini.has_option("rsync", "source_url"):
247 rsync_src_url = ini.get("rsync", "source_url")
248 rsync_src_key = ini.get("rsync", "source_password")
250 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
251 rsync_src_defopts += ["--contimeout=20"]
254 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
256 if ini.has_option("usign", "key"):
257 usign_key = ini.get("usign", "key")
259 if ini.has_option("usign", "comment"):
260 usign_comment = ini.get("usign", "comment")
262 enable_kmod_archive = False
263 embed_kmod_repository = False
265 if ini.has_option("phase1", "kmod_archive"):
266 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
268 if ini.has_option("phase1", "kmod_repository"):
269 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
275 if not os.path.isdir(work_dir+'/source.git'):
276 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
278 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
280 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
281 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
282 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
285 line = findtargets.stdout.readline()
288 ta = line.decode().strip().split(' ')
289 targets.append(ta[0])
292 # the 'change_source' setting tells the buildmaster how it should find out
293 # about source code changes. Here we point to the buildbot clone of pyflakes.
295 c['change_source'] = []
296 c['change_source'].append(GitPoller(
298 workdir=work_dir+'/work.git', branch=repo_branch,
303 # Configure the Schedulers, which decide how to react to incoming changes. In this
304 # case, just kick off a 'basebuild' build
306 class TagChoiceParameter(BaseParameter):
307 spec_attributes = ["strict", "choices"]
    def __init__(self, name, label=None, **kw):
        """Create the force-scheduler parameter.

        Extra keyword arguments are forwarded to BaseParameter unchanged.
        """
        super().__init__(name, label, **kw)
        # Cache of available tag choices; starts empty and is presumably
        # (re)populated elsewhere in the class -- confirm against the full
        # class body, which is not fully visible here.
        self._choice_list = []
318 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
321 findtags = subprocess.Popen(
322 ['git', 'ls-remote', '--tags', repo_url],
323 stdout = subprocess.PIPE)
326 line = findtags.stdout.readline()
331 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
333 if tagver and tagver[1].find(basever[1]) == 0:
334 taglist.append(tagver[1])
336 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
337 taglist.insert(0, '')
339 self._choice_list = taglist
341 return self._choice_list
343 def parse_from_arg(self, s):
344 if self.strict and s not in self._choice_list:
345 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
349 c['schedulers'].append(SingleBranchScheduler(
351 change_filter = filter.ChangeFilter(branch=repo_branch),
352 treeStableTimer = 60,
353 builderNames = targets))
355 c['schedulers'].append(ForceScheduler(
357 buttonName = "Force builds",
358 label = "Force build details",
359 builderNames = [ "00_force_build" ],
362 util.CodebaseParameter(
364 label = "Repository",
365 branch = util.FixedParameter(name = "branch", default = ""),
366 revision = util.FixedParameter(name = "revision", default = ""),
367 repository = util.FixedParameter(name = "repository", default = ""),
368 project = util.FixedParameter(name = "project", default = "")
372 reason = util.StringParameter(
375 default = "Trigger build",
381 util.NestedParameter(
383 label="Build Options",
386 util.ChoiceStringParameter(
388 label = "Build target",
390 choices = [ "all" ] + targets
404 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
405 # what steps, and which workers can execute them. Note that any particular build will
406 # only take place on one worker.
409 [ "tools", "tools/clean" ],
410 [ "chain", "toolchain/clean" ],
411 [ "linux", "target/linux/clean" ],
412 [ "dir", "dirclean" ],
413 [ "dist", "distclean" ]
416 def IsMakeCleanRequested(pattern):
417 def CheckCleanProperty(step):
418 val = step.getProperty("clean")
419 if val and re.match(pattern, val):
424 return CheckCleanProperty
def IsSharedWorkdir(step):
    """Return True when the build's worker uses a shared work directory."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
429 def IsCleanupRequested(step):
430 if IsSharedWorkdir(step):
432 do_cleanup = step.getProperty("do_cleanup")
438 def IsExpireRequested(step):
439 if IsSharedWorkdir(step):
442 return not IsCleanupRequested(step)
444 def IsGitFreshRequested(step):
445 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    """A 'clean' git method is wanted exactly when 'fresh' was not requested."""
    fresh_wanted = IsGitFreshRequested(step)
    return not fresh_wanted
454 def IsTaggingRequested(step):
455 val = step.getProperty("tag")
456 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested: True for ordinary, untagged builds."""
    tagging = IsTaggingRequested(step)
    return not tagging
def IsNoMasterBuild(step):
    """True for release-branch builds, False when building 'master'."""
    on_master = (repo_branch == "master")
    return not on_master
467 def GetBaseVersion():
468 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
469 return repo_branch.split('-')[1]
474 def GetVersionPrefix(props):
475 basever = GetBaseVersion()
476 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
477 return "%s/" % props["tag"]
478 elif basever != "master":
479 return "%s-SNAPSHOT/" % basever
484 def GetNumJobs(props):
485 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
486 return str(int(int(props["nproc"]) / props["max_builds"]))
492 if props.hasProperty("cc_command"):
493 return props["cc_command"]
499 if props.hasProperty("cxx_command"):
500 return props["cxx_command"]
506 if props.hasProperty("builddir"):
507 return props["builddir"]
508 elif props.hasProperty("workdir"):
509 return props["workdir"]
514 def GetCCache(props):
515 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
516 return props["ccache_command"]
520 def GetNextBuild(builder, requests):
522 if r.properties and r.properties.hasProperty("tag"):
526 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
529 def MakeEnv(overrides=None, tryccache=False):
531 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
532 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
535 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
536 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
537 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
539 env['CC'] = env['CCC']
540 env['CXX'] = env['CCXX']
542 if overrides is not None:
543 env.update(overrides)
547 def NetLockDl(props):
549 if props.hasProperty("dl_lock"):
550 lock = NetLocks[props["dl_lock"]]
552 return [lock.access('exclusive')]
557 def NetLockUl(props):
559 if props.hasProperty("ul_lock"):
560 lock = NetLocks[props["ul_lock"]]
562 return [lock.access('exclusive')]
567 def TagPropertyValue(props):
568 if props.hasProperty("options"):
569 options = props.getProperty("options")
570 if type(options) is dict:
571 return options.get("tag")
574 def IsTargetSelected(target):
575 def CheckTargetProperty(step):
577 options = step.getProperty("options")
578 if type(options) is dict:
579 selected_target = options.get("target", "all")
580 if selected_target != "all" and selected_target != target:
587 return CheckTargetProperty
589 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
591 seckey = base64.b64decode(seckey)
595 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
596 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
601 dlLock = locks.WorkerLock("worker_dl")
603 checkBuiltin = re.sub('[\t\n ]+', ' ', """
605 local symbol op path file;
606 for file in $CHANGED_FILES; do
612 while read symbol op path; do
613 case "$symbol" in package-*)
614 symbol="${symbol##*(}";
615 symbol="${symbol%)}";
616 for file in $CHANGED_FILES; do
617 case "$file" in "package/$path/"*)
618 grep -qsx "$symbol=y" .config && return 0
622 done < tmp/.packagedeps;
628 class IfBuiltinShellCommand(ShellCommand):
629 def _quote(self, str):
630 if re.search("[^a-zA-Z0-9/_.-]", str):
631 return "'%s'" %(re.sub("'", "'\"'\"'", str))
634 def setCommand(self, command):
635 if not isinstance(command, (str, unicode)):
636 command = ' '.join(map(self._quote, command))
639 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
642 def setupEnvironment(self, cmd):
643 workerEnv = self.workerEnvironment
644 if workerEnv is None:
647 for request in self.build.requests:
648 for source in request.sources:
649 for change in source.changes:
650 for file in change.files:
651 changedFiles[file] = True
652 fullSlaveEnv = workerEnv.copy()
653 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
654 cmd.args['env'] = fullSlaveEnv
658 for worker in c['workers']:
659 workerNames.append(worker.workername)
661 force_factory = BuildFactory()
663 c['builders'].append(BuilderConfig(
664 name = "00_force_build",
665 workernames = workerNames,
666 factory = force_factory))
668 for target in targets:
669 ts = target.split('/')
671 factory = BuildFactory()
673 # setup shared work directory if required
674 factory.addStep(ShellCommand(
676 description = "Setting up shared work directory",
677 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
679 haltOnFailure = True,
680 doStepIf = IsSharedWorkdir))
682 # find number of cores
683 factory.addStep(SetPropertyFromCommand(
686 description = "Finding number of CPUs",
687 command = ["nproc"]))
689 # find gcc and g++ compilers
690 factory.addStep(FileDownload(
691 name = "dlfindbinpl",
692 mastersrc = scripts_dir + '/findbin.pl',
693 workerdest = "../findbin.pl",
696 factory.addStep(SetPropertyFromCommand(
698 property = "cc_command",
699 description = "Finding gcc command",
701 "../findbin.pl", "gcc", "", "",
703 haltOnFailure = True))
705 factory.addStep(SetPropertyFromCommand(
707 property = "cxx_command",
708 description = "Finding g++ command",
710 "../findbin.pl", "g++", "", "",
712 haltOnFailure = True))
714 # see if ccache is available
715 factory.addStep(SetPropertyFromCommand(
716 property = "ccache_command",
717 command = ["which", "ccache"],
718 description = "Testing for ccache command",
719 haltOnFailure = False,
720 flunkOnFailure = False,
721 warnOnFailure = False,
724 # expire tree if needed
726 factory.addStep(FileDownload(
728 doStepIf = IsExpireRequested,
729 mastersrc = scripts_dir + '/expire.sh',
730 workerdest = "../expire.sh",
733 factory.addStep(ShellCommand(
735 description = "Checking for build tree expiry",
736 command = ["./expire.sh", str(tree_expire)],
738 haltOnFailure = True,
739 doStepIf = IsExpireRequested,
742 # cleanup.sh if needed
743 factory.addStep(FileDownload(
744 name = "dlcleanupsh",
745 mastersrc = scripts_dir + '/cleanup.sh',
746 workerdest = "../cleanup.sh",
748 doStepIf = IsCleanupRequested))
750 factory.addStep(ShellCommand(
752 description = "Cleaning previous builds",
753 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
755 haltOnFailure = True,
756 doStepIf = IsCleanupRequested,
759 factory.addStep(ShellCommand(
761 description = "Cleaning work area",
762 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
764 haltOnFailure = True,
765 doStepIf = IsCleanupRequested,
768 # user-requested clean targets
769 for tuple in CleanTargetMap:
770 factory.addStep(ShellCommand(
772 description = 'User-requested "make %s"' % tuple[1],
773 command = ["make", tuple[1], "V=s"],
775 doStepIf = IsMakeCleanRequested(tuple[0])
778 # Workaround bug when switching from a checked out tag back to a branch
779 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
780 factory.addStep(ShellCommand(
781 name = "gitcheckout",
782 description = "Ensure that Git HEAD is sane",
783 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
784 haltOnFailure = True))
786 # check out the source
788 # if repo doesn't exist: 'git clone repourl'
789 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
790 # 'git fetch -t repourl branch; git reset --hard revision'
791 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
792 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
796 branch = repo_branch,
800 haltOnFailure = True,
801 doStepIf = IsGitCleanRequested,
807 branch = repo_branch,
811 haltOnFailure = True,
812 doStepIf = IsGitFreshRequested,
816 factory.addStep(ShellCommand(
818 description = "Fetching Git remote refs",
819 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
824 factory.addStep(ShellCommand(
826 description = "Checking out Git tag",
827 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
828 haltOnFailure = True,
829 doStepIf = IsTaggingRequested
832 # Verify that Git HEAD points to a tag or branch
833 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
834 factory.addStep(ShellCommand(
836 description = "Ensure that Git HEAD is pointing to a branch or tag",
837 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
838 haltOnFailure = True))
840 factory.addStep(ShellCommand(
842 description = "Remove tmp folder",
843 command=["rm", "-rf", "tmp/"]))
846 # factory.addStep(ShellCommand(
847 # name = "feedsconf",
848 # description = "Copy the feeds.conf",
849 # command='''cp ~/feeds.conf ./feeds.conf''' ))
852 factory.addStep(ShellCommand(
853 name = "rmfeedlinks",
854 description = "Remove feed symlinks",
855 command=["rm", "-rf", "package/feeds/"]))
857 factory.addStep(StringDownload(
859 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
860 workerdest = "../ccache_cc.sh",
864 factory.addStep(StringDownload(
866 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
867 workerdest = "../ccache_cxx.sh",
872 factory.addStep(ShellCommand(
873 name = "updatefeeds",
874 description = "Updating feeds",
875 command=["./scripts/feeds", "update"],
876 env = MakeEnv(tryccache=True),
877 haltOnFailure = True,
882 factory.addStep(ShellCommand(
883 name = "installfeeds",
884 description = "Installing feeds",
885 command=["./scripts/feeds", "install", "-a"],
886 env = MakeEnv(tryccache=True),
891 if config_seed is not None:
892 factory.addStep(StringDownload(
893 name = "dlconfigseed",
894 s = config_seed + '\n',
895 workerdest = ".config",
900 factory.addStep(ShellCommand(
902 description = "Seeding .config",
903 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
906 factory.addStep(ShellCommand(
908 description = "Removing output directory",
909 command = ["rm", "-rf", "bin/"]
912 factory.addStep(ShellCommand(
914 description = "Populating .config",
915 command = ["make", "defconfig"],
920 factory.addStep(ShellCommand(
922 description = "Checking architecture",
923 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
931 factory.addStep(SetPropertyFromCommand(
934 description = "Finding libc suffix",
935 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
938 if usign_key is not None:
939 factory.addStep(StringDownload(
940 name = "dlkeybuildpub",
941 s = UsignSec2Pub(usign_key, usign_comment),
942 workerdest = "key-build.pub",
946 factory.addStep(StringDownload(
948 s = "# fake private key",
949 workerdest = "key-build",
953 factory.addStep(StringDownload(
954 name = "dlkeybuilducert",
955 s = "# fake certificate",
956 workerdest = "key-build.ucert",
961 factory.addStep(ShellCommand(
963 description = "Preparing dl/",
964 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
970 factory.addStep(ShellCommand(
972 description = "Building and installing GNU tar",
973 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
974 env = MakeEnv(tryccache=True),
979 factory.addStep(ShellCommand(
981 description = "Populating dl/",
982 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
985 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
988 factory.addStep(ShellCommand(
990 description = "Cleaning base-files",
991 command=["make", "package/base-files/clean", "V=s"]
995 factory.addStep(ShellCommand(
997 description = "Building and installing tools",
998 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
999 env = MakeEnv(tryccache=True),
1000 haltOnFailure = True
1003 factory.addStep(ShellCommand(
1005 description = "Building and installing toolchain",
1006 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1008 haltOnFailure = True
1011 factory.addStep(ShellCommand(
1013 description = "Building kmods",
1014 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1016 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1017 haltOnFailure = True
1020 # find kernel version
1021 factory.addStep(SetPropertyFromCommand(
1022 name = "kernelversion",
1023 property = "kernelversion",
1024 description = "Finding the effective Kernel version",
1025 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1026 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1029 factory.addStep(ShellCommand(
1031 description = "Cleaning up package build",
1032 command=["make", "package/cleanup", "V=s"]
1035 factory.addStep(ShellCommand(
1037 description = "Building packages",
1038 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1040 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1041 haltOnFailure = True
1044 # factory.addStep(IfBuiltinShellCommand(
1045 factory.addStep(ShellCommand(
1046 name = "pkginstall",
1047 description = "Installing packages",
1048 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1050 haltOnFailure = True
1053 factory.addStep(ShellCommand(
1055 description = "Indexing packages",
1056 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1058 haltOnFailure = True
1061 if enable_kmod_archive and embed_kmod_repository:
1062 # embed kmod repository. Must happen before 'images'
1064 # find rootfs staging directory
1065 factory.addStep(SetPropertyFromCommand(
1067 property = "stageroot",
1068 description = "Finding the rootfs staging directory",
1069 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1070 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1074 factory.addStep(ShellCommand(
1076 description = "Creating file overlay directory",
1077 command=["mkdir", "-p", "files/etc/opkg"],
1078 haltOnFailure = True
1081 factory.addStep(ShellCommand(
1082 name = "kmodconfig",
1083 description = "Embedding kmod repository configuration",
1084 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1085 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1086 haltOnFailure = True
1089 #factory.addStep(IfBuiltinShellCommand(
1090 factory.addStep(ShellCommand(
1092 description = "Building and installing images",
1093 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1095 haltOnFailure = True
1098 factory.addStep(ShellCommand(
1100 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1101 command = "make -j1 buildinfo V=s || true",
1103 haltOnFailure = True
1106 factory.addStep(ShellCommand(
1107 name = "json_overview_image_info",
1108 description = "Generate profiles.json in target folder",
1109 command = "make -j1 json_overview_image_info V=s || true",
1111 haltOnFailure = True
1114 factory.addStep(ShellCommand(
1116 description = "Calculating checksums",
1117 command=["make", "-j1", "checksum", "V=s"],
1119 haltOnFailure = True
1122 if enable_kmod_archive:
1123 factory.addStep(ShellCommand(
1125 description = "Creating kmod directory",
1126 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1127 haltOnFailure = True
1130 factory.addStep(ShellCommand(
1131 name = "kmodprepare",
1132 description = "Preparing kmod archive",
1133 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1134 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1135 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1136 haltOnFailure = True
1139 factory.addStep(ShellCommand(
1141 description = "Indexing kmod archive",
1142 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1143 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1145 haltOnFailure = True
1149 if ini.has_option("gpg", "key") or usign_key is not None:
1150 factory.addStep(MasterShellCommand(
1151 name = "signprepare",
1152 description = "Preparing temporary signing directory",
1153 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1154 haltOnFailure = True
1157 factory.addStep(ShellCommand(
1159 description = "Packing files to sign",
1160 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1161 haltOnFailure = True
1164 factory.addStep(FileUpload(
1165 workersrc = "sign.tar.gz",
1166 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1167 haltOnFailure = True
1170 factory.addStep(MasterShellCommand(
1172 description = "Signing files",
1173 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1174 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1175 haltOnFailure = True
1178 factory.addStep(FileDownload(
1179 name = "dlsigntargz",
1180 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1181 workerdest = "sign.tar.gz",
1182 haltOnFailure = True
1185 factory.addStep(ShellCommand(
1186 name = "signunpack",
1187 description = "Unpacking signed files",
1188 command = ["tar", "-xzf", "sign.tar.gz"],
1189 haltOnFailure = True
1193 factory.addStep(ShellCommand(
1194 name = "dirprepare",
1195 description = "Preparing upload directory structure",
1196 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1197 haltOnFailure = True
1200 factory.addStep(ShellCommand(
1201 name = "linkprepare",
1202 description = "Preparing repository symlink",
1203 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1204 doStepIf = IsNoMasterBuild,
1205 haltOnFailure = True
1208 if enable_kmod_archive:
1209 factory.addStep(ShellCommand(
1210 name = "kmoddirprepare",
1211 description = "Preparing kmod archive upload directory",
1212 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1213 haltOnFailure = True
1216 factory.addStep(ShellCommand(
1218 description = "Uploading directory structure",
1219 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1220 env={'RSYNC_PASSWORD': rsync_bin_key},
1221 haltOnFailure = True,
1226 # download remote sha256sums to 'target-sha256sums'
1227 factory.addStep(ShellCommand(
1228 name = "target-sha256sums",
1229 description = "Fetching remote sha256sums for target",
1230 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1231 env={'RSYNC_PASSWORD': rsync_bin_key},
1233 haltOnFailure = False,
1234 flunkOnFailure = False,
1235 warnOnFailure = False,
1238 # build list of files to upload
1239 factory.addStep(FileDownload(
1240 name = "dlsha2rsyncpl",
1241 mastersrc = scripts_dir + '/sha2rsync.pl',
1242 workerdest = "../sha2rsync.pl",
1246 factory.addStep(ShellCommand(
1248 description = "Building list of files to upload",
1249 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1250 haltOnFailure = True,
1253 factory.addStep(FileDownload(
1254 name = "dlrsync.sh",
1255 mastersrc = scripts_dir + '/rsync.sh',
1256 workerdest = "../rsync.sh",
1260 # upload new files and update existing ones
1261 factory.addStep(ShellCommand(
1262 name = "targetupload",
1263 description = "Uploading target files",
1264 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1265 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1266 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1267 env={'RSYNC_PASSWORD': rsync_bin_key},
1268 haltOnFailure = True,
1272 # delete files which don't exist locally
1273 factory.addStep(ShellCommand(
1274 name = "targetprune",
1275 description = "Pruning target files",
1276 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1277 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1278 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1279 env={'RSYNC_PASSWORD': rsync_bin_key},
1280 haltOnFailure = True,
1285 if enable_kmod_archive:
1286 factory.addStep(ShellCommand(
1287 name = "kmodupload",
1288 description = "Uploading kmod archive",
1289 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1290 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1291 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1292 env={'RSYNC_PASSWORD': rsync_bin_key},
1293 haltOnFailure = True,
1298 if rsync_src_url is not None:
1299 factory.addStep(ShellCommand(
1300 name = "sourcelist",
1301 description = "Finding source archives to upload",
1302 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1303 haltOnFailure = True
1306 factory.addStep(ShellCommand(
1307 name = "sourceupload",
1308 description = "Uploading source archives",
1309 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1310 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1311 env={'RSYNC_PASSWORD': rsync_src_key},
1312 haltOnFailure = True,
1318 factory.addStep(ShellCommand(
1319 name = "packageupload",
1320 description = "Uploading package files",
1321 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1322 env={'RSYNC_PASSWORD': rsync_bin_key},
1323 haltOnFailure = False,
1324 flunkOnFailure = False,
1325 warnOnFailure = True,
1332 factory.addStep(ShellCommand(
1334 description = "Uploading logs",
1335 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1336 env={'RSYNC_PASSWORD': rsync_bin_key},
1337 haltOnFailure = False,
1338 flunkOnFailure = False,
1339 warnOnFailure = True,
1345 factory.addStep(ShellCommand(
1347 description = "Reporting disk usage",
1348 command=["df", "-h", "."],
1349 env={'LC_ALL': 'C'},
1350 haltOnFailure = False,
1351 flunkOnFailure = False,
1352 warnOnFailure = False,
1356 factory.addStep(ShellCommand(
1358 description = "Reporting estimated file space usage",
1359 command=["du", "-sh", "."],
1360 env={'LC_ALL': 'C'},
1361 haltOnFailure = False,
1362 flunkOnFailure = False,
1363 warnOnFailure = False,
1367 factory.addStep(ShellCommand(
1368 name = "ccachestat",
1369 description = "Reporting ccache stats",
1370 command=["ccache", "-s"],
1371 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1372 want_stderr = False,
1373 haltOnFailure = False,
1374 flunkOnFailure = False,
1375 warnOnFailure = False,
1379 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1381 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1382 force_factory.addStep(steps.Trigger(
1383 name = "trigger_%s" % target,
1384 description = "Triggering %s build" % target,
1385 schedulerNames = [ "trigger_%s" % target ],
1386 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1387 doStepIf = IsTargetSelected(target)
1391 ####### STATUS TARGETS
1393 # Status reporting: build results are pushed to the targets configured
1394 # below via the 'www' web UI settings and 'services' reporters (Buildbot
1395 # nine and later), e.g. web views and IRC bots.
1397 if ini.has_option("phase1", "status_bind"):
1399 'port': ini.get("phase1", "status_bind"),
1401 'waterfall_view': True,
1402 'console_view': True,
1407 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1408 c['www']['auth'] = util.UserPasswordAuth([
1409 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1411 c['www']['authz'] = util.Authz(
1412 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1413 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1417 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1418 irc_host = ini.get("irc", "host")
1420 irc_chan = ini.get("irc", "channel")
1421 irc_nick = ini.get("irc", "nickname")
1424 if ini.has_option("irc", "port"):
1425 irc_port = ini.getint("irc", "port")
1427 if ini.has_option("irc", "password"):
1428 irc_pass = ini.get("irc", "password")
1430 irc = reporters.IRC(irc_host, irc_nick,
1432 password = irc_pass,
1433 channels = [ irc_chan ],
1434 notify_events = [ 'exception', 'problem', 'recovery' ]
1437 c['services'].append(irc)
1439 c['revlink'] = util.RevlinkMatch([
1440 r'https://git.openwrt.org/openwrt/(.*).git'
1442 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1447 # This specifies what database buildbot uses to store its state. You can leave
1448 # this at its default for all but the largest installations.
1449 'db_url' : "sqlite:///state.sqlite",
1452 c['buildbotNetUsageData'] = None