2 # ex: set syntax=python:
11 from datetime import timedelta
13 from buildbot import locks
14 from buildbot.changes import filter
15 from buildbot.changes.gitpoller import GitPoller
16 from buildbot.config import BuilderConfig
17 from buildbot.plugins import reporters
18 from buildbot.plugins import schedulers
19 from buildbot.plugins import steps
20 from buildbot.plugins import util
21 from buildbot.process import properties
22 from buildbot.process.factory import BuildFactory
23 from buildbot.process.properties import Interpolate
24 from buildbot.process.properties import Property
25 from buildbot.schedulers.basic import SingleBranchScheduler
26 from buildbot.schedulers.forcesched import BaseParameter
27 from buildbot.schedulers.forcesched import ForceScheduler
28 from buildbot.schedulers.forcesched import ValidationError
29 from buildbot.steps.master import MasterShellCommand
30 from buildbot.steps.shell import SetPropertyFromCommand
31 from buildbot.steps.shell import ShellCommand
32 from buildbot.steps.source.git import Git
33 from buildbot.steps.transfer import FileDownload
34 from buildbot.steps.transfer import FileUpload
35 from buildbot.steps.transfer import StringDownload
36 from buildbot.worker import Worker
39 # This is a sample buildmaster config file. It must be installed as
40 # 'master.cfg' in your buildmaster's base directory.
42 ini = configparser.ConfigParser()
43 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
45 # This is the dictionary that the buildmaster pays attention to. We also use
46 # a shorter alias to save typing.
47 c = BuildmasterConfig = {}
49 ####### PROJECT IDENTITY
51 # the 'title' string will appear at the top of this buildbot
52 # installation's html.WebStatus home page (linked to the
53 # 'titleURL') and is embedded in the title of the waterfall HTML page.
55 c['title'] = ini.get("general", "title")
56 c['titleURL'] = ini.get("general", "title_url")
58 # the 'buildbotURL' string should point to the location where the buildbot's
59 # internal web server (usually the html.WebStatus page) is visible. This
60 # typically uses the port number set in the Waterfall 'status' entry, but
61 # with an externally-visible host name which the buildbot cannot figure out
64 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
68 # The 'workers' list defines the set of recognized buildworkers. Each element is
69 # a Worker object, specifying a unique worker name and password. The same
70 # worker name and password must be configured on the worker.
74 if ini.has_option("phase1", "port"):
75 worker_port = ini.get("phase1", "port")
80 for section in ini.sections():
81 if section.startswith("worker "):
82 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
83 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
84 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
85 name = ini.get(section, "name")
86 password = ini.get(section, "password")
88 if ini.has_option(section, "builds"):
89 max_builds = ini.getint(section, "builds")
90 sl_props['max_builds'] = max_builds
92 sl_props['shared_wd'] = True
93 if ini.has_option(section, "cleanup"):
94 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
95 if ini.has_option(section, "dl_lock"):
96 lockname = ini.get(section, "dl_lock")
97 sl_props['dl_lock'] = lockname
98 if lockname not in NetLocks:
99 NetLocks[lockname] = locks.MasterLock(lockname)
if ini.has_option(section, "ul_lock"):
    # Per-worker upload lock: mirror of the dl_lock handling above.
    # BUG FIX: this branch previously read the "dl_lock" option again
    # (copy-paste error), so a worker's configured ul_lock was ignored
    # and its download lock was reused for uploads.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
105 if ini.has_option(section, "shared_wd"):
106 shared_wd = ini.getboolean(section, "shared_wd")
107 sl_props['shared_wd'] = shared_wd
108 if shared_wd and (max_builds != 1):
109 raise ValueError('max_builds must be 1 with shared workdir!')
110 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
112 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
113 # This must match the value configured into the buildworkers (with their
115 c['protocols'] = {'pb': {'port': worker_port}}
118 c['collapseRequests'] = True
120 # Reduce amount of backlog data
121 c['configurators'] = [util.JanitorConfigurator(
122 logHorizon=timedelta(days=3),
126 ####### CHANGESOURCES
128 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
129 scripts_dir = os.path.abspath("../scripts")
142 if ini.has_option("phase1", "expire"):
143 tree_expire = ini.getint("phase1", "expire")
145 if ini.has_option("phase1", "other_builds"):
146 other_builds = ini.getint("phase1", "other_builds")
148 if ini.has_option("phase1", "cc_version"):
149 cc_version = ini.get("phase1", "cc_version").split()
150 if len(cc_version) == 1:
151 cc_version = ["eq", cc_version[0]]
153 if ini.has_option("general", "git_ssh"):
154 git_ssh = ini.getboolean("general", "git_ssh")
156 if ini.has_option("general", "git_ssh_key"):
157 git_ssh_key = ini.get("general", "git_ssh_key")
161 if ini.has_option("phase1", "config_seed"):
162 config_seed = ini.get("phase1", "config_seed")
164 repo_url = ini.get("repo", "url")
165 repo_branch = "master"
167 if ini.has_option("repo", "branch"):
168 repo_branch = ini.get("repo", "branch")
170 rsync_bin_url = ini.get("rsync", "binary_url")
171 rsync_bin_key = ini.get("rsync", "binary_password")
172 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
174 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
175 rsync_bin_defopts += ["--contimeout=20"]
179 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
181 if ini.has_option("rsync", "source_url"):
182 rsync_src_url = ini.get("rsync", "source_url")
183 rsync_src_key = ini.get("rsync", "source_password")
185 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
186 rsync_src_defopts += ["--contimeout=20"]
189 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
191 if ini.has_option("usign", "key"):
192 usign_key = ini.get("usign", "key")
194 if ini.has_option("usign", "comment"):
195 usign_comment = ini.get("usign", "comment")
197 enable_kmod_archive = False
198 embed_kmod_repository = False
200 if ini.has_option("phase1", "kmod_archive"):
201 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
203 if ini.has_option("phase1", "kmod_repository"):
204 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
210 if not os.path.isdir(work_dir+'/source.git'):
211 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
213 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
215 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
216 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
217 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
220 line = findtargets.stdout.readline()
223 ta = line.decode().strip().split(' ')
224 targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source repository.
230 c['change_source'] = []
231 c['change_source'].append(GitPoller(
233 workdir=work_dir+'/work.git', branch=repo_branch,
238 # Configure the Schedulers, which decide how to react to incoming changes. In this
239 # case, just kick off a 'basebuild' build
241 class TagChoiceParameter(BaseParameter):
242 spec_attributes = ["strict", "choices"]
246 def __init__(self, name, label=None, **kw):
247 super().__init__(name, label, **kw)
248 self._choice_list = []
253 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
256 findtags = subprocess.Popen(
257 ['git', 'ls-remote', '--tags', repo_url],
258 stdout = subprocess.PIPE)
261 line = findtags.stdout.readline()
266 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
268 if tagver and tagver[1].find(basever[1]) == 0:
269 taglist.append(tagver[1])
271 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
272 taglist.insert(0, '')
274 self._choice_list = taglist
276 return self._choice_list
278 def parse_from_arg(self, s):
279 if self.strict and s not in self._choice_list:
280 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
284 c['schedulers'].append(SingleBranchScheduler(
286 change_filter = filter.ChangeFilter(branch=repo_branch),
287 treeStableTimer = 60,
288 builderNames = targets))
290 c['schedulers'].append(ForceScheduler(
292 buttonName = "Force builds",
293 label = "Force build details",
294 builderNames = [ "00_force_build" ],
297 util.CodebaseParameter(
299 label = "Repository",
300 branch = util.FixedParameter(name = "branch", default = ""),
301 revision = util.FixedParameter(name = "revision", default = ""),
302 repository = util.FixedParameter(name = "repository", default = ""),
303 project = util.FixedParameter(name = "project", default = "")
307 reason = util.StringParameter(
310 default = "Trigger build",
316 util.NestedParameter(
318 label="Build Options",
321 util.ChoiceStringParameter(
323 label = "Build target",
325 choices = [ "all" ] + targets
339 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
340 # what steps, and which workers can execute them. Note that any particular build will
341 # only take place on one worker.
344 [ "tools", "tools/clean" ],
345 [ "chain", "toolchain/clean" ],
346 [ "linux", "target/linux/clean" ],
347 [ "dir", "dirclean" ],
348 [ "dist", "distclean" ]
351 def IsMakeCleanRequested(pattern):
352 def CheckCleanProperty(step):
353 val = step.getProperty("clean")
354 if val and re.match(pattern, val):
359 return CheckCleanProperty
def IsSharedWorkdir(step):
    """Return True when this build's worker is configured with a shared
    work directory (the 'shared_wd' worker property is truthy)."""
    shared_wd = step.getProperty("shared_wd")
    return bool(shared_wd)
364 def IsCleanupRequested(step):
365 if IsSharedWorkdir(step):
367 do_cleanup = step.getProperty("do_cleanup")
373 def IsExpireRequested(step):
374 if IsSharedWorkdir(step):
377 return not IsCleanupRequested(step)
379 def IsGitFreshRequested(step):
380 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested: pick the plain 'clean' Git step
    when no fresh checkout was requested for this build."""
    if IsGitFreshRequested(step):
        return False
    return True
389 def IsTaggingRequested(step):
390 val = step.getProperty("tag")
391 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested: true for ordinary (untagged) builds."""
    if IsTaggingRequested(step):
        return False
    return True
def IsNoMasterBuild(step):
    """Renderer predicate: true for every branch except 'master'.

    Note: depends only on the module-level repo_branch, not on the step.
    """
    is_master = (repo_branch == "master")
    return not is_master
402 def GetBaseVersion():
403 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
404 return repo_branch.split('-')[1]
409 def GetVersionPrefix(props):
410 basever = GetBaseVersion()
411 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
412 return "%s/" % props["tag"]
413 elif basever != "master":
414 return "%s-SNAPSHOT/" % basever
419 def GetNumJobs(props):
420 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
421 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
427 if props.hasProperty("cc_command"):
428 return props["cc_command"]
434 if props.hasProperty("cxx_command"):
435 return props["cxx_command"]
441 if props.hasProperty("builddir"):
442 return props["builddir"]
443 elif props.hasProperty("workdir"):
444 return props["workdir"]
449 def GetCCache(props):
450 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
451 return props["ccache_command"]
455 def GetNextBuild(builder, requests):
457 if r.properties and r.properties.hasProperty("tag"):
461 def prioritizeBuilders(buildmaster, builders):
462 random.shuffle(builders)
465 c['prioritizeBuilders'] = prioritizeBuilders
467 def MakeEnv(overrides=None, tryccache=False):
469 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
470 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
473 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
474 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
475 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
477 env['CC'] = env['CCC']
478 env['CXX'] = env['CCXX']
480 if overrides is not None:
481 env.update(overrides)
485 def NetLockDl(props):
487 if props.hasProperty("dl_lock"):
488 lock = NetLocks[props["dl_lock"]]
490 return [lock.access('exclusive')]
495 def NetLockUl(props):
497 if props.hasProperty("ul_lock"):
498 lock = NetLocks[props["ul_lock"]]
500 return [lock.access('exclusive')]
505 def TagPropertyValue(props):
506 if props.hasProperty("options"):
507 options = props.getProperty("options")
508 if type(options) is dict:
509 return options.get("tag")
512 def IsTargetSelected(target):
513 def CheckTargetProperty(step):
515 options = step.getProperty("options")
516 if type(options) is dict:
517 selected_target = options.get("target", "all")
518 if selected_target != "all" and selected_target != target:
525 return CheckTargetProperty
527 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
529 seckey = base64.b64decode(seckey)
533 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
534 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
539 dlLock = locks.WorkerLock("worker_dl")
541 checkBuiltin = re.sub('[\t\n ]+', ' ', """
543 local symbol op path file;
544 for file in $CHANGED_FILES; do
550 while read symbol op path; do
551 case "$symbol" in package-*)
552 symbol="${symbol##*(}";
553 symbol="${symbol%)}";
554 for file in $CHANGED_FILES; do
555 case "$file" in "package/$path/"*)
556 grep -qsx "$symbol=y" .config && return 0
560 done < tmp/.packagedeps;
566 class IfBuiltinShellCommand(ShellCommand):
567 def _quote(self, str):
568 if re.search("[^a-zA-Z0-9/_.-]", str):
569 return "'%s'" %(re.sub("'", "'\"'\"'", str))
def setCommand(self, command):
    # Accept either a ready-made shell string or a list of argv words.
    # BUG FIX: the check used `(str, unicode)`, but `unicode` does not
    # exist on Python 3 (this file already uses Python-3-only syntax such
    # as super().__init__()), so passing a list command raised NameError.
    if not isinstance(command, str):
        command = ' '.join(map(self._quote, command))
577 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
580 def setupEnvironment(self, cmd):
581 workerEnv = self.workerEnvironment
582 if workerEnv is None:
585 for request in self.build.requests:
586 for source in request.sources:
587 for change in source.changes:
588 for file in change.files:
589 changedFiles[file] = True
590 fullSlaveEnv = workerEnv.copy()
591 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
592 cmd.args['env'] = fullSlaveEnv
596 for worker in c['workers']:
597 workerNames.append(worker.workername)
599 force_factory = BuildFactory()
601 c['builders'].append(BuilderConfig(
602 name = "00_force_build",
603 workernames = workerNames,
604 factory = force_factory))
606 for target in targets:
607 ts = target.split('/')
609 factory = BuildFactory()
611 # setup shared work directory if required
612 factory.addStep(ShellCommand(
614 description = "Setting up shared work directory",
615 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
617 haltOnFailure = True,
618 doStepIf = IsSharedWorkdir))
620 # find number of cores
621 factory.addStep(SetPropertyFromCommand(
624 description = "Finding number of CPUs",
625 command = ["nproc"]))
627 # find gcc and g++ compilers
628 factory.addStep(FileDownload(
629 name = "dlfindbinpl",
630 mastersrc = scripts_dir + '/findbin.pl',
631 workerdest = "../findbin.pl",
634 factory.addStep(SetPropertyFromCommand(
636 property = "cc_command",
637 description = "Finding gcc command",
639 "../findbin.pl", "gcc",
640 cc_version[0] if cc_version is not None else '',
641 cc_version[1] if cc_version is not None else ''
643 haltOnFailure = True))
645 factory.addStep(SetPropertyFromCommand(
647 property = "cxx_command",
648 description = "Finding g++ command",
650 "../findbin.pl", "g++",
651 cc_version[0] if cc_version is not None else '',
652 cc_version[1] if cc_version is not None else ''
654 haltOnFailure = True))
656 # see if ccache is available
657 factory.addStep(SetPropertyFromCommand(
658 property = "ccache_command",
659 command = ["which", "ccache"],
660 description = "Testing for ccache command",
661 haltOnFailure = False,
662 flunkOnFailure = False,
663 warnOnFailure = False,
666 # expire tree if needed
668 factory.addStep(FileDownload(
670 doStepIf = IsExpireRequested,
671 mastersrc = scripts_dir + '/expire.sh',
672 workerdest = "../expire.sh",
675 factory.addStep(ShellCommand(
677 description = "Checking for build tree expiry",
678 command = ["./expire.sh", str(tree_expire)],
680 haltOnFailure = True,
681 doStepIf = IsExpireRequested,
684 # cleanup.sh if needed
685 factory.addStep(FileDownload(
686 name = "dlcleanupsh",
687 mastersrc = scripts_dir + '/cleanup.sh',
688 workerdest = "../cleanup.sh",
690 doStepIf = IsCleanupRequested))
692 factory.addStep(ShellCommand(
694 description = "Cleaning previous builds",
695 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
697 haltOnFailure = True,
698 doStepIf = IsCleanupRequested,
701 factory.addStep(ShellCommand(
703 description = "Cleaning work area",
704 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
706 haltOnFailure = True,
707 doStepIf = IsCleanupRequested,
710 # user-requested clean targets
711 for tuple in CleanTargetMap:
712 factory.addStep(ShellCommand(
714 description = 'User-requested "make %s"' % tuple[1],
715 command = ["make", tuple[1], "V=s"],
717 doStepIf = IsMakeCleanRequested(tuple[0])
720 # Workaround bug when switching from a checked out tag back to a branch
721 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
722 factory.addStep(ShellCommand(
723 name = "gitcheckout",
724 description = "Ensure that Git HEAD is sane",
725 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
726 haltOnFailure = True))
728 # check out the source
730 # if repo doesn't exist: 'git clone repourl'
# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
732 # 'git fetch -t repourl branch; git reset --hard revision'
733 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
734 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
738 branch = repo_branch,
741 haltOnFailure = True,
742 doStepIf = IsGitCleanRequested,
748 branch = repo_branch,
751 haltOnFailure = True,
752 doStepIf = IsGitFreshRequested,
756 factory.addStep(ShellCommand(
758 description = "Fetching Git remote refs",
759 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
764 factory.addStep(ShellCommand(
766 description = "Checking out Git tag",
767 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
768 haltOnFailure = True,
769 doStepIf = IsTaggingRequested
772 # Verify that Git HEAD points to a tag or branch
773 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
774 factory.addStep(ShellCommand(
776 description = "Ensure that Git HEAD is pointing to a branch or tag",
777 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
778 haltOnFailure = True))
780 factory.addStep(ShellCommand(
782 description = "Remove tmp folder",
783 command=["rm", "-rf", "tmp/"]))
786 # factory.addStep(ShellCommand(
787 # name = "feedsconf",
788 # description = "Copy the feeds.conf",
789 # command='''cp ~/feeds.conf ./feeds.conf''' ))
792 factory.addStep(ShellCommand(
793 name = "rmfeedlinks",
794 description = "Remove feed symlinks",
795 command=["rm", "-rf", "package/feeds/"]))
797 factory.addStep(StringDownload(
799 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
800 workerdest = "../ccache_cc.sh",
804 factory.addStep(StringDownload(
806 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
807 workerdest = "../ccache_cxx.sh",
813 factory.addStep(StringDownload(
814 name = "dlgitclonekey",
816 workerdest = "../git-clone.key",
820 factory.addStep(ShellCommand(
821 name = "patchfeedsconf",
822 description = "Patching feeds.conf",
823 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
828 factory.addStep(ShellCommand(
829 name = "updatefeeds",
830 description = "Updating feeds",
831 command=["./scripts/feeds", "update"],
832 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
838 factory.addStep(ShellCommand(
839 name = "rmfeedsconf",
840 description = "Removing feeds.conf",
841 command=["rm", "feeds.conf"],
846 factory.addStep(ShellCommand(
847 name = "installfeeds",
848 description = "Installing feeds",
849 command=["./scripts/feeds", "install", "-a"],
850 env = MakeEnv(tryccache=True),
855 if config_seed is not None:
856 factory.addStep(StringDownload(
857 name = "dlconfigseed",
858 s = config_seed + '\n',
859 workerdest = ".config",
864 factory.addStep(ShellCommand(
866 description = "Seeding .config",
867 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
870 factory.addStep(ShellCommand(
872 description = "Removing output directory",
873 command = ["rm", "-rf", "bin/"]
876 factory.addStep(ShellCommand(
878 description = "Populating .config",
879 command = ["make", "defconfig"],
884 factory.addStep(ShellCommand(
886 description = "Checking architecture",
887 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
895 factory.addStep(SetPropertyFromCommand(
898 description = "Finding libc suffix",
899 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
902 if usign_key is not None:
903 factory.addStep(StringDownload(
904 name = "dlkeybuildpub",
905 s = UsignSec2Pub(usign_key, usign_comment),
906 workerdest = "key-build.pub",
910 factory.addStep(StringDownload(
912 s = "# fake private key",
913 workerdest = "key-build",
917 factory.addStep(StringDownload(
918 name = "dlkeybuilducert",
919 s = "# fake certificate",
920 workerdest = "key-build.ucert",
925 factory.addStep(ShellCommand(
927 description = "Preparing dl/",
928 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
934 factory.addStep(ShellCommand(
936 description = "Building and installing GNU tar",
937 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
938 env = MakeEnv(tryccache=True),
943 factory.addStep(ShellCommand(
945 description = "Populating dl/",
946 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
949 locks = [dlLock.access('exclusive')],
952 factory.addStep(ShellCommand(
954 description = "Cleaning base-files",
955 command=["make", "package/base-files/clean", "V=s"]
959 factory.addStep(ShellCommand(
961 description = "Building and installing tools",
962 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
963 env = MakeEnv(tryccache=True),
967 factory.addStep(ShellCommand(
969 description = "Building and installing toolchain",
970 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
975 factory.addStep(ShellCommand(
977 description = "Building kmods",
978 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
980 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
984 # find kernel version
985 factory.addStep(SetPropertyFromCommand(
986 name = "kernelversion",
987 property = "kernelversion",
988 description = "Finding the effective Kernel version",
989 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
990 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
993 factory.addStep(ShellCommand(
995 description = "Cleaning up package build",
996 command=["make", "package/cleanup", "V=s"]
999 factory.addStep(ShellCommand(
1001 description = "Building packages",
1002 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1004 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1005 haltOnFailure = True
1008 # factory.addStep(IfBuiltinShellCommand(
1009 factory.addStep(ShellCommand(
1010 name = "pkginstall",
1011 description = "Installing packages",
1012 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1014 haltOnFailure = True
1017 factory.addStep(ShellCommand(
1019 description = "Indexing packages",
1020 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1022 haltOnFailure = True
1025 if enable_kmod_archive and embed_kmod_repository:
1026 # embed kmod repository. Must happen before 'images'
1028 # find rootfs staging directory
1029 factory.addStep(SetPropertyFromCommand(
1031 property = "stageroot",
1032 description = "Finding the rootfs staging directory",
1033 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1034 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1038 factory.addStep(ShellCommand(
1040 description = "Creating file overlay directory",
1041 command=["mkdir", "-p", "files/etc/opkg"],
1042 haltOnFailure = True
1045 factory.addStep(ShellCommand(
1046 name = "kmodconfig",
1047 description = "Embedding kmod repository configuration",
1048 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1049 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1050 haltOnFailure = True
1053 #factory.addStep(IfBuiltinShellCommand(
1054 factory.addStep(ShellCommand(
1056 description = "Building and installing images",
1057 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1059 haltOnFailure = True
1062 factory.addStep(ShellCommand(
1064 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1065 command = "make -j1 buildinfo V=s || true",
1067 haltOnFailure = True
1070 factory.addStep(ShellCommand(
1071 name = "json_overview_image_info",
1072 description = "Generate profiles.json in target folder",
1073 command = "make -j1 json_overview_image_info V=s || true",
1075 haltOnFailure = True
1078 factory.addStep(ShellCommand(
1080 description = "Calculating checksums",
1081 command=["make", "-j1", "checksum", "V=s"],
1083 haltOnFailure = True
1086 if enable_kmod_archive:
1087 factory.addStep(ShellCommand(
1089 description = "Creating kmod directory",
1090 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1091 haltOnFailure = True
1094 factory.addStep(ShellCommand(
1095 name = "kmodprepare",
1096 description = "Preparing kmod archive",
1097 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1098 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1099 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1100 haltOnFailure = True
1103 factory.addStep(ShellCommand(
1105 description = "Indexing kmod archive",
1106 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1107 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1109 haltOnFailure = True
1113 if ini.has_option("gpg", "key") or usign_key is not None:
1114 factory.addStep(MasterShellCommand(
1115 name = "signprepare",
1116 description = "Preparing temporary signing directory",
1117 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1118 haltOnFailure = True
1121 factory.addStep(ShellCommand(
1123 description = "Packing files to sign",
1124 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1125 haltOnFailure = True
1128 factory.addStep(FileUpload(
1129 workersrc = "sign.tar.gz",
1130 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1131 haltOnFailure = True
1134 factory.addStep(MasterShellCommand(
1136 description = "Signing files",
1137 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1138 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1139 haltOnFailure = True
1142 factory.addStep(FileDownload(
1143 name = "dlsigntargz",
1144 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1145 workerdest = "sign.tar.gz",
1146 haltOnFailure = True
1149 factory.addStep(ShellCommand(
1150 name = "signunpack",
1151 description = "Unpacking signed files",
1152 command = ["tar", "-xzf", "sign.tar.gz"],
1153 haltOnFailure = True
1157 factory.addStep(ShellCommand(
1158 name = "dirprepare",
1159 description = "Preparing upload directory structure",
1160 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1161 haltOnFailure = True
1164 factory.addStep(ShellCommand(
1165 name = "linkprepare",
1166 description = "Preparing repository symlink",
1167 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1168 doStepIf = IsNoMasterBuild,
1169 haltOnFailure = True
1172 if enable_kmod_archive:
1173 factory.addStep(ShellCommand(
1174 name = "kmoddirprepare",
1175 description = "Preparing kmod archive upload directory",
1176 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1177 haltOnFailure = True
1180 factory.addStep(ShellCommand(
1182 description = "Uploading directory structure",
1183 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1184 env={'RSYNC_PASSWORD': rsync_bin_key},
1185 haltOnFailure = True,
1189 # download remote sha256sums to 'target-sha256sums'
# Fetch the server-side sha256sums for this target so unchanged files can be
# skipped later. Failure is tolerated (all three failure flags off): the
# remote file does not exist before the first upload of a target.
1190 factory.addStep(ShellCommand(
1191 name = "target-sha256sums",
1192 description = "Fetching remote sha256sums for target",
1193 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1194 env={'RSYNC_PASSWORD': rsync_bin_key},
1196 haltOnFailure = False,
1197 flunkOnFailure = False,
1198 warnOnFailure = False,
1201 # build list of files to upload
# Copy the sha2rsync.pl helper from the master into the worker's parent
# directory (outside the build tree, hence the "../" destination).
1202 factory.addStep(FileDownload(
1203 name = "dlsha2rsyncpl",
1204 mastersrc = scripts_dir + '/sha2rsync.pl',
1205 workerdest = "../sha2rsync.pl",
# sha2rsync.pl compares the remote sha256sums against the locally built ones
# and writes the names of new/changed files to 'rsynclist'.
1209 factory.addStep(ShellCommand(
1211 description = "Building list of files to upload",
1212 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1213 haltOnFailure = True,
# rsync.sh wrapper script, also used by all the upload steps below.
1216 factory.addStep(FileDownload(
1217 name = "dlrsync.sh",
1218 mastersrc = scripts_dir + '/rsync.sh',
1219 workerdest = "../rsync.sh",
1223 # upload new files and update existing ones
# Upload only the files named in 'rsynclist' (kmods are excluded here and
# mirrored separately below). --delay-updates plus a per-target partial dir
# keeps the remote tree consistent while the transfer is in flight.
1224 factory.addStep(ShellCommand(
1225 name = "targetupload",
1226 description = "Uploading target files",
1227 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1228 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1229 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1230 env={'RSYNC_PASSWORD': rsync_bin_key},
1231 haltOnFailure = True,
1235 # delete files which don't exist locally
# Second pass: --delete --existing --ignore-existing removes remote files
# that vanished locally without transferring any file contents.
1236 factory.addStep(ShellCommand(
1237 name = "targetprune",
1238 description = "Pruning target files",
1239 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1240 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1241 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1242 env={'RSYNC_PASSWORD': rsync_bin_key},
1243 haltOnFailure = True,
# Mirror the per-kernel-version kmod archive (full sync with --delete).
1247 if enable_kmod_archive:
1248 factory.addStep(ShellCommand(
1249 name = "kmodupload",
1250 description = "Uploading kmod archive",
1251 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1252 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1253 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1254 env={'RSYNC_PASSWORD': rsync_bin_key},
1255 haltOnFailure = True,
# Source tarball mirroring, only when a source rsync target is configured.
1259 if rsync_src_url is not None:
# List dl/ files newer than .config (i.e. fetched during this build) into
# 'sourcelist'; this is a shell string because of the output redirection.
1260 factory.addStep(ShellCommand(
1261 name = "sourcelist",
1262 description = "Finding source archives to upload",
1263 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1264 haltOnFailure = True
# --size-only: source archives never change in place, so comparing sizes is
# sufficient and avoids checksumming large tarballs. The partial dir embeds
# the worker name because multiple workers may upload to the same mirror.
1267 factory.addStep(ShellCommand(
1268 name = "sourceupload",
1269 description = "Uploading source archives",
1270 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1271 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1272 env={'RSYNC_PASSWORD': rsync_src_key},
1273 haltOnFailure = True,
# Upload the per-arch package tree; best-effort (haltOnFailure = False), so
# a package mirror hiccup does not fail an otherwise good build.
1278 factory.addStep(ShellCommand(
1279 name = "packageupload",
1280 description = "Uploading package files",
1281 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1282 env={'RSYNC_PASSWORD': rsync_bin_key},
1283 haltOnFailure = False,
# Upload the build logs into a per-target/subtarget directory; best-effort.
1289 factory.addStep(ShellCommand(
1291 description = "Uploading logs",
1292 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1293 env={'RSYNC_PASSWORD': rsync_bin_key},
1294 haltOnFailure = False,
# Record worker disk usage for post-mortem debugging; LC_ALL=C keeps the
# df output format stable regardless of the worker's locale.
1299 factory.addStep(ShellCommand(
1301 description = "Reporting disk usage",
1302 command=["df", "-h", "."],
1303 env={'LC_ALL': 'C'},
1304 haltOnFailure = False,
# ccache statistics; PATH is extended so the ccache shipped in the build's
# staging_dir is found. Purely informational: every failure flag is off.
1308 factory.addStep(ShellCommand(
1309 name = "ccachestat",
1310 description = "Reporting ccache stats",
1311 command=["ccache", "-s"],
1312 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1313 want_stderr = False,
1314 haltOnFailure = False,
1315 flunkOnFailure = False,
1316 warnOnFailure = False,
# Register the per-target builder and a Triggerable scheduler for it, then
# add a Trigger step to the force-build factory that fires this target's
# scheduler (when the target is selected in the force form), forwarding the
# 'reason' property and the tag value.
1320 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1322 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1323 force_factory.addStep(steps.Trigger(
1324 name = "trigger_%s" % target,
1325 description = "Triggering %s build" % target,
1326 schedulerNames = [ "trigger_%s" % target ],
1327 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1328 doStepIf = IsTargetSelected(target)
1332 ####### STATUS TARGETS
1334 # Status reporting: build results are exposed through the web UI configured
1335 # in c['www'] (waterfall/console views, optional authentication) and pushed
1336 # to the reporters appended to c['services'] (e.g. the optional IRC bot).
# Web UI: bind address/port from the ini file, with waterfall and console
# views enabled.
1338 if ini.has_option("phase1", "status_bind"):
1340 'port': ini.get("phase1", "status_bind"),
1342 'waterfall_view': True,
1343 'console_view': True,
# Optional HTTP auth: a single admin account; only the "admins" role may use
# control endpoints (force/stop/etc. via AnyControlEndpointMatcher).
1348 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1349 c['www']['auth'] = util.UserPasswordAuth([
1350 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1352 c['www']['authz'] = util.Authz(
1353 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1354 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# Optional IRC reporter: host, nickname and channel are required; port and
# password are read only if present. Reports exceptions, new problems and
# recoveries, not every build.
1358 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1359 irc_host = ini.get("irc", "host")
1361 irc_chan = ini.get("irc", "channel")
1362 irc_nick = ini.get("irc", "nickname")
1365 if ini.has_option("irc", "port"):
1366 irc_port = ini.getint("irc", "port")
1368 if ini.has_option("irc", "password"):
1369 irc_pass = ini.get("irc", "password")
1371 irc = reporters.IRC(irc_host, irc_nick,
1373 password = irc_pass,
1374 channels = [ irc_chan ],
1375 notify_events = [ 'exception', 'problem', 'recovery' ]
1378 c['services'].append(irc)
# Turn openwrt.org git clone URLs into gitweb commit links in the web UI.
1380 c['revlink'] = util.RevlinkMatch([
1381 r'https://git.openwrt.org/openwrt/(.*).git'
1383 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1388 # This specifies what database buildbot uses to store its state. You can leave
1389 # this at its default for all but the largest installations.
1390 'db_url' : "sqlite:///state.sqlite",
# Opt out of buildbot's anonymous phone-home usage reporting.
1393 c['buildbotNetUsageData'] = None