2 # ex: set syntax=python:
10 from datetime import timedelta
12 from buildbot import locks
13 from buildbot.changes import filter
14 from buildbot.changes.gitpoller import GitPoller
15 from buildbot.config import BuilderConfig
16 from buildbot.plugins import reporters
17 from buildbot.plugins import schedulers
18 from buildbot.plugins import steps
19 from buildbot.plugins import util
20 from buildbot.process import properties
21 from buildbot.process.factory import BuildFactory
22 from buildbot.process.properties import Interpolate
23 from buildbot.process.properties import Property
24 from buildbot.schedulers.basic import SingleBranchScheduler
25 from buildbot.schedulers.forcesched import BaseParameter
26 from buildbot.schedulers.forcesched import ForceScheduler
27 from buildbot.schedulers.forcesched import ValidationError
28 from buildbot.steps.master import MasterShellCommand
29 from buildbot.steps.shell import SetPropertyFromCommand
30 from buildbot.steps.shell import ShellCommand
31 from buildbot.steps.source.git import Git
32 from buildbot.steps.transfer import FileDownload
33 from buildbot.steps.transfer import FileUpload
34 from buildbot.steps.transfer import StringDownload
35 from buildbot.worker import Worker
38 # This is a sample buildmaster config file. It must be installed as
39 # 'master.cfg' in your buildmaster's base directory.
41 ini = configparser.ConfigParser()
42 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
44 # This is the dictionary that the buildmaster pays attention to. We also use
45 # a shorter alias to save typing.
46 c = BuildmasterConfig = {}
48 ####### PROJECT IDENTITY
50 # the 'title' string will appear at the top of this buildbot
51 # installation's html.WebStatus home page (linked to the
52 # 'titleURL') and is embedded in the title of the waterfall HTML page.
54 c['title'] = ini.get("general", "title")
55 c['titleURL'] = ini.get("general", "title_url")
57 # the 'buildbotURL' string should point to the location where the buildbot's
58 # internal web server (usually the html.WebStatus page) is visible. This
59 # typically uses the port number set in the Waterfall 'status' entry, but
60 # with an externally-visible host name which the buildbot cannot figure out
63 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
67 # The 'workers' list defines the set of recognized buildworkers. Each element is
68 # a Worker object, specifying a unique worker name and password. The same
69 # worker name and password must be configured on the worker.
# TCP port the master listens on for worker connections; a deployment may
# override the default via the "port" option in the [phase1] ini section.
if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")
79 for section in ini.sections():
80 if section.startswith("worker "):
81 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
82 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
83 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
84 name = ini.get(section, "name")
85 password = ini.get(section, "password")
87 if ini.has_option(section, "builds"):
88 max_builds = ini.getint(section, "builds")
89 sl_props['max_builds'] = max_builds
91 sl_props['shared_wd'] = True
92 if ini.has_option(section, "cleanup"):
93 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
94 if ini.has_option(section, "dl_lock"):
95 lockname = ini.get(section, "dl_lock")
96 sl_props['dl_lock'] = lockname
97 if lockname not in NetLocks:
98 NetLocks[lockname] = locks.MasterLock(lockname)
# Optional per-worker upload lock: workers naming the same lock share one
# master-side mutex, serialising their uploads.
if ini.has_option(section, "ul_lock"):
    # BUGFIX: previously read the "dl_lock" option here (copy-paste from the
    # download-lock branch above), so ul_lock configs were silently ignored
    # and the upload lock aliased the download lock name.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
104 if ini.has_option(section, "shared_wd"):
105 shared_wd = ini.getboolean(section, "shared_wd")
106 sl_props['shared_wd'] = shared_wd
107 if shared_wd and (max_builds != 1):
108 raise ValueError('max_builds must be 1 with shared workdir!')
109 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
111 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their --master option).
114 c['protocols'] = {'pb': {'port': worker_port}}
117 c['collapseRequests'] = True
119 # Reduce amount of backlog data
120 c['configurators'] = [util.JanitorConfigurator(
121 logHorizon=timedelta(days=3),
125 ####### CHANGESOURCES
127 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
128 scripts_dir = os.path.abspath("../scripts")
141 if ini.has_option("phase1", "expire"):
142 tree_expire = ini.getint("phase1", "expire")
144 if ini.has_option("phase1", "other_builds"):
145 other_builds = ini.getint("phase1", "other_builds")
147 if ini.has_option("phase1", "cc_version"):
148 cc_version = ini.get("phase1", "cc_version").split()
149 if len(cc_version) == 1:
150 cc_version = ["eq", cc_version[0]]
152 if ini.has_option("general", "git_ssh"):
153 git_ssh = ini.getboolean("general", "git_ssh")
155 if ini.has_option("general", "git_ssh_key"):
156 git_ssh_key = ini.get("general", "git_ssh_key")
160 if ini.has_option("phase1", "config_seed"):
161 config_seed = ini.get("phase1", "config_seed")
163 repo_url = ini.get("repo", "url")
164 repo_branch = "master"
166 if ini.has_option("repo", "branch"):
167 repo_branch = ini.get("repo", "branch")
169 rsync_bin_url = ini.get("rsync", "binary_url")
170 rsync_bin_key = ini.get("rsync", "binary_password")
171 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
173 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
174 rsync_bin_defopts += ["--contimeout=20"]
178 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
180 if ini.has_option("rsync", "source_url"):
181 rsync_src_url = ini.get("rsync", "source_url")
182 rsync_src_key = ini.get("rsync", "source_password")
184 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
185 rsync_src_defopts += ["--contimeout=20"]
188 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
190 if ini.has_option("usign", "key"):
191 usign_key = ini.get("usign", "key")
193 if ini.has_option("usign", "comment"):
194 usign_comment = ini.get("usign", "comment")
196 enable_kmod_archive = False
197 embed_kmod_repository = False
199 if ini.has_option("phase1", "kmod_archive"):
200 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
202 if ini.has_option("phase1", "kmod_repository"):
203 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
209 if not os.path.isdir(work_dir+'/source.git'):
210 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
212 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
214 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
215 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
216 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
219 line = findtargets.stdout.readline()
222 ta = line.decode().strip().split(' ')
223 targets.append(ta[0])
226 # the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source Git repository.
229 c['change_source'] = []
230 c['change_source'].append(GitPoller(
232 workdir=work_dir+'/work.git', branch=repo_branch,
237 # Configure the Schedulers, which decide how to react to incoming changes. In this
238 # case, just kick off a 'basebuild' build
240 class TagChoiceParameter(BaseParameter):
241 spec_attributes = ["strict", "choices"]
245 def __init__(self, name, label=None, **kw):
246 super().__init__(name, label, **kw)
247 self._choice_list = []
252 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
255 findtags = subprocess.Popen(
256 ['git', 'ls-remote', '--tags', repo_url],
257 stdout = subprocess.PIPE)
260 line = findtags.stdout.readline()
265 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
267 if tagver and tagver[1].find(basever[1]) == 0:
268 taglist.append(tagver[1])
270 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
271 taglist.insert(0, '')
273 self._choice_list = taglist
275 return self._choice_list
277 def parse_from_arg(self, s):
278 if self.strict and s not in self._choice_list:
279 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
283 c['schedulers'].append(SingleBranchScheduler(
285 change_filter = filter.ChangeFilter(branch=repo_branch),
286 treeStableTimer = 60,
287 builderNames = targets))
289 c['schedulers'].append(ForceScheduler(
291 buttonName = "Force builds",
292 label = "Force build details",
293 builderNames = [ "00_force_build" ],
296 util.CodebaseParameter(
298 label = "Repository",
299 branch = util.FixedParameter(name = "branch", default = ""),
300 revision = util.FixedParameter(name = "revision", default = ""),
301 repository = util.FixedParameter(name = "repository", default = ""),
302 project = util.FixedParameter(name = "project", default = "")
306 reason = util.StringParameter(
309 default = "Trigger build",
315 util.NestedParameter(
317 label="Build Options",
320 util.ChoiceStringParameter(
322 label = "Build target",
324 choices = [ "all" ] + targets
338 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
339 # what steps, and which workers can execute them. Note that any particular build will
340 # only take place on one worker.
343 [ "tools", "tools/clean" ],
344 [ "chain", "toolchain/clean" ],
345 [ "linux", "target/linux/clean" ],
346 [ "dir", "dirclean" ],
347 [ "dist", "distclean" ]
350 def IsMakeCleanRequested(pattern):
351 def CheckCleanProperty(step):
352 val = step.getProperty("clean")
353 if val and re.match(pattern, val):
358 return CheckCleanProperty
def IsSharedWorkdir(step):
    """Return True when the worker advertises a shared work directory."""
    shared = step.getProperty("shared_wd")
    if shared:
        return True
    return False
363 def IsCleanupRequested(step):
364 if IsSharedWorkdir(step):
366 do_cleanup = step.getProperty("do_cleanup")
372 def IsExpireRequested(step):
373 if IsSharedWorkdir(step):
376 return not IsCleanupRequested(step)
378 def IsGitFreshRequested(step):
379 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    """Complement of IsGitFreshRequested: a 'clean' checkout is used whenever a 'fresh' one was not asked for."""
    return False if IsGitFreshRequested(step) else True
388 def IsTaggingRequested(step):
389 val = step.getProperty("tag")
390 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    """Return True when this build was not triggered for a release tag."""
    if IsTaggingRequested(step):
        return False
    return True
def IsNoMasterBuild(step):
    """Return True for every configured branch except 'master'."""
    return not (repo_branch == "master")
401 def GetBaseVersion():
402 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
403 return repo_branch.split('-')[1]
408 def GetVersionPrefix(props):
409 basever = GetBaseVersion()
410 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
411 return "%s/" % props["tag"]
412 elif basever != "master":
413 return "%s-SNAPSHOT/" % basever
418 def GetNumJobs(props):
419 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
420 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
426 if props.hasProperty("cc_command"):
427 return props["cc_command"]
433 if props.hasProperty("cxx_command"):
434 return props["cxx_command"]
440 if props.hasProperty("builddir"):
441 return props["builddir"]
442 elif props.hasProperty("workdir"):
443 return props["workdir"]
448 def GetCCache(props):
449 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
450 return props["ccache_command"]
454 def GetNextBuild(builder, requests):
456 if r.properties and r.properties.hasProperty("tag"):
460 def MakeEnv(overrides=None, tryccache=False):
462 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
463 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
466 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
467 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
468 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
470 env['CC'] = env['CCC']
471 env['CXX'] = env['CCXX']
473 if overrides is not None:
474 env.update(overrides)
478 def NetLockDl(props):
480 if props.hasProperty("dl_lock"):
481 lock = NetLocks[props["dl_lock"]]
483 return [lock.access('exclusive')]
488 def NetLockUl(props):
490 if props.hasProperty("ul_lock"):
491 lock = NetLocks[props["ul_lock"]]
493 return [lock.access('exclusive')]
498 def TagPropertyValue(props):
499 if props.hasProperty("options"):
500 options = props.getProperty("options")
501 if type(options) is dict:
502 return options.get("tag")
505 def IsTargetSelected(target):
506 def CheckTargetProperty(step):
508 options = step.getProperty("options")
509 if type(options) is dict:
510 selected_target = options.get("target", "all")
511 if selected_target != "all" and selected_target != target:
518 return CheckTargetProperty
520 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
522 seckey = base64.b64decode(seckey)
526 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
527 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
532 dlLock = locks.WorkerLock("worker_dl")
534 checkBuiltin = re.sub('[\t\n ]+', ' ', """
536 local symbol op path file;
537 for file in $CHANGED_FILES; do
543 while read symbol op path; do
544 case "$symbol" in package-*)
545 symbol="${symbol##*(}";
546 symbol="${symbol%)}";
547 for file in $CHANGED_FILES; do
548 case "$file" in "package/$path/"*)
549 grep -qsx "$symbol=y" .config && return 0
553 done < tmp/.packagedeps;
559 class IfBuiltinShellCommand(ShellCommand):
560 def _quote(self, str):
561 if re.search("[^a-zA-Z0-9/_.-]", str):
562 return "'%s'" %(re.sub("'", "'\"'\"'", str))
565 def setCommand(self, command):
566 if not isinstance(command, (str, unicode)):
567 command = ' '.join(map(self._quote, command))
570 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
573 def setupEnvironment(self, cmd):
574 workerEnv = self.workerEnvironment
575 if workerEnv is None:
578 for request in self.build.requests:
579 for source in request.sources:
580 for change in source.changes:
581 for file in change.files:
582 changedFiles[file] = True
583 fullSlaveEnv = workerEnv.copy()
584 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
585 cmd.args['env'] = fullSlaveEnv
589 for worker in c['workers']:
590 workerNames.append(worker.workername)
592 force_factory = BuildFactory()
594 c['builders'].append(BuilderConfig(
595 name = "00_force_build",
596 workernames = workerNames,
597 factory = force_factory))
599 for target in targets:
600 ts = target.split('/')
602 factory = BuildFactory()
604 # setup shared work directory if required
605 factory.addStep(ShellCommand(
607 description = "Setting up shared work directory",
608 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
610 haltOnFailure = True,
611 doStepIf = IsSharedWorkdir))
613 # find number of cores
614 factory.addStep(SetPropertyFromCommand(
617 description = "Finding number of CPUs",
618 command = ["nproc"]))
620 # find gcc and g++ compilers
621 factory.addStep(FileDownload(
622 name = "dlfindbinpl",
623 mastersrc = scripts_dir + '/findbin.pl',
624 workerdest = "../findbin.pl",
627 factory.addStep(SetPropertyFromCommand(
629 property = "cc_command",
630 description = "Finding gcc command",
632 "../findbin.pl", "gcc",
633 cc_version[0] if cc_version is not None else '',
634 cc_version[1] if cc_version is not None else ''
636 haltOnFailure = True))
638 factory.addStep(SetPropertyFromCommand(
640 property = "cxx_command",
641 description = "Finding g++ command",
643 "../findbin.pl", "g++",
644 cc_version[0] if cc_version is not None else '',
645 cc_version[1] if cc_version is not None else ''
647 haltOnFailure = True))
649 # see if ccache is available
650 factory.addStep(SetPropertyFromCommand(
651 property = "ccache_command",
652 command = ["which", "ccache"],
653 description = "Testing for ccache command",
654 haltOnFailure = False,
655 flunkOnFailure = False,
656 warnOnFailure = False,
659 # expire tree if needed
661 factory.addStep(FileDownload(
663 doStepIf = IsExpireRequested,
664 mastersrc = scripts_dir + '/expire.sh',
665 workerdest = "../expire.sh",
668 factory.addStep(ShellCommand(
670 description = "Checking for build tree expiry",
671 command = ["./expire.sh", str(tree_expire)],
673 haltOnFailure = True,
674 doStepIf = IsExpireRequested,
677 # cleanup.sh if needed
678 factory.addStep(FileDownload(
679 name = "dlcleanupsh",
680 mastersrc = scripts_dir + '/cleanup.sh',
681 workerdest = "../cleanup.sh",
683 doStepIf = IsCleanupRequested))
685 factory.addStep(ShellCommand(
687 description = "Cleaning previous builds",
688 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
690 haltOnFailure = True,
691 doStepIf = IsCleanupRequested,
694 factory.addStep(ShellCommand(
696 description = "Cleaning work area",
697 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
699 haltOnFailure = True,
700 doStepIf = IsCleanupRequested,
703 # user-requested clean targets
704 for tuple in CleanTargetMap:
705 factory.addStep(ShellCommand(
707 description = 'User-requested "make %s"' % tuple[1],
708 command = ["make", tuple[1], "V=s"],
710 doStepIf = IsMakeCleanRequested(tuple[0])
713 # Workaround bug when switching from a checked out tag back to a branch
714 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
715 factory.addStep(ShellCommand(
716 name = "gitcheckout",
717 description = "Ensure that Git HEAD is sane",
718 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
719 haltOnFailure = True))
721 # check out the source
723 # if repo doesn't exist: 'git clone repourl'
# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
725 # 'git fetch -t repourl branch; git reset --hard revision'
726 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
727 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
731 branch = repo_branch,
734 haltOnFailure = True,
735 doStepIf = IsGitCleanRequested,
741 branch = repo_branch,
744 haltOnFailure = True,
745 doStepIf = IsGitFreshRequested,
749 factory.addStep(ShellCommand(
751 description = "Fetching Git remote refs",
752 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
757 factory.addStep(ShellCommand(
759 description = "Checking out Git tag",
760 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
761 haltOnFailure = True,
762 doStepIf = IsTaggingRequested
765 # Verify that Git HEAD points to a tag or branch
766 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
767 factory.addStep(ShellCommand(
769 description = "Ensure that Git HEAD is pointing to a branch or tag",
770 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
771 haltOnFailure = True))
773 factory.addStep(ShellCommand(
775 description = "Remove tmp folder",
776 command=["rm", "-rf", "tmp/"]))
779 # factory.addStep(ShellCommand(
780 # name = "feedsconf",
781 # description = "Copy the feeds.conf",
782 # command='''cp ~/feeds.conf ./feeds.conf''' ))
785 factory.addStep(ShellCommand(
786 name = "rmfeedlinks",
787 description = "Remove feed symlinks",
788 command=["rm", "-rf", "package/feeds/"]))
790 factory.addStep(StringDownload(
792 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
793 workerdest = "../ccache_cc.sh",
797 factory.addStep(StringDownload(
799 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
800 workerdest = "../ccache_cxx.sh",
806 factory.addStep(StringDownload(
807 name = "dlgitclonekey",
809 workerdest = "../git-clone.key",
813 factory.addStep(ShellCommand(
814 name = "patchfeedsconf",
815 description = "Patching feeds.conf",
816 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
821 factory.addStep(ShellCommand(
822 name = "updatefeeds",
823 description = "Updating feeds",
824 command=["./scripts/feeds", "update"],
825 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
831 factory.addStep(ShellCommand(
832 name = "rmfeedsconf",
833 description = "Removing feeds.conf",
834 command=["rm", "feeds.conf"],
839 factory.addStep(ShellCommand(
840 name = "installfeeds",
841 description = "Installing feeds",
842 command=["./scripts/feeds", "install", "-a"],
843 env = MakeEnv(tryccache=True),
848 if config_seed is not None:
849 factory.addStep(StringDownload(
850 name = "dlconfigseed",
851 s = config_seed + '\n',
852 workerdest = ".config",
857 factory.addStep(ShellCommand(
859 description = "Seeding .config",
860 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
863 factory.addStep(ShellCommand(
865 description = "Removing output directory",
866 command = ["rm", "-rf", "bin/"]
869 factory.addStep(ShellCommand(
871 description = "Populating .config",
872 command = ["make", "defconfig"],
877 factory.addStep(ShellCommand(
879 description = "Checking architecture",
880 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
888 factory.addStep(SetPropertyFromCommand(
891 description = "Finding libc suffix",
892 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
895 if usign_key is not None:
896 factory.addStep(StringDownload(
897 name = "dlkeybuildpub",
898 s = UsignSec2Pub(usign_key, usign_comment),
899 workerdest = "key-build.pub",
903 factory.addStep(StringDownload(
905 s = "# fake private key",
906 workerdest = "key-build",
910 factory.addStep(StringDownload(
911 name = "dlkeybuilducert",
912 s = "# fake certificate",
913 workerdest = "key-build.ucert",
918 factory.addStep(ShellCommand(
920 description = "Preparing dl/",
921 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
927 factory.addStep(ShellCommand(
929 description = "Building and installing GNU tar",
930 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
931 env = MakeEnv(tryccache=True),
936 factory.addStep(ShellCommand(
938 description = "Populating dl/",
939 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
942 locks = [dlLock.access('exclusive')],
945 factory.addStep(ShellCommand(
947 description = "Cleaning base-files",
948 command=["make", "package/base-files/clean", "V=s"]
952 factory.addStep(ShellCommand(
954 description = "Building and installing tools",
955 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
956 env = MakeEnv(tryccache=True),
960 factory.addStep(ShellCommand(
962 description = "Building and installing toolchain",
963 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
968 factory.addStep(ShellCommand(
970 description = "Building kmods",
971 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
973 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
977 # find kernel version
978 factory.addStep(SetPropertyFromCommand(
979 name = "kernelversion",
980 property = "kernelversion",
981 description = "Finding the effective Kernel version",
982 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
983 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
986 factory.addStep(ShellCommand(
988 description = "Cleaning up package build",
989 command=["make", "package/cleanup", "V=s"]
992 factory.addStep(ShellCommand(
994 description = "Building packages",
995 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
997 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1001 # factory.addStep(IfBuiltinShellCommand(
1002 factory.addStep(ShellCommand(
1003 name = "pkginstall",
1004 description = "Installing packages",
1005 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1007 haltOnFailure = True
1010 factory.addStep(ShellCommand(
1012 description = "Indexing packages",
1013 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1015 haltOnFailure = True
1018 if enable_kmod_archive and embed_kmod_repository:
1019 # embed kmod repository. Must happen before 'images'
1021 # find rootfs staging directory
1022 factory.addStep(SetPropertyFromCommand(
1024 property = "stageroot",
1025 description = "Finding the rootfs staging directory",
1026 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1027 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1031 factory.addStep(ShellCommand(
1033 description = "Creating file overlay directory",
1034 command=["mkdir", "-p", "files/etc/opkg"],
1035 haltOnFailure = True
1038 factory.addStep(ShellCommand(
1039 name = "kmodconfig",
1040 description = "Embedding kmod repository configuration",
1041 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1042 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1043 haltOnFailure = True
1046 #factory.addStep(IfBuiltinShellCommand(
1047 factory.addStep(ShellCommand(
1049 description = "Building and installing images",
1050 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1052 haltOnFailure = True
1055 factory.addStep(ShellCommand(
1057 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1058 command = "make -j1 buildinfo V=s || true",
1060 haltOnFailure = True
1063 factory.addStep(ShellCommand(
1064 name = "json_overview_image_info",
1065 description = "Generate profiles.json in target folder",
1066 command = "make -j1 json_overview_image_info V=s || true",
1068 haltOnFailure = True
1071 factory.addStep(ShellCommand(
1073 description = "Calculating checksums",
1074 command=["make", "-j1", "checksum", "V=s"],
1076 haltOnFailure = True
1079 if enable_kmod_archive:
1080 factory.addStep(ShellCommand(
1082 description = "Creating kmod directory",
1083 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1084 haltOnFailure = True
1087 factory.addStep(ShellCommand(
1088 name = "kmodprepare",
1089 description = "Preparing kmod archive",
1090 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1091 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1092 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1093 haltOnFailure = True
1096 factory.addStep(ShellCommand(
1098 description = "Indexing kmod archive",
1099 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1100 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1102 haltOnFailure = True
1106 if ini.has_option("gpg", "key") or usign_key is not None:
1107 factory.addStep(MasterShellCommand(
1108 name = "signprepare",
1109 description = "Preparing temporary signing directory",
1110 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1111 haltOnFailure = True
1114 factory.addStep(ShellCommand(
1116 description = "Packing files to sign",
1117 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1118 haltOnFailure = True
1121 factory.addStep(FileUpload(
1122 workersrc = "sign.tar.gz",
1123 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1124 haltOnFailure = True
1127 factory.addStep(MasterShellCommand(
1129 description = "Signing files",
1130 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1131 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1132 haltOnFailure = True
1135 factory.addStep(FileDownload(
1136 name = "dlsigntargz",
1137 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1138 workerdest = "sign.tar.gz",
1139 haltOnFailure = True
1142 factory.addStep(ShellCommand(
1143 name = "signunpack",
1144 description = "Unpacking signed files",
1145 command = ["tar", "-xzf", "sign.tar.gz"],
1146 haltOnFailure = True
1150 factory.addStep(ShellCommand(
1151 name = "dirprepare",
1152 description = "Preparing upload directory structure",
1153 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1154 haltOnFailure = True
1157 factory.addStep(ShellCommand(
1158 name = "linkprepare",
1159 description = "Preparing repository symlink",
1160 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1161 doStepIf = IsNoMasterBuild,
1162 haltOnFailure = True
1165 if enable_kmod_archive:
1166 factory.addStep(ShellCommand(
1167 name = "kmoddirprepare",
1168 description = "Preparing kmod archive upload directory",
1169 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1170 haltOnFailure = True
1173 factory.addStep(ShellCommand(
1175 description = "Uploading directory structure",
1176 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1177 env={'RSYNC_PASSWORD': rsync_bin_key},
1178 haltOnFailure = True,
1182 # download remote sha256sums to 'target-sha256sums'
1183 factory.addStep(ShellCommand(
1184 name = "target-sha256sums",
1185 description = "Fetching remote sha256sums for target",
1186 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1187 env={'RSYNC_PASSWORD': rsync_bin_key},
1189 haltOnFailure = False,
1190 flunkOnFailure = False,
1191 warnOnFailure = False,
1194 # build list of files to upload
1195 factory.addStep(FileDownload(
1196 name = "dlsha2rsyncpl",
1197 mastersrc = scripts_dir + '/sha2rsync.pl',
1198 workerdest = "../sha2rsync.pl",
1202 factory.addStep(ShellCommand(
1204 description = "Building list of files to upload",
1205 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1206 haltOnFailure = True,
1209 factory.addStep(FileDownload(
1210 name = "dlrsync.sh",
1211 mastersrc = scripts_dir + '/rsync.sh',
1212 workerdest = "../rsync.sh",
1216 # upload new files and update existing ones
# Transfer only the files named in 'rsynclist'. kmods/ is excluded here
# because it is uploaded by a dedicated step; --delay-updates plus a
# per-target partial-dir keeps the published tree consistent while the
# transfer is in flight.
1217 factory.addStep(ShellCommand(
1218 name = "targetupload",
1219 description = "Uploading target files",
1220 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1221 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1222 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1223 env={'RSYNC_PASSWORD': rsync_bin_key},
1224 haltOnFailure = True,
1228 # delete files which don't exist locally
# Deletion-only pass: --delete --existing --ignore-existing makes rsync
# remove remote files missing locally while transferring and updating
# nothing (kmods/ again excluded, handled separately).
1229 factory.addStep(ShellCommand(
1230 name = "targetprune",
1231 description = "Pruning target files",
1232 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1233 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1234 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1235 env={'RSYNC_PASSWORD': rsync_bin_key},
1236 haltOnFailure = True,
# Mirror the per-kernel-version kmods/ tree for this target/subtarget to
# the remote archive; --delete makes the remote directory an exact copy
# of the local one.
1240 if enable_kmod_archive:
1241 factory.addStep(ShellCommand(
1242 name = "kmodupload",
1243 description = "Uploading kmod archive",
1244 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1245 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1246 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1247 env={'RSYNC_PASSWORD': rsync_bin_key},
1248 haltOnFailure = True,
# Source-archive upload only happens when a source rsync target is
# configured. List regular, non-empty, non-hidden files in dl/ that are
# newer than .config (i.e. fetched during this build) into 'sourcelist'.
1252 if rsync_src_url is not None:
1253 factory.addStep(ShellCommand(
1254 name = "sourcelist",
1255 description = "Finding source archives to upload",
1256 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1257 haltOnFailure = True
# Upload the archives named in 'sourcelist'. --size-only skips files
# already present with the same size — presumably safe because released
# source tarballs don't change in place (TODO confirm). The partial-dir
# name includes the worker so concurrent workers don't collide.
1260 factory.addStep(ShellCommand(
1261 name = "sourceupload",
1262 description = "Uploading source archives",
1263 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1264 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1265 env={'RSYNC_PASSWORD': rsync_src_key},
1266 haltOnFailure = True,
# Mirror bin/packages/ to the remote packages/ directory. Best-effort:
# haltOnFailure is False so a failed package upload does not abort the
# remaining steps.
1271 factory.addStep(ShellCommand(
1272 name = "packageupload",
1273 description = "Uploading package files",
1274 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1275 env={'RSYNC_PASSWORD': rsync_bin_key},
1276 haltOnFailure = False,
# Mirror the build logs to a per-target/subtarget logs/ directory on the
# remote host; also best-effort (failure does not halt the build).
1282 factory.addStep(ShellCommand(
1284 description = "Uploading logs",
1285 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1286 env={'RSYNC_PASSWORD': rsync_bin_key},
1287 haltOnFailure = False,
# Diagnostic step: report free disk space on the worker's build volume.
# LC_ALL=C pins df's output locale/format.
1292 factory.addStep(ShellCommand(
1294 description = "Reporting disk usage",
1295 command=["df", "-h", "."],
1296 env={'LC_ALL': 'C'},
1297 haltOnFailure = False,
# Diagnostic step: print ccache statistics. PATH is extended with
# ./staging_dir/host/bin — presumably so the build's own ccache binary is
# found if none is installed system-wide (confirm against MakeEnv).
# Fully best-effort: failure neither halts, flunks, nor warns.
1301 factory.addStep(ShellCommand(
1302 name = "ccachestat",
1303 description = "Reporting ccache stats",
1304 command=["ccache", "-s"],
1305 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1306 want_stderr = False,
1307 haltOnFailure = False,
1308 flunkOnFailure = False,
1309 warnOnFailure = False,
# Register the builder for this target and give it a per-target
# Triggerable scheduler ("trigger_<target>").
1313 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1315 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
# Wire the target into the force-build factory: the trigger step passes
# through the requester's "reason" and a tag value, and only fires when
# this target was selected (IsTargetSelected).
1316 force_factory.addStep(steps.Trigger(
1317 name = "trigger_%s" % target,
1318 description = "Triggering %s build" % target,
1319 schedulerNames = [ "trigger_%s" % target ],
1320 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1321 doStepIf = IsTargetSelected(target)
1325 ####### STATUS TARGETS
1327 # Build results are published through reporters (c['services']) and the
1328 # web UI (c['www']). buildbot.plugins.reporters offers a variety to choose
1329 # from, including mail notifiers, webhook pushers, and IRC bots.
# Web UI: only configured when the ini provides a bind address/port;
# the waterfall and console views are enabled.
1331 if ini.has_option("phase1", "status_bind"):
1333 'port': ini.get("phase1", "status_bind"),
1335 'waterfall_view': True,
1336 'console_view': True,
# Optional basic auth: a single user/password pair from the ini. That
# user is granted the "admins" role, and all control endpoints require it.
1341 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1342 c['www']['auth'] = util.UserPasswordAuth([
1343 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1345 c['www']['authz'] = util.Authz(
1346 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1347 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1351 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1352 irc_host = ini.get("irc", "host")
1354 irc_chan = ini.get("irc", "channel")
1355 irc_nick = ini.get("irc", "nickname")
1358 if ini.has_option("irc", "port"):
1359 irc_port = ini.getint("irc", "port")
1361 if ini.has_option("irc", "password"):
1362 irc_pass = ini.get("irc", "password")
1364 irc = reporters.IRC(irc_host, irc_nick,
1366 password = irc_pass,
1367 channels = [ irc_chan ],
1368 notify_events = [ 'exception', 'problem', 'recovery' ]
1371 c['services'].append(irc)
# Turn openwrt.org git clone URLs into cgit commit links in the web UI;
# the \1 backreference carries the repository name into the link template.
1373 c['revlink'] = util.RevlinkMatch([
1374 r'https://git.openwrt.org/openwrt/(.*).git'
1376 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1381 # This specifies what database buildbot uses to store its state. You can leave
1382 # this at its default for all but the largest installations.
1383 'db_url' : "sqlite:///state.sqlite",
# Opt out of reporting anonymized usage statistics to the buildbot project.
1386 c['buildbotNetUsageData'] = None