2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Write our PID to twistd.pid (only if twistd has not already created one),
# so external tooling can locate/signal this master process.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as fh:
        fh.write("{}".format(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
54 # This is the dictionary that the buildmaster pays attention to. We also use
55 # a shorter alias to save typing.
56 c = BuildmasterConfig = {}
58 ####### PROJECT IDENTITY
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
64 c['title'] = ini.get("general", "title")
65 c['titleURL'] = ini.get("general", "title_url")
67 # the 'buildbotURL' string should point to the location where the buildbot's
68 # internal web server (usually the html.WebStatus page) is visible. This
69 # typically uses the port number set in the Waterfall 'status' entry, but
70 # with an externally-visible host name which the buildbot cannot figure out
73 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
77 # The 'workers' list defines the set of recognized buildworkers. Each element is
78 # a Worker object, specifying a unique worker name and password. The same
79 # worker name and password must be configured on the worker.
83 if ini.has_option("phase1", "port"):
84 worker_port = ini.get("phase1", "port")
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
104 if ini.has_option(section, "dl_lock"):
105 lockname = ini.get(section, "dl_lock")
106 sl_props['dl_lock'] = lockname
107 if lockname not in NetLocks:
108 NetLocks[lockname] = locks.MasterLock(lockname)
# Optional per-worker upload lock: workers that name the same lock serialize
# their uploads through one shared master-side lock.
if ini.has_option(section, "ul_lock"):
    # BUG FIX: this branch previously read the "dl_lock" option (copy-paste
    # from the download-lock branch above), so a worker's upload lock was
    # silently taken from its download-lock setting.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
122 # This must match the value configured into the buildworkers (with their
124 c['protocols'] = {'pb': {'port': worker_port}}
127 c['collapseRequests'] = True
129 # Reduce amount of backlog data
130 c['configurators'] = [util.JanitorConfigurator(
131 logHorizon=timedelta(days=3),
135 @defer.inlineCallbacks
136 def getNewestCompleteTime(bldr):
137 """Returns the complete_at of the latest completed and not SKIPPED
138 build request for this builder, or None if there are no such build
139 requests. We need to filter out SKIPPED requests because we're
140 using collapseRequests=True which is unfortunately marking all
141 previous requests as complete when new buildset is created.
143 @returns: datetime instance or None, via Deferred
146 bldrid = yield bldr.getBuilderId()
147 completed = yield bldr.master.data.get(
148 ('builders', bldrid, 'buildrequests'),
150 resultspec.Filter('complete', 'eq', [True]),
151 resultspec.Filter('results', 'ne', [results.SKIPPED]),
153 order=['-complete_at'], limit=1)
157 complete_at = completed[0]['complete_at']
159 last_build = yield bldr.master.data.get(
162 resultspec.Filter('builderid', 'eq', [bldrid]),
164 order=['-started_at'], limit=1)
166 if last_build and last_build[0]:
167 last_complete_at = last_build[0]['complete_at']
168 if last_complete_at and (last_complete_at > complete_at):
169 return last_complete_at
173 @defer.inlineCallbacks
174 def prioritizeBuilders(master, builders):
175 """Returns sorted list of builders by their last timestamp of completed and
178 @returns: list of sorted builders
181 def is_building(bldr):
182 return bool(bldr.building) or bool(bldr.old_building)
185 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
186 d.addCallback(lambda complete_at: (complete_at, bldr))
190 (complete_at, bldr) = item
194 complete_at = date.replace(tzinfo=tzutc())
196 if is_building(bldr):
198 complete_at = date.replace(tzinfo=tzutc())
200 return (complete_at, bldr.name)
202 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
203 results.sort(key=bldr_sort)
206 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
208 return [r[1] for r in results]
210 c['prioritizeBuilders'] = prioritizeBuilders
212 ####### CHANGESOURCES
214 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
215 scripts_dir = os.path.abspath("../scripts")
223 if ini.has_option("phase1", "expire"):
224 tree_expire = ini.getint("phase1", "expire")
226 if ini.has_option("phase1", "config_seed"):
227 config_seed = ini.get("phase1", "config_seed")
229 repo_url = ini.get("repo", "url")
230 repo_branch = "master"
232 if ini.has_option("repo", "branch"):
233 repo_branch = ini.get("repo", "branch")
235 rsync_bin_url = ini.get("rsync", "binary_url")
236 rsync_bin_key = ini.get("rsync", "binary_password")
237 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
239 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
240 rsync_bin_defopts += ["--contimeout=20"]
244 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
246 if ini.has_option("rsync", "source_url"):
247 rsync_src_url = ini.get("rsync", "source_url")
248 rsync_src_key = ini.get("rsync", "source_password")
250 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
251 rsync_src_defopts += ["--contimeout=20"]
254 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
256 if ini.has_option("usign", "key"):
257 usign_key = ini.get("usign", "key")
259 if ini.has_option("usign", "comment"):
260 usign_comment = ini.get("usign", "comment")
262 enable_kmod_archive = False
264 if ini.has_option("phase1", "kmod_archive"):
265 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
271 if not os.path.isdir(work_dir+'/source.git'):
272 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
274 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
276 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
277 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
278 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
281 line = findtargets.stdout.readline()
284 ta = line.decode().strip().split(' ')
285 targets.append(ta[0])
288 # the 'change_source' setting tells the buildmaster how it should find out
289 # about source code changes. Here we point to the buildbot clone of pyflakes.
291 c['change_source'] = []
292 c['change_source'].append(GitPoller(
294 workdir=work_dir+'/work.git', branch=repo_branch,
299 # Configure the Schedulers, which decide how to react to incoming changes. In this
300 # case, just kick off a 'basebuild' build
302 class TagChoiceParameter(BaseParameter):
303 spec_attributes = ["strict", "choices"]
307 def __init__(self, name, label=None, **kw):
308 super().__init__(name, label, **kw)
309 self._choice_list = []
314 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
317 findtags = subprocess.Popen(
318 ['git', 'ls-remote', '--tags', repo_url],
319 stdout = subprocess.PIPE)
322 line = findtags.stdout.readline()
327 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
329 if tagver and tagver[1].find(basever[1]) == 0:
330 taglist.append(tagver[1])
332 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
333 taglist.insert(0, '')
335 self._choice_list = taglist
337 return self._choice_list
339 def parse_from_arg(self, s):
340 if self.strict and s not in self._choice_list:
341 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
345 c['schedulers'].append(SingleBranchScheduler(
347 change_filter = filter.ChangeFilter(branch=repo_branch),
348 treeStableTimer = 60,
349 builderNames = targets))
351 c['schedulers'].append(ForceScheduler(
353 buttonName = "Force builds",
354 label = "Force build details",
355 builderNames = [ "00_force_build" ],
358 util.CodebaseParameter(
360 label = "Repository",
361 branch = util.FixedParameter(name = "branch", default = ""),
362 revision = util.FixedParameter(name = "revision", default = ""),
363 repository = util.FixedParameter(name = "repository", default = ""),
364 project = util.FixedParameter(name = "project", default = "")
368 reason = util.StringParameter(
371 default = "Trigger build",
377 util.NestedParameter(
379 label="Build Options",
382 util.ChoiceStringParameter(
384 label = "Build target",
386 choices = [ "all" ] + targets
400 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
401 # what steps, and which workers can execute them. Note that any particular build will
402 # only take place on one worker.
405 [ "tools", "tools/clean" ],
406 [ "chain", "toolchain/clean" ],
407 [ "linux", "target/linux/clean" ],
408 [ "dir", "dirclean" ],
409 [ "dist", "distclean" ]
412 def IsMakeCleanRequested(pattern):
413 def CheckCleanProperty(step):
414 val = step.getProperty("clean")
415 if val and re.match(pattern, val):
420 return CheckCleanProperty
def IsSharedWorkdir(step):
    """Whether this build's worker is configured for a shared work directory.

    Reads the worker property "shared_wd" (set from config.ini) and maps any
    truthy value to True, everything else (including missing) to False.
    """
    return True if step.getProperty("shared_wd") else False
425 def IsCleanupRequested(step):
426 if IsSharedWorkdir(step):
428 do_cleanup = step.getProperty("do_cleanup")
434 def IsExpireRequested(step):
435 if IsSharedWorkdir(step):
438 return not IsCleanupRequested(step)
440 def IsGitFreshRequested(step):
441 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    """doStepIf predicate: a "clean" git checkout is used whenever a "fresh" one was not requested."""
    return False if IsGitFreshRequested(step) else True
450 def IsTaggingRequested(step):
451 val = step.getProperty("tag")
452 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    """doStepIf predicate: inverse of IsTaggingRequested (no valid release tag supplied)."""
    return False if IsTaggingRequested(step) else True
def IsNoMasterBuild(step):
    """doStepIf predicate: True when the configured repo branch is anything but "master".

    Note: decided purely by the module-level repo_branch; the step argument is
    only present to satisfy the doStepIf callback signature.
    """
    return not (repo_branch == "master")
463 def GetBaseVersion():
464 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
465 return repo_branch.split('-')[1]
470 def GetVersionPrefix(props):
471 basever = GetBaseVersion()
472 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
473 return "%s/" % props["tag"]
474 elif basever != "master":
475 return "%s-SNAPSHOT/" % basever
480 def GetNumJobs(props):
481 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
482 return str(int(int(props["nproc"]) / props["max_builds"]))
488 if props.hasProperty("cc_command"):
489 return props["cc_command"]
495 if props.hasProperty("cxx_command"):
496 return props["cxx_command"]
502 if props.hasProperty("builddir"):
503 return props["builddir"]
504 elif props.hasProperty("workdir"):
505 return props["workdir"]
510 def GetCCache(props):
511 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
512 return props["ccache_command"]
516 def GetNextBuild(builder, requests):
518 if r.properties and r.properties.hasProperty("tag"):
522 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
525 def MakeEnv(overrides=None, tryccache=False):
527 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
528 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
531 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
532 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
533 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
535 env['CC'] = env['CCC']
536 env['CXX'] = env['CCXX']
538 if overrides is not None:
539 env.update(overrides)
543 def NetLockDl(props):
545 if props.hasProperty("dl_lock"):
546 lock = NetLocks[props["dl_lock"]]
548 return [lock.access('exclusive')]
553 def NetLockUl(props):
555 if props.hasProperty("ul_lock"):
556 lock = NetLocks[props["ul_lock"]]
558 return [lock.access('exclusive')]
563 def TagPropertyValue(props):
564 if props.hasProperty("options"):
565 options = props.getProperty("options")
566 if type(options) is dict:
567 return options.get("tag")
570 def IsTargetSelected(target):
571 def CheckTargetProperty(step):
573 options = step.getProperty("options")
574 if type(options) is dict:
575 selected_target = options.get("target", "all")
576 if selected_target != "all" and selected_target != target:
583 return CheckTargetProperty
585 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
587 seckey = base64.b64decode(seckey)
591 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
592 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
597 dlLock = locks.WorkerLock("worker_dl")
599 checkBuiltin = re.sub('[\t\n ]+', ' ', """
601 local symbol op path file;
602 for file in $CHANGED_FILES; do
608 while read symbol op path; do
609 case "$symbol" in package-*)
610 symbol="${symbol##*(}";
611 symbol="${symbol%)}";
612 for file in $CHANGED_FILES; do
613 case "$file" in "package/$path/"*)
614 grep -qsx "$symbol=y" .config && return 0
618 done < tmp/.packagedeps;
624 class IfBuiltinShellCommand(ShellCommand):
625 def _quote(self, str):
626 if re.search("[^a-zA-Z0-9/_.-]", str):
627 return "'%s'" %(re.sub("'", "'\"'\"'", str))
630 def setCommand(self, command):
631 if not isinstance(command, (str, unicode)):
632 command = ' '.join(map(self._quote, command))
635 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
638 def setupEnvironment(self, cmd):
639 workerEnv = self.workerEnvironment
640 if workerEnv is None:
643 for request in self.build.requests:
644 for source in request.sources:
645 for change in source.changes:
646 for file in change.files:
647 changedFiles[file] = True
648 fullSlaveEnv = workerEnv.copy()
649 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
650 cmd.args['env'] = fullSlaveEnv
654 for worker in c['workers']:
655 workerNames.append(worker.workername)
657 force_factory = BuildFactory()
659 c['builders'].append(BuilderConfig(
660 name = "00_force_build",
661 workernames = workerNames,
662 factory = force_factory))
664 for target in targets:
665 ts = target.split('/')
667 factory = BuildFactory()
669 # setup shared work directory if required
670 factory.addStep(ShellCommand(
672 description = "Setting up shared work directory",
673 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
675 haltOnFailure = True,
676 doStepIf = IsSharedWorkdir))
678 # find number of cores
679 factory.addStep(SetPropertyFromCommand(
682 description = "Finding number of CPUs",
683 command = ["nproc"]))
685 # find gcc and g++ compilers
686 factory.addStep(FileDownload(
687 name = "dlfindbinpl",
688 mastersrc = scripts_dir + '/findbin.pl',
689 workerdest = "../findbin.pl",
692 factory.addStep(SetPropertyFromCommand(
694 property = "cc_command",
695 description = "Finding gcc command",
697 "../findbin.pl", "gcc", "", "",
699 haltOnFailure = True))
701 factory.addStep(SetPropertyFromCommand(
703 property = "cxx_command",
704 description = "Finding g++ command",
706 "../findbin.pl", "g++", "", "",
708 haltOnFailure = True))
710 # see if ccache is available
711 factory.addStep(SetPropertyFromCommand(
712 property = "ccache_command",
713 command = ["which", "ccache"],
714 description = "Testing for ccache command",
715 haltOnFailure = False,
716 flunkOnFailure = False,
717 warnOnFailure = False,
720 # expire tree if needed
722 factory.addStep(FileDownload(
724 doStepIf = IsExpireRequested,
725 mastersrc = scripts_dir + '/expire.sh',
726 workerdest = "../expire.sh",
729 factory.addStep(ShellCommand(
731 description = "Checking for build tree expiry",
732 command = ["./expire.sh", str(tree_expire)],
734 haltOnFailure = True,
735 doStepIf = IsExpireRequested,
738 # cleanup.sh if needed
739 factory.addStep(FileDownload(
740 name = "dlcleanupsh",
741 mastersrc = scripts_dir + '/cleanup.sh',
742 workerdest = "../cleanup.sh",
744 doStepIf = IsCleanupRequested))
746 factory.addStep(ShellCommand(
748 description = "Cleaning previous builds",
749 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
751 haltOnFailure = True,
752 doStepIf = IsCleanupRequested,
755 factory.addStep(ShellCommand(
757 description = "Cleaning work area",
758 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
760 haltOnFailure = True,
761 doStepIf = IsCleanupRequested,
764 # user-requested clean targets
765 for tuple in CleanTargetMap:
766 factory.addStep(ShellCommand(
768 description = 'User-requested "make %s"' % tuple[1],
769 command = ["make", tuple[1], "V=s"],
771 doStepIf = IsMakeCleanRequested(tuple[0])
774 # Workaround bug when switching from a checked out tag back to a branch
775 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
776 factory.addStep(ShellCommand(
777 name = "gitcheckout",
778 description = "Ensure that Git HEAD is sane",
779 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
780 haltOnFailure = True))
782 # check out the source
784 # if repo doesn't exist: 'git clone repourl'
785 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
786 # 'git fetch -t repourl branch; git reset --hard revision'
787 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
788 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
792 branch = repo_branch,
796 haltOnFailure = True,
797 doStepIf = IsGitCleanRequested,
803 branch = repo_branch,
807 haltOnFailure = True,
808 doStepIf = IsGitFreshRequested,
812 factory.addStep(ShellCommand(
814 description = "Fetching Git remote refs",
815 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
820 factory.addStep(ShellCommand(
822 description = "Checking out Git tag",
823 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
824 haltOnFailure = True,
825 doStepIf = IsTaggingRequested
828 # Verify that Git HEAD points to a tag or branch
829 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
830 factory.addStep(ShellCommand(
832 description = "Ensure that Git HEAD is pointing to a branch or tag",
833 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
834 haltOnFailure = True))
836 factory.addStep(ShellCommand(
838 description = "Remove tmp folder",
839 command=["rm", "-rf", "tmp/"]))
842 # factory.addStep(ShellCommand(
843 # name = "feedsconf",
844 # description = "Copy the feeds.conf",
845 # command='''cp ~/feeds.conf ./feeds.conf''' ))
848 factory.addStep(ShellCommand(
849 name = "rmfeedlinks",
850 description = "Remove feed symlinks",
851 command=["rm", "-rf", "package/feeds/"]))
853 factory.addStep(StringDownload(
855 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
856 workerdest = "../ccache_cc.sh",
860 factory.addStep(StringDownload(
862 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
863 workerdest = "../ccache_cxx.sh",
868 factory.addStep(ShellCommand(
869 name = "updatefeeds",
870 description = "Updating feeds",
871 command=["./scripts/feeds", "update"],
872 env = MakeEnv(tryccache=True),
873 haltOnFailure = True,
878 factory.addStep(ShellCommand(
879 name = "installfeeds",
880 description = "Installing feeds",
881 command=["./scripts/feeds", "install", "-a"],
882 env = MakeEnv(tryccache=True),
887 if config_seed is not None:
888 factory.addStep(StringDownload(
889 name = "dlconfigseed",
890 s = config_seed + '\n',
891 workerdest = ".config",
896 factory.addStep(ShellCommand(
898 description = "Seeding .config",
899 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
902 factory.addStep(ShellCommand(
904 description = "Removing output directory",
905 command = ["rm", "-rf", "bin/"]
908 factory.addStep(ShellCommand(
910 description = "Populating .config",
911 command = ["make", "defconfig"],
916 factory.addStep(ShellCommand(
918 description = "Checking architecture",
919 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
927 factory.addStep(SetPropertyFromCommand(
930 description = "Finding libc suffix",
931 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
934 if usign_key is not None:
935 factory.addStep(StringDownload(
936 name = "dlkeybuildpub",
937 s = UsignSec2Pub(usign_key, usign_comment),
938 workerdest = "key-build.pub",
942 factory.addStep(StringDownload(
944 s = "# fake private key",
945 workerdest = "key-build",
949 factory.addStep(StringDownload(
950 name = "dlkeybuilducert",
951 s = "# fake certificate",
952 workerdest = "key-build.ucert",
957 factory.addStep(ShellCommand(
959 description = "Preparing dl/",
960 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
966 factory.addStep(ShellCommand(
968 description = "Building and installing GNU tar",
969 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
970 env = MakeEnv(tryccache=True),
975 factory.addStep(ShellCommand(
977 description = "Populating dl/",
978 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
981 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
984 factory.addStep(ShellCommand(
986 description = "Cleaning base-files",
987 command=["make", "package/base-files/clean", "V=s"]
991 factory.addStep(ShellCommand(
993 description = "Building and installing tools",
994 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
995 env = MakeEnv(tryccache=True),
999 factory.addStep(ShellCommand(
1001 description = "Building and installing toolchain",
1002 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1004 haltOnFailure = True
1007 factory.addStep(ShellCommand(
1009 description = "Building kmods",
1010 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1012 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1013 haltOnFailure = True
1016 # find kernel version
1017 factory.addStep(SetPropertyFromCommand(
1018 name = "kernelversion",
1019 property = "kernelversion",
1020 description = "Finding the effective Kernel version",
1021 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1022 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1025 factory.addStep(ShellCommand(
1027 description = "Cleaning up package build",
1028 command=["make", "package/cleanup", "V=s"]
1031 factory.addStep(ShellCommand(
1033 description = "Building packages",
1034 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1036 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1037 haltOnFailure = True
1040 # factory.addStep(IfBuiltinShellCommand(
1041 factory.addStep(ShellCommand(
1042 name = "pkginstall",
1043 description = "Installing packages",
1044 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1046 haltOnFailure = True
1049 factory.addStep(ShellCommand(
1051 description = "Indexing packages",
1052 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1054 haltOnFailure = True
1057 #factory.addStep(IfBuiltinShellCommand(
1058 factory.addStep(ShellCommand(
1060 description = "Building and installing images",
1061 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1063 haltOnFailure = True
1066 factory.addStep(ShellCommand(
1068 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1069 command = "make -j1 buildinfo V=s || true",
1071 haltOnFailure = True
1074 factory.addStep(ShellCommand(
1075 name = "json_overview_image_info",
1076 description = "Generate profiles.json in target folder",
1077 command = "make -j1 json_overview_image_info V=s || true",
1079 haltOnFailure = True
1082 factory.addStep(ShellCommand(
1084 description = "Calculating checksums",
1085 command=["make", "-j1", "checksum", "V=s"],
1087 haltOnFailure = True
1090 if enable_kmod_archive:
1091 factory.addStep(ShellCommand(
1093 description = "Creating kmod directory",
1094 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1095 haltOnFailure = True
1098 factory.addStep(ShellCommand(
1099 name = "kmodprepare",
1100 description = "Preparing kmod archive",
1101 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1102 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1103 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1104 haltOnFailure = True
1107 factory.addStep(ShellCommand(
1109 description = "Indexing kmod archive",
1110 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1111 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1113 haltOnFailure = True
1117 if ini.has_option("gpg", "key") or usign_key is not None:
1118 factory.addStep(MasterShellCommand(
1119 name = "signprepare",
1120 description = "Preparing temporary signing directory",
1121 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1122 haltOnFailure = True
1125 factory.addStep(ShellCommand(
1127 description = "Packing files to sign",
1128 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1129 haltOnFailure = True
1132 factory.addStep(FileUpload(
1133 workersrc = "sign.tar.gz",
1134 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1135 haltOnFailure = True
1138 factory.addStep(MasterShellCommand(
1140 description = "Signing files",
1141 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1142 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1143 haltOnFailure = True
1146 factory.addStep(FileDownload(
1147 name = "dlsigntargz",
1148 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1149 workerdest = "sign.tar.gz",
1150 haltOnFailure = True
1153 factory.addStep(ShellCommand(
1154 name = "signunpack",
1155 description = "Unpacking signed files",
1156 command = ["tar", "-xzf", "sign.tar.gz"],
1157 haltOnFailure = True
1161 factory.addStep(ShellCommand(
1162 name = "dirprepare",
1163 description = "Preparing upload directory structure",
1164 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1165 haltOnFailure = True
1168 factory.addStep(ShellCommand(
1169 name = "linkprepare",
1170 description = "Preparing repository symlink",
1171 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1172 doStepIf = IsNoMasterBuild,
1173 haltOnFailure = True
1176 if enable_kmod_archive:
1177 factory.addStep(ShellCommand(
1178 name = "kmoddirprepare",
1179 description = "Preparing kmod archive upload directory",
1180 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1181 haltOnFailure = True
1184 factory.addStep(ShellCommand(
1186 description = "Uploading directory structure",
1187 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1188 env={'RSYNC_PASSWORD': rsync_bin_key},
1189 haltOnFailure = True,
1194 # download remote sha256sums to 'target-sha256sums'
1195 factory.addStep(ShellCommand(
1196 name = "target-sha256sums",
1197 description = "Fetching remote sha256sums for target",
1198 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1199 env={'RSYNC_PASSWORD': rsync_bin_key},
1201 haltOnFailure = False,
1202 flunkOnFailure = False,
1203 warnOnFailure = False,
1206 # build list of files to upload
1207 factory.addStep(FileDownload(
1208 name = "dlsha2rsyncpl",
1209 mastersrc = scripts_dir + '/sha2rsync.pl',
1210 workerdest = "../sha2rsync.pl",
1214 factory.addStep(ShellCommand(
1216 description = "Building list of files to upload",
1217 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1218 haltOnFailure = True,
# Copy the rsync.sh wrapper script from the master to the worker; it is used
# by all of the upload steps below in place of invoking rsync directly.
1221 factory.addStep(FileDownload(
1222 name = "dlrsync.sh",
1223 mastersrc = scripts_dir + '/rsync.sh',
1224 workerdest = "../rsync.sh",
1228 # upload new files and update existing ones
# Upload only the files listed in 'rsynclist' (built above), excluding the
# kmods/ tree which is handled by its own step. --delay-updates plus a
# per-target --partial-dir keep the remote tree consistent and make
# interrupted transfers resumable.
1229 factory.addStep(ShellCommand(
1230 name = "targetupload",
1231 description = "Uploading target files",
1232 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1233 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1234 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1235 env={'RSYNC_PASSWORD': rsync_bin_key},
1236 haltOnFailure = True,
1240 # delete files which don't exist locally
# Second rsync pass over the same source/destination: the combination of
# --delete --existing --ignore-existing transfers nothing but removes remote
# files that no longer exist locally (kmods/ again excluded).
1241 factory.addStep(ShellCommand(
1242 name = "targetprune",
1243 description = "Pruning target files",
1244 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1245 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1246 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1247 env={'RSYNC_PASSWORD': rsync_bin_key},
1248 haltOnFailure = True,
# Mirror the per-kernel-version kmods tree to the directory prepared by
# kmoddirprepare; --delete makes the remote kmod dir an exact copy.
1253 if enable_kmod_archive:
1254 factory.addStep(ShellCommand(
1255 name = "kmodupload",
1256 description = "Uploading kmod archive",
1257 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1258 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1259 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1260 env={'RSYNC_PASSWORD': rsync_bin_key},
1261 haltOnFailure = True,
# Source-archive mirroring is optional (rsync_src_url may be unset). The find
# command is a single shell string (uses output redirection), so Buildbot runs
# it through a shell; '-newer .config' restricts the list to archives fetched
# during this build, and .hash/.dl/hidden/empty files are skipped.
1266 if rsync_src_url is not None:
1267 factory.addStep(ShellCommand(
1268 name = "sourcelist",
1269 description = "Finding source archives to upload",
1270 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1271 haltOnFailure = True
# Upload the archives listed in 'sourcelist' to the source mirror.
# --size-only skips files already present with the same size (presumably
# because a source tarball's content is fixed for a given name — TODO confirm);
# the partial-dir includes the worker name to avoid collisions between workers.
1274 factory.addStep(ShellCommand(
1275 name = "sourceupload",
1276 description = "Uploading source archives",
1277 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1278 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1279 env={'RSYNC_PASSWORD': rsync_src_key},
1280 haltOnFailure = True,
# Mirror bin/packages/ to the binary destination. Failures only produce a
# warning (warnOnFailure=True, halt/flunkOnFailure=False), so a package
# upload hiccup does not fail the whole build.
1286 factory.addStep(ShellCommand(
1287 name = "packageupload",
1288 description = "Uploading package files",
1289 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1290 env={'RSYNC_PASSWORD': rsync_bin_key},
1291 haltOnFailure = False,
1292 flunkOnFailure = False,
1293 warnOnFailure = True,
# Mirror the build's logs/ directory to a per-target/subtarget path on the
# binary destination (compressed transfer, -z). Warn-only like packageupload.
# (The step's name line falls outside this excerpt.)
1300 factory.addStep(ShellCommand(
1302 description = "Uploading logs",
1303 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1304 env={'RSYNC_PASSWORD': rsync_bin_key},
1305 haltOnFailure = False,
1306 flunkOnFailure = False,
1307 warnOnFailure = True,
# Informational only: report free disk space in the build directory.
# LC_ALL=C pins the output format; all failure flags are off so this step can
# never affect the build result.
1313 factory.addStep(ShellCommand(
1315 description = "Reporting disk usage",
1316 command=["df", "-h", "."],
1317 env={'LC_ALL': 'C'},
1318 haltOnFailure = False,
1319 flunkOnFailure = False,
1320 warnOnFailure = False,
# Informational only: report the total size of the build directory; cannot
# affect the build result (all failure flags off).
1324 factory.addStep(ShellCommand(
1326 description = "Reporting estimated file space usage",
1327 command=["du", "-sh", "."],
1328 env={'LC_ALL': 'C'},
1329 haltOnFailure = False,
1330 flunkOnFailure = False,
1331 warnOnFailure = False,
# Informational only: dump ccache hit/miss statistics. PATH is extended with
# the in-tree staging_dir/host/bin so the SDK-provided ccache binary is found;
# stderr is dropped and all failure flags are off.
1335 factory.addStep(ShellCommand(
1336 name = "ccachestat",
1337 description = "Reporting ccache stats",
1338 command=["ccache", "-s"],
1339 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1340 want_stderr = False,
1341 haltOnFailure = False,
1342 flunkOnFailure = False,
1343 warnOnFailure = False,
# Wire this target into the master: register the builder (with GetNextBuild
# choosing build order), a Triggerable scheduler named after the target, and a
# Trigger step on the shared force-build factory that fires only when the
# target is selected (IsTargetSelected), forwarding the 'reason' property and
# a tag value.
1347 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1349 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1350 force_factory.addStep(steps.Trigger(
1351 name = "trigger_%s" % target,
1352 description = "Triggering %s build" % target,
1353 schedulerNames = [ "trigger_%s" % target ],
1354 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1355 doStepIf = IsTargetSelected(target)
1359 ####### STATUS TARGETS
1361 # In modern Buildbot, build status is published via 'services' (reporters)
1362 # and the 'www' web UI rather than the old buildbot/status/*.py targets.
1363 # Below we configure the web UI (waterfall/console views) and an IRC bot.
# Web UI: only enabled when [phase1] status_bind is configured; the
# c['www'] = { opener falls outside this excerpt. Waterfall and console views
# are turned on. If status_user/status_password are also set, protect control
# endpoints with basic auth and grant the 'admins' role to that single user.
1365 if ini.has_option("phase1", "status_bind"):
1367 'port': ini.get("phase1", "status_bind"),
1369 'waterfall_view': True,
1370 'console_view': True,
1375 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1376 c['www']['auth'] = util.UserPasswordAuth([
1377 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1379 c['www']['authz'] = util.Authz(
1380 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1381 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
# IRC reporter: enabled only when host, nickname and channel are all present
# in the [irc] section; port and password are optional overrides (their
# default assignments fall outside this excerpt, as does the port= keyword in
# the reporters.IRC(...) call). Notifies on exception/problem/recovery.
1385 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1386 irc_host = ini.get("irc", "host")
1388 irc_chan = ini.get("irc", "channel")
1389 irc_nick = ini.get("irc", "nickname")
1392 if ini.has_option("irc", "port"):
1393 irc_port = ini.getint("irc", "port")
1395 if ini.has_option("irc", "password"):
1396 irc_pass = ini.get("irc", "password")
1398 irc = reporters.IRC(irc_host, irc_nick,
1400 password = irc_pass,
1401 channels = [ irc_chan ],
1402 notify_events = [ 'exception', 'problem', 'recovery' ]
1405 c['services'].append(irc)
# Map revision IDs for openwrt git repos to cgit commit URLs in the web UI;
# \1 carries the repository name captured from the clone URL, %s the revision.
1407 c['revlink'] = util.RevlinkMatch([
1408 r'https://git.openwrt.org/openwrt/(.*).git'
1410 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1415 # This specifies what database buildbot uses to store its state. You can leave
1416 # this at its default for all but the largest installations.
# (The c['db'] = { opener for this entry falls outside this excerpt.)
1417 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymous usage statistics to the Buildbot project.
1420 c['buildbotNetUsageData'] = None