2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record our PID in twistd.pid unless the supervisor already wrote one.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Bail out early when any mandatory section is missing.
if any(section not in ini for section in ("general", "phase1", "rsync")):
    raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.

c['buildbotURL'] = inip1.get("buildbot_url")
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker properties consumed later by build steps.
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    # A worker limited to one build can safely share its workdir.
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUGFIX: this branch previously read the "dl_lock" option
                # (copy-paste error), so a worker's upload lock silently
                # reused its download lock name.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# Collapse queued requests for the same builder into one build.
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
)]
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        # A build newer than the newest completed request wins, when it
        # already has a completion timestamp.
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Any in-flight (or stale) build slot counts as busy.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        if not complete_at:
            # Builders with no completed build sort first (oldest timestamp).
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            # Busy builders sort last so idle ones get new work first.
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # FIX: renamed from 'results' to avoid shadowing the imported
    # buildbot.process.results module used elsewhere in this file.
    builder_times = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    builder_times.sort(key=bldr_sort)

    for r in builder_times:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in builder_times]

c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")


def _rsync_default_opts(url):
    # Common rsync options; daemon-style URLs additionally get a connect timeout.
    opts = ["-v", "-4", "--timeout=120"]
    if url.find("::") > 0 or url.find("rsync://") == 0:
        opts += ["--contimeout=20"]
    return opts


rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = _rsync_default_opts(rsync_bin_url)

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = _rsync_default_opts(rsync_src_url)

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)
# Find the list of buildable targets by asking the source tree.
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# dump-target-info.pl emits one "target/subtarget ..." record per line.
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Iterate the pipe directly instead of a manual readline()/break loop.
for line in findtargets.stdout:
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source repository.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
class TagChoiceParameter(BaseParameter):
    """Force-scheduler dropdown whose choices are the release tags of this
    master's branch, fetched live from the remote repository."""

    spec_attributes = ["strict", "choices"]
    type = "list"
    mutable = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        # Branch names look like "name-XX.YY"; XX.YY is the base version.
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            for line in findtags.stdout:
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

        # Newest first; suffix '-z' onto final releases so they outrank
        # their own release candidates in the reverse sort.
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist
        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
# Trigger a build of every target whenever the tracked branch changes.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets,
))
# Manual trigger: the forced build fans out to the selected target(s).
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name = "options",
            label = "Build Options",
            layout = "vertical",
            fields = [
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = "",
                    strict = True
                )
            ]
        )
    ]
))
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

# Maps the user-selectable 'clean' property value to the make target it runs.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ],
]
def IsMakeCleanRequested(pattern):
    """Return a step predicate that matches the build's 'clean' property
    against *pattern* (a regular expression, matched from the start)."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        # bool() collapses the verbose if/else: re.match is truthy or None.
        return bool(val and re.match(pattern, val))

    return CheckCleanProperty

def IsSharedWorkdir(step):
    """True when the worker was configured with a shared work directory."""
    return bool(step.getProperty("shared_wd"))
def IsCleanupRequested(step):
    """True when a full cleanup should run; never on shared workdirs."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    """True when build-tree expiry should be checked instead of a cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)

def IsGitFreshRequested(step):
    """True when a pristine git checkout was requested via 'do_cleanup'."""
    return bool(step.getProperty("do_cleanup"))

def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested."""
    return not IsGitFreshRequested(step)
def IsTaggingRequested(step):
    """True when the 'tag' property holds a valid release tag (x.y.z[-rcN])."""
    val = step.getProperty("tag")
    # bool() collapses the verbose if/else: re.match is truthy or None.
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))

def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested."""
    return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
    """True when this master builds a release branch rather than master."""
    return repo_branch != "master"

def GetBaseVersion():
    """Return the 'XX.YY' base version from a 'name-XX.YY' branch, else 'master'."""
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
    return "master"
def GetVersionPrefix(props):
    """Upload-path prefix: '<tag>/' for tagged builds, '<base>-SNAPSHOT/'
    on release branches, '' on master."""
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
    else:
        return ""

@properties.renderer
def GetNumJobs(props):
    """Parallel make jobs: worker nproc divided between concurrent builds."""
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        return str(int(int(props["nproc"]) / props["max_builds"]))
    else:
        return "1"

@properties.renderer
def GetCC(props):
    """C compiler discovered on the worker, defaulting to gcc."""
    if props.hasProperty("cc_command"):
        return props["cc_command"]
    else:
        return "gcc"

@properties.renderer
def GetCXX(props):
    """C++ compiler discovered on the worker, defaulting to g++."""
    if props.hasProperty("cxx_command"):
        return props["cxx_command"]
    else:
        return "g++"

@properties.renderer
def GetCwd(props):
    """The build's working directory, preferring builddir over workdir."""
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

@properties.renderer
def GetCCache(props):
    """Path to ccache on the worker, or '' when unavailable."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    else:
        return ""
def GetNextBuild(builder, requests):
    """Prefer tagged (release) build requests; otherwise take the first one."""
    for r in requests:
        if r.properties and r.properties.hasProperty("tag"):
            return r

    r = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
    return r

def MakeEnv(overrides=None, tryccache=False):
    """Build the compiler environment dict passed to build steps.

    With tryccache=True, CC/CXX point at the ccache wrapper scripts; the
    raw compilers stay available as CCC/CCXX for the wrappers to call.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']
    if overrides is not None:
        env.update(overrides)
    return env
def NetLockDl(props):
    """Render the download-side master lock for this worker, if configured."""
    lock = None
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []

@properties.renderer
def NetLockUl(props):
    """Render the upload-side master lock for this worker, if configured."""
    lock = None
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []
def TagPropertyValue(props):
    """Extract the forced-build 'tag' value from the nested 'options'
    property, or None when absent / not a dict."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # isinstance instead of an exact type() check (also accepts subclasses).
        if isinstance(options, dict):
            return options.get("tag")
    return None
def IsTargetSelected(target):
    """Return a step predicate: does the forced-build target selection
    include *target* (either exactly or via "all")?"""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            chosen = options.get("target", "all")
            if chosen != "all" and chosen != target:
                return False
        return True

    return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    Returns the two-line public-key text (rewritten comment + base64 key
    material), or None when the secret key cannot be base64-decoded.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # BUGFIX: b64encode() returns bytes; without .decode() the formatted
    # string contains a literal "b'...'" Python repr, corrupting the
    # generated public key file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
# Serializes dl/ population per worker (one download step at a time).
dlLock = locks.WorkerLock("worker_dl")

# Names of all phase-1 workers, for builder assignment (list comprehension
# instead of the append loop).
workerNames = [worker.workername for worker in c['workers']]

force_factory = BuildFactory()

c['builders'] = []
c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
589 for target in targets:
590 ts = target.split('/')
592 factory = BuildFactory()
594 # setup shared work directory if required
595 factory.addStep(ShellCommand(
597 description = "Setting up shared work directory",
598 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
600 haltOnFailure = True,
601 doStepIf = IsSharedWorkdir))
603 # find number of cores
604 factory.addStep(SetPropertyFromCommand(
607 description = "Finding number of CPUs",
608 command = ["nproc"]))
610 # find gcc and g++ compilers
611 factory.addStep(FileDownload(
612 name = "dlfindbinpl",
613 mastersrc = scripts_dir + '/findbin.pl',
614 workerdest = "../findbin.pl",
617 factory.addStep(SetPropertyFromCommand(
619 property = "cc_command",
620 description = "Finding gcc command",
622 "../findbin.pl", "gcc", "", "",
624 haltOnFailure = True))
626 factory.addStep(SetPropertyFromCommand(
628 property = "cxx_command",
629 description = "Finding g++ command",
631 "../findbin.pl", "g++", "", "",
633 haltOnFailure = True))
635 # see if ccache is available
636 factory.addStep(SetPropertyFromCommand(
637 property = "ccache_command",
638 command = ["which", "ccache"],
639 description = "Testing for ccache command",
640 haltOnFailure = False,
641 flunkOnFailure = False,
642 warnOnFailure = False,
645 # expire tree if needed
647 factory.addStep(FileDownload(
649 doStepIf = IsExpireRequested,
650 mastersrc = scripts_dir + '/expire.sh',
651 workerdest = "../expire.sh",
654 factory.addStep(ShellCommand(
656 description = "Checking for build tree expiry",
657 command = ["./expire.sh", str(tree_expire)],
659 haltOnFailure = True,
660 doStepIf = IsExpireRequested,
663 # cleanup.sh if needed
664 factory.addStep(FileDownload(
665 name = "dlcleanupsh",
666 mastersrc = scripts_dir + '/cleanup.sh',
667 workerdest = "../cleanup.sh",
669 doStepIf = IsCleanupRequested))
671 factory.addStep(ShellCommand(
673 description = "Cleaning previous builds",
674 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
676 haltOnFailure = True,
677 doStepIf = IsCleanupRequested,
680 factory.addStep(ShellCommand(
682 description = "Cleaning work area",
683 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
685 haltOnFailure = True,
686 doStepIf = IsCleanupRequested,
689 # user-requested clean targets
690 for tuple in CleanTargetMap:
691 factory.addStep(ShellCommand(
693 description = 'User-requested "make %s"' % tuple[1],
694 command = ["make", tuple[1], "V=s"],
696 doStepIf = IsMakeCleanRequested(tuple[0])
699 # Workaround bug when switching from a checked out tag back to a branch
700 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
701 factory.addStep(ShellCommand(
702 name = "gitcheckout",
703 description = "Ensure that Git HEAD is sane",
704 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
705 haltOnFailure = True))
707 # check out the source
709 # if repo doesn't exist: 'git clone repourl'
710 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
711 # 'git fetch -t repourl branch; git reset --hard revision'
712 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
713 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
717 branch = repo_branch,
721 haltOnFailure = True,
722 doStepIf = IsGitCleanRequested,
728 branch = repo_branch,
732 haltOnFailure = True,
733 doStepIf = IsGitFreshRequested,
737 factory.addStep(ShellCommand(
739 description = "Fetching Git remote refs",
740 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
745 factory.addStep(ShellCommand(
747 description = "Checking out Git tag",
748 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
749 haltOnFailure = True,
750 doStepIf = IsTaggingRequested
753 # Verify that Git HEAD points to a tag or branch
754 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
755 factory.addStep(ShellCommand(
757 description = "Ensure that Git HEAD is pointing to a branch or tag",
758 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
759 haltOnFailure = True))
761 factory.addStep(ShellCommand(
763 description = "Remove tmp folder",
764 command=["rm", "-rf", "tmp/"]))
767 # factory.addStep(ShellCommand(
768 # name = "feedsconf",
769 # description = "Copy the feeds.conf",
770 # command='''cp ~/feeds.conf ./feeds.conf''' ))
773 factory.addStep(ShellCommand(
774 name = "rmfeedlinks",
775 description = "Remove feed symlinks",
776 command=["rm", "-rf", "package/feeds/"]))
778 factory.addStep(StringDownload(
780 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
781 workerdest = "../ccache_cc.sh",
785 factory.addStep(StringDownload(
787 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
788 workerdest = "../ccache_cxx.sh",
793 factory.addStep(ShellCommand(
794 name = "updatefeeds",
795 description = "Updating feeds",
796 command=["./scripts/feeds", "update"],
797 env = MakeEnv(tryccache=True),
798 haltOnFailure = True,
803 factory.addStep(ShellCommand(
804 name = "installfeeds",
805 description = "Installing feeds",
806 command=["./scripts/feeds", "install", "-a"],
807 env = MakeEnv(tryccache=True),
812 if config_seed is not None:
813 factory.addStep(StringDownload(
814 name = "dlconfigseed",
815 s = config_seed + '\n',
816 workerdest = ".config",
821 factory.addStep(ShellCommand(
823 description = "Seeding .config",
824 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
827 factory.addStep(ShellCommand(
829 description = "Removing output directory",
830 command = ["rm", "-rf", "bin/"]
833 factory.addStep(ShellCommand(
835 description = "Populating .config",
836 command = ["make", "defconfig"],
841 factory.addStep(ShellCommand(
843 description = "Checking architecture",
844 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
852 factory.addStep(SetPropertyFromCommand(
855 description = "Finding libc suffix",
856 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
859 if usign_key is not None:
860 factory.addStep(StringDownload(
861 name = "dlkeybuildpub",
862 s = UsignSec2Pub(usign_key, usign_comment),
863 workerdest = "key-build.pub",
867 factory.addStep(StringDownload(
869 s = "# fake private key",
870 workerdest = "key-build",
874 factory.addStep(StringDownload(
875 name = "dlkeybuilducert",
876 s = "# fake certificate",
877 workerdest = "key-build.ucert",
882 factory.addStep(ShellCommand(
884 description = "Preparing dl/",
885 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
891 factory.addStep(ShellCommand(
893 description = "Building and installing GNU tar",
894 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
895 env = MakeEnv(tryccache=True),
900 factory.addStep(ShellCommand(
902 description = "Populating dl/",
903 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
906 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
909 factory.addStep(ShellCommand(
911 description = "Cleaning base-files",
912 command=["make", "package/base-files/clean", "V=s"]
916 factory.addStep(ShellCommand(
918 description = "Building and installing tools",
919 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
920 env = MakeEnv(tryccache=True),
924 factory.addStep(ShellCommand(
926 description = "Building and installing toolchain",
927 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
932 factory.addStep(ShellCommand(
934 description = "Building kmods",
935 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
937 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
941 # find kernel version
942 factory.addStep(SetPropertyFromCommand(
943 name = "kernelversion",
944 property = "kernelversion",
945 description = "Finding the effective Kernel version",
946 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
947 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
950 factory.addStep(ShellCommand(
952 description = "Cleaning up package build",
953 command=["make", "package/cleanup", "V=s"]
956 factory.addStep(ShellCommand(
958 description = "Building packages",
959 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
961 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
965 factory.addStep(ShellCommand(
967 description = "Installing packages",
968 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
973 factory.addStep(ShellCommand(
975 description = "Indexing packages",
976 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
981 factory.addStep(ShellCommand(
983 description = "Building and installing images",
984 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
989 factory.addStep(ShellCommand(
991 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
992 command = "make -j1 buildinfo V=s || true",
997 factory.addStep(ShellCommand(
998 name = "json_overview_image_info",
999 description = "Generate profiles.json in target folder",
1000 command = "make -j1 json_overview_image_info V=s || true",
1002 haltOnFailure = True
1005 factory.addStep(ShellCommand(
1007 description = "Calculating checksums",
1008 command=["make", "-j1", "checksum", "V=s"],
1010 haltOnFailure = True
1013 if enable_kmod_archive:
1014 factory.addStep(ShellCommand(
1016 description = "Creating kmod directory",
1017 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1018 haltOnFailure = True
1021 factory.addStep(ShellCommand(
1022 name = "kmodprepare",
1023 description = "Preparing kmod archive",
1024 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1025 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1026 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1027 haltOnFailure = True
1030 factory.addStep(ShellCommand(
1032 description = "Indexing kmod archive",
1033 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1034 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1036 haltOnFailure = True
1040 if ini.has_option("gpg", "key") or usign_key is not None:
1041 factory.addStep(MasterShellCommand(
1042 name = "signprepare",
1043 description = "Preparing temporary signing directory",
1044 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1045 haltOnFailure = True
1048 factory.addStep(ShellCommand(
1050 description = "Packing files to sign",
1051 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1052 haltOnFailure = True
1055 factory.addStep(FileUpload(
1056 workersrc = "sign.tar.gz",
1057 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1058 haltOnFailure = True
1061 factory.addStep(MasterShellCommand(
1063 description = "Signing files",
1064 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1065 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1066 haltOnFailure = True
1069 factory.addStep(FileDownload(
1070 name = "dlsigntargz",
1071 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1072 workerdest = "sign.tar.gz",
1073 haltOnFailure = True
1076 factory.addStep(ShellCommand(
1077 name = "signunpack",
1078 description = "Unpacking signed files",
1079 command = ["tar", "-xzf", "sign.tar.gz"],
1080 haltOnFailure = True
1084 factory.addStep(ShellCommand(
1085 name = "dirprepare",
1086 description = "Preparing upload directory structure",
1087 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1088 haltOnFailure = True
1091 factory.addStep(ShellCommand(
1092 name = "linkprepare",
1093 description = "Preparing repository symlink",
1094 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1095 doStepIf = IsNoMasterBuild,
1096 haltOnFailure = True
1099 if enable_kmod_archive:
1100 factory.addStep(ShellCommand(
1101 name = "kmoddirprepare",
1102 description = "Preparing kmod archive upload directory",
1103 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1104 haltOnFailure = True
1107 factory.addStep(ShellCommand(
1109 description = "Uploading directory structure",
1110 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1111 env={'RSYNC_PASSWORD': rsync_bin_key},
1112 haltOnFailure = True,
1117 # download remote sha256sums to 'target-sha256sums'
1118 factory.addStep(ShellCommand(
1119 name = "target-sha256sums",
1120 description = "Fetching remote sha256sums for target",
1121 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1122 env={'RSYNC_PASSWORD': rsync_bin_key},
1124 haltOnFailure = False,
1125 flunkOnFailure = False,
1126 warnOnFailure = False,
1129 # build list of files to upload
1130 factory.addStep(FileDownload(
1131 name = "dlsha2rsyncpl",
1132 mastersrc = scripts_dir + '/sha2rsync.pl',
1133 workerdest = "../sha2rsync.pl",
1137 factory.addStep(ShellCommand(
1139 description = "Building list of files to upload",
1140 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1141 haltOnFailure = True,
1144 factory.addStep(FileDownload(
1145 name = "dlrsync.sh",
1146 mastersrc = scripts_dir + '/rsync.sh',
1147 workerdest = "../rsync.sh",
1151 # upload new files and update existing ones
1152 factory.addStep(ShellCommand(
1153 name = "targetupload",
1154 description = "Uploading target files",
1155 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1156 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1157 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1158 env={'RSYNC_PASSWORD': rsync_bin_key},
1159 haltOnFailure = True,
1163 # delete files which don't exist locally
1164 factory.addStep(ShellCommand(
1165 name = "targetprune",
1166 description = "Pruning target files",
1167 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1168 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1169 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1170 env={'RSYNC_PASSWORD': rsync_bin_key},
1171 haltOnFailure = True,
# Optionally mirror the per-kernel-version kmod archive (excluded from the
# targetupload step above) into .../kmods/<kernelversion>/ on the remote.
# "kernelversion" is a build property interpolated at runtime.
1176 if enable_kmod_archive:
1177 factory.addStep(ShellCommand(
1178 name = "kmodupload",
1179 description = "Uploading kmod archive",
1180 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1181 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1182 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1183 env={'RSYNC_PASSWORD': rsync_bin_key},
1184 haltOnFailure = True,
# Mirror downloaded source archives, but only when a source mirror is
# configured. "sourcelist" collects dl/ files that are non-empty, not
# hidden, not *.hash/*.dl bookkeeping files, and newer than .config —
# i.e. archives fetched during this build.
1189 if rsync_src_url is not None:
1190 factory.addStep(ShellCommand(
1191 name = "sourcelist",
1192 description = "Finding source archives to upload",
1193 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1194 haltOnFailure = True
# Upload the collected archives. --size-only suffices because release
# tarballs are immutable; the partial-dir name includes the workername
# property so concurrent workers don't clobber each other's temp files.
1197 factory.addStep(ShellCommand(
1198 name = "sourceupload",
1199 description = "Uploading source archives",
1200 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1201 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1202 env={'RSYNC_PASSWORD': rsync_src_key},
1203 haltOnFailure = True,
# Upload bin/packages/ wholesale (with --delete). Unlike the target
# uploads this is best-effort: a failure only produces a warning
# (warnOnFailure) and neither halts nor flunks the build.
1209 factory.addStep(ShellCommand(
1210 name = "packageupload",
1211 description = "Uploading package files",
1212 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1213 env={'RSYNC_PASSWORD': rsync_bin_key},
1214 haltOnFailure = False,
1215 flunkOnFailure = False,
1216 warnOnFailure = True,
# Upload build logs to logs/<target>/<subtarget>/ on the remote; "-z"
# compresses in transit since logs are text. Best-effort like the package
# upload: failures warn but never fail the build. (The step's "name ="
# line falls outside this view.)
1223 factory.addStep(ShellCommand(
1225 description = "Uploading logs",
1226 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1227 env={'RSYNC_PASSWORD': rsync_bin_key},
1228 haltOnFailure = False,
1229 flunkOnFailure = False,
1230 warnOnFailure = True,
# Purely informational diagnostics at the end of the build: free space on
# the filesystem holding the build dir, then the build dir's own size.
# LC_ALL=C pins the output format; these steps can never fail the build.
1236 factory.addStep(ShellCommand(
1238 description = "Reporting disk usage",
1239 command=["df", "-h", "."],
1240 env={'LC_ALL': 'C'},
1241 haltOnFailure = False,
1242 flunkOnFailure = False,
1243 warnOnFailure = False,
1247 factory.addStep(ShellCommand(
1249 description = "Reporting estimated file space usage",
1250 command=["du", "-sh", "."],
1251 env={'LC_ALL': 'C'},
1252 haltOnFailure = False,
1253 flunkOnFailure = False,
1254 warnOnFailure = False,
# Log ccache hit/miss statistics. PATH is extended with the in-tree
# staging_dir/host/bin so the SDK-provided ccache binary is found even when
# the worker has none installed; stderr is dropped. Informational only —
# never affects the build result.
1258 factory.addStep(ShellCommand(
1259 name = "ccachestat",
1260 description = "Reporting ccache stats",
1261 command=["ccache", "-s"],
1262 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1263 want_stderr = False,
1264 haltOnFailure = False,
1265 flunkOnFailure = False,
1266 warnOnFailure = False,
# Register the finished factory as a builder for this target, plus a
# Triggerable scheduler so the force scheduler can start it on demand.
1270 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1272 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
# Add one Trigger step per target to the shared force-build factory;
# doStepIf=IsTargetSelected(target) means a forced build only fires the
# targets the user actually selected. "reason" and the tag value are
# forwarded to the triggered build as properties.
1273 force_factory.addStep(steps.Trigger(
1274 name = "trigger_%s" % target,
1275 description = "Triggering %s build" % target,
1276 schedulerNames = [ "trigger_%s" % target ],
1277 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1278 doStepIf = IsTargetSelected(target)
1282 ####### STATUS TARGETS
1284 # Build results are published through the www service and reporters
1285 # configured below (buildbot.plugins.util / reporters): web UI views,
1286 # optional authentication, and an IRC bot.
# Enable the web UI only when [phase1] status_bind is configured; the bind
# value and the view plugins go into the c['www'] dict (opener outside
# this view).
1288 if "status_bind" in inip1:
1290 'port': inip1.get("status_bind"),
1292 'waterfall_view': True,
1293 'console_view': True,
# Optional HTTP basic auth: a single admin account from the ini file.
# Only that user (role "admins") may hit control endpoints (force/stop/...);
# anonymous visitors get read-only access.
1298 if "status_user" in inip1 and "status_password" in inip1:
1299 c['www']['auth'] = util.UserPasswordAuth([
1300 (inip1.get("status_user"), inip1.get("status_password"))
1302 c['www']['authz'] = util.Authz(
1303 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1304 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# IRC status bot, driven by an optional [irc] ini section. host, nickname
# and channel are mandatory (the reporter is only registered when all three
# are set); port defaults to 6667 and password is optional. The bot only
# announces exceptions, new failures and recoveries — not every build.
1308 if ini.has_section("irc"):
1310 irc_host = iniirc.get("host", None)
1311 irc_port = iniirc.getint("port", 6667)
1312 irc_chan = iniirc.get("channel", None)
1313 irc_nick = iniirc.get("nickname", None)
1314 irc_pass = iniirc.get("password", None)
1316 if irc_host and irc_nick and irc_chan:
1317 irc = reporters.IRC(irc_host, irc_nick,
1319 password = irc_pass,
1320 channels = [ irc_chan ],
1321 notify_events = [ 'exception', 'problem', 'recovery' ]
1324 c['services'].append(irc)
# Turn revision hashes in the web UI into clickable links: rewrite
# https://git.openwrt.org/openwrt/<repo>.git revisions into the matching
# gitweb commit URL (%s is replaced with the revision hash).
1326 c['revlink'] = util.RevlinkMatch([
1327 r'https://git.openwrt.org/openwrt/(.*).git'
1329 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1334 # This specifies what database buildbot uses to store its state. You can leave
1335 # this at its default for all but the largest installations.
# Entry of the c['db'] dict (opener outside this view): local SQLite file
# in the master's base directory.
1336 'db_url' : "sqlite:///state.sqlite",
# None disables buildbot's phone-home usage reporting entirely.
1339 c['buildbotNetUsageData'] = None