2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record our PID for the init scripts. Open with exclusive-create mode "x"
# so we never clobber an existing file, and so the previous
# exists()-then-open() sequence can't race with a concurrent start.
try:
    with open("twistd.pid", "x") as pidfile:
        pidfile.write("{}".format(os.getpid()))
except FileExistsError:
    # A pid file is already present (another instance, or a stale run) —
    # same silent skip as before.
    pass
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Load the master configuration; BUILDMASTER_CONFIG overrides the default
# ./config.ini location.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# A phase-1 master cannot run without these three sections.
for required_section in ("general", "phase1", "rsync"):
    if required_section not in ini:
        raise ValueError("Fix your configuration")
59 # This is the dictionary that the buildmaster pays attention to. We also use
60 # a shorter alias to save typing.
61 c = BuildmasterConfig = {}
63 ####### PROJECT IDENTITY
65 # the 'title' string will appear at the top of this buildbot
66 # installation's html.WebStatus home page (linked to the
67 # 'titleURL') and is embedded in the title of the waterfall HTML page.
69 c['title'] = ini['general'].get("title")
70 c['titleURL'] = ini['general'].get("title_url")
72 # the 'buildbotURL' string should point to the location where the buildbot's
73 # internal web server (usually the html.WebStatus page) is visible. This
74 # typically uses the port number set in the Waterfall 'status' entry, but
75 # with an externally-visible host name which the buildbot cannot figure out
78 c['buildbotURL'] = inip1.get("buildbot_url")
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
93 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
94 name = ini.get(section, "name")
95 password = ini.get(section, "password")
97 if ini.has_option(section, "builds"):
98 max_builds = ini.getint(section, "builds")
99 sl_props['max_builds'] = max_builds
101 sl_props['shared_wd'] = True
102 if ini.has_option(section, "cleanup"):
103 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
104 if ini.has_option(section, "dl_lock"):
105 lockname = ini.get(section, "dl_lock")
106 sl_props['dl_lock'] = lockname
107 if lockname not in NetLocks:
108 NetLocks[lockname] = locks.MasterLock(lockname)
if ini.has_option(section, "ul_lock"):
    # BUG FIX: this branch previously read the "dl_lock" option (copy-paste
    # slip), so the upload lock was registered under the download lock's
    # name and per-worker upload serialization never used its own lock.
    lockname = ini.get(section, "ul_lock")
    sl_props['ul_lock'] = lockname
    if lockname not in NetLocks:
        NetLocks[lockname] = locks.MasterLock(lockname)
114 if ini.has_option(section, "shared_wd"):
115 shared_wd = ini.getboolean(section, "shared_wd")
116 sl_props['shared_wd'] = shared_wd
117 if shared_wd and (max_builds != 1):
118 raise ValueError('max_builds must be 1 with shared workdir!')
119 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
121 # PB port can be either a numeric port or a connection string
122 pb_port = inip1.get("port") or 9989
123 c['protocols'] = {'pb': {'port': pb_port}}
126 c['collapseRequests'] = True
128 # Reduce amount of backlog data
129 c['configurators'] = [util.JanitorConfigurator(
130 logHorizon=timedelta(days=3),
134 @defer.inlineCallbacks
135 def getNewestCompleteTime(bldr):
136 """Returns the complete_at of the latest completed and not SKIPPED
137 build request for this builder, or None if there are no such build
138 requests. We need to filter out SKIPPED requests because we're
139 using collapseRequests=True which is unfortunately marking all
140 previous requests as complete when new buildset is created.
142 @returns: datetime instance or None, via Deferred
145 bldrid = yield bldr.getBuilderId()
146 completed = yield bldr.master.data.get(
147 ('builders', bldrid, 'buildrequests'),
149 resultspec.Filter('complete', 'eq', [True]),
150 resultspec.Filter('results', 'ne', [results.SKIPPED]),
152 order=['-complete_at'], limit=1)
156 complete_at = completed[0]['complete_at']
158 last_build = yield bldr.master.data.get(
161 resultspec.Filter('builderid', 'eq', [bldrid]),
163 order=['-started_at'], limit=1)
165 if last_build and last_build[0]:
166 last_complete_at = last_build[0]['complete_at']
167 if last_complete_at and (last_complete_at > complete_at):
168 return last_complete_at
172 @defer.inlineCallbacks
173 def prioritizeBuilders(master, builders):
174 """Returns sorted list of builders by their last timestamp of completed and
177 @returns: list of sorted builders
180 def is_building(bldr):
181 return bool(bldr.building) or bool(bldr.old_building)
184 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
185 d.addCallback(lambda complete_at: (complete_at, bldr))
189 (complete_at, bldr) = item
193 complete_at = date.replace(tzinfo=tzutc())
195 if is_building(bldr):
197 complete_at = date.replace(tzinfo=tzutc())
199 return (complete_at, bldr.name)
201 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
202 results.sort(key=bldr_sort)
205 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
207 return [r[1] for r in results]
209 c['prioritizeBuilders'] = prioritizeBuilders
211 ####### CHANGESOURCES
213 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
214 scripts_dir = os.path.abspath("../scripts")
216 tree_expire = inip1.getint("expire", 0)
217 config_seed = inip1.get("config_seed", "")
219 repo_url = ini['repo'].get("url")
220 repo_branch = ini['repo'].get("branch", "master")
# rsync endpoints for uploading built binaries and source archives.
def _rsync_daemon_opts(url):
    # rsync-daemon style URLs ("host::module" or "rsync://host/module")
    # honour --contimeout; plain remote-shell transport does not.
    if url.find("::") > 0 or url.startswith("rsync://"):
        return ["--contimeout=20"]
    return []

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"] + _rsync_daemon_opts(rsync_bin_url)

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"] + _rsync_daemon_opts(rsync_src_url)
237 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
239 if ini.has_section("usign"):
240 usign_key = ini['usign'].get("key")
241 usign_comment = ini['usign'].get("comment", usign_comment)
243 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
249 if not os.path.isdir(work_dir+'/source.git'):
250 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
252 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
254 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
255 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
256 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
259 line = findtargets.stdout.readline()
262 ta = line.decode().strip().split(' ')
263 targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured Git repository
# for new commits on the configured branch.
269 c['change_source'] = []
270 c['change_source'].append(GitPoller(
272 workdir=work_dir+'/work.git', branch=repo_branch,
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
280 class TagChoiceParameter(BaseParameter):
281 spec_attributes = ["strict", "choices"]
285 def __init__(self, name, label=None, **kw):
286 super().__init__(name, label, **kw)
287 self._choice_list = []
292 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
295 findtags = subprocess.Popen(
296 ['git', 'ls-remote', '--tags', repo_url],
297 stdout = subprocess.PIPE)
300 line = findtags.stdout.readline()
305 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
307 if tagver and tagver[1].find(basever[1]) == 0:
308 taglist.append(tagver[1])
310 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
311 taglist.insert(0, '')
313 self._choice_list = taglist
315 return self._choice_list
317 def parse_from_arg(self, s):
318 if self.strict and s not in self._choice_list:
319 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
323 c['schedulers'].append(SingleBranchScheduler(
325 change_filter = filter.ChangeFilter(branch=repo_branch),
326 treeStableTimer = 60,
327 builderNames = targets))
329 c['schedulers'].append(ForceScheduler(
331 buttonName = "Force builds",
332 label = "Force build details",
333 builderNames = [ "00_force_build" ],
336 util.CodebaseParameter(
338 label = "Repository",
339 branch = util.FixedParameter(name = "branch", default = ""),
340 revision = util.FixedParameter(name = "revision", default = ""),
341 repository = util.FixedParameter(name = "repository", default = ""),
342 project = util.FixedParameter(name = "project", default = "")
346 reason = util.StringParameter(
349 default = "Trigger build",
355 util.NestedParameter(
357 label="Build Options",
360 util.ChoiceStringParameter(
362 label = "Build target",
364 choices = [ "all" ] + targets
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
def IsSharedWorkdir(step):
    # doStepIf helper: the worker advertises a shared work directory via
    # the "shared_wd" build property; coerce whatever is stored to a bool.
    shared = step.getProperty("shared_wd")
    return bool(shared)
385 def IsCleanupRequested(step):
386 if IsSharedWorkdir(step):
388 do_cleanup = step.getProperty("do_cleanup")
394 def IsExpireRequested(step):
395 if IsSharedWorkdir(step):
398 return not IsCleanupRequested(step)
400 def IsTaggingRequested(step):
401 val = step.getProperty("tag")
402 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoMasterBuild(step):
    # doStepIf helper: steps gated on this fire only on release branches,
    # i.e. whenever the configured branch is anything but "master".
    building_master = (repo_branch == "master")
    return not building_master
410 def GetBaseVersion():
411 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
412 return repo_branch.split('-')[1]
417 def GetVersionPrefix(props):
418 basever = GetBaseVersion()
419 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
420 return "%s/" % props["tag"]
421 elif basever != "master":
422 return "%s-SNAPSHOT/" % basever
427 def GetNumJobs(props):
428 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
429 return str(int(int(props["nproc"]) / props["max_builds"]))
434 def GetCCache(props):
435 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
436 return props["ccache_command"]
440 def GetNextBuild(builder, requests):
442 if r.properties and r.properties.hasProperty("tag"):
446 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
449 def MakeEnv(overrides=None, tryccache=False):
451 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
452 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
455 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
456 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
457 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
459 env['CC'] = env['CCC']
460 env['CXX'] = env['CCXX']
462 if overrides is not None:
463 env.update(overrides)
467 def NetLockDl(props):
469 if props.hasProperty("dl_lock"):
470 lock = NetLocks[props["dl_lock"]]
472 return [lock.access('exclusive')]
477 def NetLockUl(props):
479 if props.hasProperty("ul_lock"):
480 lock = NetLocks[props["ul_lock"]]
482 return [lock.access('exclusive')]
487 def TagPropertyValue(props):
488 if props.hasProperty("options"):
489 options = props.getProperty("options")
490 if type(options) is dict:
491 return options.get("tag")
494 def IsTargetSelected(target):
495 def CheckTargetProperty(step):
497 options = step.getProperty("options")
498 if type(options) is dict:
499 selected_target = options.get("target", "all")
500 if selected_target != "all" and selected_target != target:
507 return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    The public key payload is assembled from byte spans of the decoded
    secret key (offsets 0-1, 32-39 and 72 onward), and a trailing
    "secret key" in the comment is rewritten to "public key".

    Returns the two-line key text (comment line + base64 payload), or
    None when `seckey` is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # Not decodable -> no usable key material.
        return None

    # BUG FIX: base64.b64encode() returns bytes; without .decode() the
    # formatted result embedded a literal "b'...'" wrapper, corrupting the
    # generated key-build.pub.
    pubkey = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
521 dlLock = locks.WorkerLock("worker_dl")
525 for worker in c['workers']:
526 workerNames.append(worker.workername)
528 force_factory = BuildFactory()
530 c['builders'].append(BuilderConfig(
531 name = "00_force_build",
532 workernames = workerNames,
533 factory = force_factory))
535 for target in targets:
536 ts = target.split('/')
538 factory = BuildFactory()
540 # setup shared work directory if required
541 factory.addStep(ShellCommand(
543 description = "Setting up shared work directory",
544 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
546 haltOnFailure = True,
547 doStepIf = IsSharedWorkdir))
549 # find number of cores
550 factory.addStep(SetPropertyFromCommand(
553 description = "Finding number of CPUs",
554 command = ["nproc"]))
556 # find gcc and g++ compilers
557 factory.addStep(FileDownload(
558 name = "dlfindbinpl",
559 mastersrc = scripts_dir + '/findbin.pl',
560 workerdest = "../findbin.pl",
563 factory.addStep(SetPropertyFromCommand(
565 property = "cc_command",
566 description = "Finding gcc command",
568 "../findbin.pl", "gcc", "", "",
570 haltOnFailure = True))
572 factory.addStep(SetPropertyFromCommand(
574 property = "cxx_command",
575 description = "Finding g++ command",
577 "../findbin.pl", "g++", "", "",
579 haltOnFailure = True))
581 # see if ccache is available
582 factory.addStep(SetPropertyFromCommand(
583 property = "ccache_command",
584 command = ["which", "ccache"],
585 description = "Testing for ccache command",
586 haltOnFailure = False,
587 flunkOnFailure = False,
588 warnOnFailure = False,
591 # expire tree if needed
593 factory.addStep(FileDownload(
595 doStepIf = IsExpireRequested,
596 mastersrc = scripts_dir + '/expire.sh',
597 workerdest = "../expire.sh",
600 factory.addStep(ShellCommand(
602 description = "Checking for build tree expiry",
603 command = ["./expire.sh", str(tree_expire)],
605 haltOnFailure = True,
606 doStepIf = IsExpireRequested,
609 # cleanup.sh if needed
610 factory.addStep(FileDownload(
611 name = "dlcleanupsh",
612 mastersrc = scripts_dir + '/cleanup.sh',
613 workerdest = "../cleanup.sh",
615 doStepIf = IsCleanupRequested))
617 factory.addStep(ShellCommand(
619 description = "Cleaning previous builds",
620 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
622 haltOnFailure = True,
623 doStepIf = IsCleanupRequested,
626 factory.addStep(ShellCommand(
628 description = "Cleaning work area",
629 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
631 haltOnFailure = True,
632 doStepIf = IsCleanupRequested,
635 # Workaround bug when switching from a checked out tag back to a branch
636 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
637 factory.addStep(ShellCommand(
638 name = "gitcheckout",
639 description = "Ensure that Git HEAD is sane",
640 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
641 haltOnFailure = True))
643 # check out the source
645 # if repo doesn't exist: 'git clone repourl'
# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
647 # 'git fetch -t repourl branch; git reset --hard revision'
651 branch = repo_branch,
653 method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
655 haltOnFailure = True,
659 factory.addStep(ShellCommand(
661 description = "Fetching Git remote refs",
662 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
667 factory.addStep(ShellCommand(
669 description = "Checking out Git tag",
670 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
671 haltOnFailure = True,
672 doStepIf = IsTaggingRequested
675 # Verify that Git HEAD points to a tag or branch
676 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
677 factory.addStep(ShellCommand(
679 description = "Ensure that Git HEAD is pointing to a branch or tag",
680 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
681 haltOnFailure = True))
683 factory.addStep(ShellCommand(
685 description = "Remove tmp folder",
686 command=["rm", "-rf", "tmp/"]))
689 factory.addStep(ShellCommand(
690 name = "rmfeedlinks",
691 description = "Remove feed symlinks",
692 command=["rm", "-rf", "package/feeds/"]))
694 factory.addStep(StringDownload(
696 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
697 workerdest = "../ccache_cc.sh",
701 factory.addStep(StringDownload(
703 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
704 workerdest = "../ccache_cxx.sh",
709 factory.addStep(ShellCommand(
710 name = "updatefeeds",
711 description = "Updating feeds",
712 command=["./scripts/feeds", "update"],
713 env = MakeEnv(tryccache=True),
714 haltOnFailure = True,
719 factory.addStep(ShellCommand(
720 name = "installfeeds",
721 description = "Installing feeds",
722 command=["./scripts/feeds", "install", "-a"],
723 env = MakeEnv(tryccache=True),
728 if config_seed is not None:
729 factory.addStep(StringDownload(
730 name = "dlconfigseed",
731 s = config_seed + '\n',
732 workerdest = ".config",
737 factory.addStep(ShellCommand(
739 description = "Seeding .config",
740 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
743 factory.addStep(ShellCommand(
745 description = "Removing output directory",
746 command = ["rm", "-rf", "bin/"]
749 factory.addStep(ShellCommand(
751 description = "Populating .config",
752 command = ["make", "defconfig"],
757 factory.addStep(ShellCommand(
759 description = "Checking architecture",
760 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
768 factory.addStep(SetPropertyFromCommand(
771 description = "Finding libc suffix",
772 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
775 if usign_key is not None:
776 factory.addStep(StringDownload(
777 name = "dlkeybuildpub",
778 s = UsignSec2Pub(usign_key, usign_comment),
779 workerdest = "key-build.pub",
783 factory.addStep(StringDownload(
785 s = "# fake private key",
786 workerdest = "key-build",
790 factory.addStep(StringDownload(
791 name = "dlkeybuilducert",
792 s = "# fake certificate",
793 workerdest = "key-build.ucert",
798 factory.addStep(ShellCommand(
800 description = "Preparing dl/",
801 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
807 factory.addStep(ShellCommand(
809 description = "Building and installing GNU tar",
810 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
811 env = MakeEnv(tryccache=True),
816 factory.addStep(ShellCommand(
818 description = "Populating dl/",
819 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
822 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
825 factory.addStep(ShellCommand(
827 description = "Cleaning base-files",
828 command=["make", "package/base-files/clean", "V=s"]
832 factory.addStep(ShellCommand(
834 description = "Building and installing tools",
835 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
836 env = MakeEnv(tryccache=True),
840 factory.addStep(ShellCommand(
842 description = "Building and installing toolchain",
843 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
848 factory.addStep(ShellCommand(
850 description = "Building kmods",
851 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
856 # find kernel version
857 factory.addStep(SetPropertyFromCommand(
858 name = "kernelversion",
859 property = "kernelversion",
860 description = "Finding the effective Kernel version",
861 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
862 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
865 factory.addStep(ShellCommand(
867 description = "Cleaning up package build",
868 command=["make", "package/cleanup", "V=s"]
871 factory.addStep(ShellCommand(
873 description = "Building packages",
874 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
879 factory.addStep(ShellCommand(
881 description = "Installing packages",
882 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
887 factory.addStep(ShellCommand(
889 description = "Indexing packages",
890 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
895 factory.addStep(ShellCommand(
897 description = "Building and installing images",
898 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
903 factory.addStep(ShellCommand(
905 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
906 command = "make -j1 buildinfo V=s || true",
911 factory.addStep(ShellCommand(
912 name = "json_overview_image_info",
913 description = "Generate profiles.json in target folder",
914 command = "make -j1 json_overview_image_info V=s || true",
919 factory.addStep(ShellCommand(
921 description = "Calculating checksums",
922 command=["make", "-j1", "checksum", "V=s"],
927 if enable_kmod_archive:
928 factory.addStep(ShellCommand(
930 description = "Creating kmod directory",
931 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
935 factory.addStep(ShellCommand(
936 name = "kmodprepare",
937 description = "Preparing kmod archive",
938 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
939 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
940 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
944 factory.addStep(ShellCommand(
946 description = "Indexing kmod archive",
947 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
948 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
954 if ini.has_option("gpg", "key") or usign_key is not None:
955 factory.addStep(MasterShellCommand(
956 name = "signprepare",
957 description = "Preparing temporary signing directory",
958 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
962 factory.addStep(ShellCommand(
964 description = "Packing files to sign",
965 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
969 factory.addStep(FileUpload(
970 workersrc = "sign.tar.gz",
971 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
975 factory.addStep(MasterShellCommand(
977 description = "Signing files",
978 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
979 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
983 factory.addStep(FileDownload(
984 name = "dlsigntargz",
985 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
986 workerdest = "sign.tar.gz",
990 factory.addStep(ShellCommand(
992 description = "Unpacking signed files",
993 command = ["tar", "-xzf", "sign.tar.gz"],
998 factory.addStep(ShellCommand(
1000 description = "Preparing upload directory structure",
1001 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1002 haltOnFailure = True
1005 factory.addStep(ShellCommand(
1006 name = "linkprepare",
1007 description = "Preparing repository symlink",
1008 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1009 doStepIf = IsNoMasterBuild,
1010 haltOnFailure = True
1013 if enable_kmod_archive:
1014 factory.addStep(ShellCommand(
1015 name = "kmoddirprepare",
1016 description = "Preparing kmod archive upload directory",
1017 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1018 haltOnFailure = True
1021 factory.addStep(ShellCommand(
1023 description = "Uploading directory structure",
1024 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1025 env={'RSYNC_PASSWORD': rsync_bin_key},
1026 haltOnFailure = True,
1031 # download remote sha256sums to 'target-sha256sums'
1032 factory.addStep(ShellCommand(
1033 name = "target-sha256sums",
1034 description = "Fetching remote sha256sums for target",
1035 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1036 env={'RSYNC_PASSWORD': rsync_bin_key},
1038 haltOnFailure = False,
1039 flunkOnFailure = False,
1040 warnOnFailure = False,
1043 # build list of files to upload
1044 factory.addStep(FileDownload(
1045 name = "dlsha2rsyncpl",
1046 mastersrc = scripts_dir + '/sha2rsync.pl',
1047 workerdest = "../sha2rsync.pl",
1051 factory.addStep(ShellCommand(
1053 description = "Building list of files to upload",
1054 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1055 haltOnFailure = True,
1058 factory.addStep(FileDownload(
1059 name = "dlrsync.sh",
1060 mastersrc = scripts_dir + '/rsync.sh',
1061 workerdest = "../rsync.sh",
1065 # upload new files and update existing ones
1066 factory.addStep(ShellCommand(
1067 name = "targetupload",
1068 description = "Uploading target files",
1069 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1070 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1071 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1072 env={'RSYNC_PASSWORD': rsync_bin_key},
1073 haltOnFailure = True,
1077 # delete files which don't exist locally
1078 factory.addStep(ShellCommand(
1079 name = "targetprune",
1080 description = "Pruning target files",
1081 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1082 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1083 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1084 env={'RSYNC_PASSWORD': rsync_bin_key},
1085 haltOnFailure = True,
1090 if enable_kmod_archive:
1091 factory.addStep(ShellCommand(
1092 name = "kmodupload",
1093 description = "Uploading kmod archive",
1094 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1095 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1096 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1097 env={'RSYNC_PASSWORD': rsync_bin_key},
1098 haltOnFailure = True,
1103 if rsync_src_url is not None:
1104 factory.addStep(ShellCommand(
1105 name = "sourcelist",
1106 description = "Finding source archives to upload",
1107 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1108 haltOnFailure = True
1111 factory.addStep(ShellCommand(
1112 name = "sourceupload",
1113 description = "Uploading source archives",
1114 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1115 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1116 env={'RSYNC_PASSWORD': rsync_src_key},
1117 haltOnFailure = True,
1122 factory.addStep(ShellCommand(
1124 description = "Reporting disk usage",
1125 command=["df", "-h", "."],
1126 env={'LC_ALL': 'C'},
1127 haltOnFailure = False,
1128 flunkOnFailure = False,
1129 warnOnFailure = False,
1133 factory.addStep(ShellCommand(
1135 description = "Reporting estimated file space usage",
1136 command=["du", "-sh", "."],
1137 env={'LC_ALL': 'C'},
1138 haltOnFailure = False,
1139 flunkOnFailure = False,
1140 warnOnFailure = False,
1144 factory.addStep(ShellCommand(
1145 name = "ccachestat",
1146 description = "Reporting ccache stats",
1147 command=["ccache", "-s"],
1148 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1149 want_stderr = False,
1150 haltOnFailure = False,
1151 flunkOnFailure = False,
1152 warnOnFailure = False,
1156 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1158 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1159 force_factory.addStep(steps.Trigger(
1160 name = "trigger_%s" % target,
1161 description = "Triggering %s build" % target,
1162 schedulerNames = [ "trigger_%s" % target ],
1163 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1164 doStepIf = IsTargetSelected(target)
1168 ####### STATUS TARGETS
1170 # 'status' is a list of Status Targets. The results of each build will be
1171 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1172 # including web pages, email senders, and IRC bots.
1174 if "status_bind" in inip1:
1176 'port': inip1.get("status_bind"),
1178 'waterfall_view': True,
1179 'console_view': True,
1184 if "status_user" in inip1 and "status_password" in inip1:
1185 c['www']['auth'] = util.UserPasswordAuth([
1186 (inip1.get("status_user"), inip1.get("status_password"))
1188 c['www']['authz'] = util.Authz(
1189 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1190 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1194 if ini.has_section("irc"):
1196 irc_host = iniirc.get("host", None)
1197 irc_port = iniirc.getint("port", 6667)
1198 irc_chan = iniirc.get("channel", None)
1199 irc_nick = iniirc.get("nickname", None)
1200 irc_pass = iniirc.get("password", None)
1202 if irc_host and irc_nick and irc_chan:
1203 irc = reporters.IRC(irc_host, irc_nick,
1205 password = irc_pass,
1206 channels = [ irc_chan ],
1207 notify_events = [ 'exception', 'problem', 'recovery' ]
1210 c['services'].append(irc)
1212 c['revlink'] = util.RevlinkMatch([
1213 r'https://git.openwrt.org/openwrt/(.*).git'
1215 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1220 # This specifies what database buildbot uses to store its state. You can leave
1221 # this at its default for all but the largest installations.
1222 'db_url' : "sqlite:///state.sqlite",
1225 c['buildbotNetUsageData'] = None