# ex: set syntax=python:
import base64
import configparser
import os
import re
import subprocess

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import Interpolate
from buildbot.process import properties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")
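# For reference, a minimal config.ini for this master might look roughly like
# the sketch below. This is illustrative only: the option names mirror the
# ini.get()/ini.has_option() calls in this file, but every value shown is a
# placeholder.
#
#   [general]
#   title = OpenWrt Packages
#   title_url = http://buildmaster.example.org/
#   workdir = /buildmaster
#
#   [phase2]
#   buildbot_url = http://buildmaster.example.org:8011/
#   status_bind = tcp:8011
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#
#   [rsync]
#   binary_url = user@server::bin
#   binary_password = example
#   sdk_url = user@server::sdk
#   sdk_password = example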
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
max_builds = dict()

# Defaults for the optional config.ini settings queried below; the concrete
# values are assumptions and are overridden whenever the option is present.
worker_port = 9990
persistent = False
tree_expire = 0
git_ssh = False
git_ssh_key = None
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False  # cloning over ssh requires a key
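# Worker credentials come from "[worker N]" sections of config.ini, parsed by
# the loop below. A hypothetical phase-2 worker entry (all values are
# placeholders) could look like:
#
#   [worker 1]
#   phase = 2
#   name = pkgworker-01
#   password = example
#   builds = 3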
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option).
c['protocols'] = {'pb': {'port': worker_port}}
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,  # assumed daily cleanup hour; the original value was elided
)]
####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# (architecture, target/subtarget) tuples discovered from the source tree
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if not parts:
        return  # skip blank lines
    if parts[0].startswith("src-git"):
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
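# A typical feed entry handled by parse_feed_entry() looks like this
# (illustrative; an optional ";branch" suffix selects a non-master branch):
#
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-22.03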
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    exit(1)
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build.

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name = "options",
            label = "Build Options",
            layout = "vertical",
            fields = [
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

c['builders'] = []
@properties.renderer
def GetDirectorySuffix(props):
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""

@properties.renderer
def GetNumJobs(props):
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # integer division so the rendered -j value is a whole number
        return str(int(props["nproc"]) // max_builds[props["workername"]])
    else:
        return "1"

@properties.renderer
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"
def IsArchitectureSelected(target):
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # Assemble the public key from the algorithm id, key id and public part of
    # the secret key blob; .decode() so a str (not bytes) ends up in the file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
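# Illustrative use, with hypothetical key material: given the base64 secret
# key generated by `usign -G`, UsignSec2Pub() derives the matching two-line
# public key, e.g.
#   UsignSec2Pub("<base64 seckey>", "untrusted comment: Packages key")
# returns 'untrusted comment: Packages public key\nRW...'.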
def IsSharedWorkdir(step):
    return bool(step.getProperty("shared_wd"))
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when a new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # builders that never completed a build sort first ...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ... and currently busy builders sort last
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
c['prioritizeBuilders'] = prioritizeBuilders
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))
    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetPropertyFromCommand(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            workdir = "build/sdk",
            command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))
    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "clearlogs",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,  # assumed; the original timeout value was elided
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True))
    factory.addStep(FileDownload(
        name = "dlrsync.sh",
        mastersrc = scripts_dir + "/rsync.sh",
        workerdest = "../rsync.sh",
        mode = 0o755))
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False))
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False))
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--checksum", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False))
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True))
    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])))
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True
        }
    }
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url
####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None