2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Record this master's pid in twistd.pid unless one is already present.
pidfile_path = "twistd.pid"
if not os.path.exists(pidfile_path):
    with open(pidfile_path, "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))
# Load the master's ini configuration; BUILDMASTER_CONFIG may override the
# default ./config.ini location.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally reachable URL of this (phase 2) buildmaster.
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional ini overrides; the corresponding defaults are assigned earlier in
# the file (not visible in this excerpt — verify before relying on them).
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    # keep build trees between builds instead of wiping them
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "expire"):
    # age (seconds) after which a persistent tree is discarded
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    # clone feeds over ssh instead of https
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "[worker ...]" ini section that declares a phase-2 worker.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # per-worker properties; shared_wd means all builders on this
            # worker share one work directory
            sl_props = { 'shared_wd': False }

            if ini.has_option(section, "builds"):
                # maximum number of concurrent builds on this worker
                max_builds[name] = ini.getint(section, "builds")

                if max_builds[name] == 1:
                    # a single-build worker can safely share its workdir
                    sl_props['shared_wd'] = True
104 if ini.has_option(section, "shared_wd"):
105 sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
106 if sl_props['shared_wd'] and (max_builds != 1):
107 raise ValueError('max_builds must be 1 with shared workdir!')
109 c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
c['protocols'] = {'pb': {'port': worker_port}}

# Merge queued build requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Master-side checkout area and the location of the helper scripts.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync endpoint (and password) used to publish built packages.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync endpoint for mirroring downloaded source archives.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Filename glob used to pick the SDK archive from the mirror.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository to build; branch defaults to master.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key and the comment embedded in the derived public key;
# the default comment is derived from the branch name.
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Maintain a shallow master-side clone of the source tree so targets and
# feeds can be enumerated below.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# Enumerate package architectures via the tree's helper script; each output
# line starts with the architecture name.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])

# Maps feed repository URL -> branch being tracked (filled by parse_feed_entry).
feedbranches = dict()

c['change_source'] = []
def parse_feed_entry(line):
    """Register a GitPoller change source for one feeds.conf line.

    Only src-git* entries are considered. The URL field may carry a
    ';branch' suffix; without it the branch defaults to 'master'. Updates
    the module-level `feedbranches` map and appends to c['change_source'].
    """
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        # parts: <method> <feed-name> <url>[;<branch>]
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Ask the SDK's build system for its base feed definition and register a
# poller for it.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
parse_feed_entry(str(line, 'utf-8'))

# Also register pollers for all default feeds.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    parse_feed_entry(line)

# Without at least one change source the master would never build anything.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Build every architecture when a commit lands on the branch tracked for
# the repository the change came from.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger: a force scheduler driving the 00_force_build builder.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Pin the codebase fields so the force dialog does not prompt for them.
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    util.NestedParameter(
        label="Build Options",

        # "all" (the default) triggers every architecture; otherwise only
        # the selected one (see IsArchitectureSelected below).
        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
def GetDirectorySuffix(props):
    """Renderer: map the 'release_version' property to an upload-directory
    suffix such as "-21.02", or "" for snapshots/unparseable versions.

    Accepted forms: "MM.mm.patch", "MM.mm.patch-rcN" and "MM.mm-SNAPSHOT".
    """
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # guard against non-matching versions — the original dereferenced
        # m.group() unconditionally and crashed on unexpected input
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    # no usable release version -> no suffix
    return ""
def GetNumJobs(props):
    """Renderer: number of parallel make jobs for this worker, as a string.

    Divides the worker's CPU count by its concurrent-build limit so
    simultaneous builds share the machine fairly.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # BUG FIX: use integer division — `/` yields a float, producing
        # arguments like "-j4.0" which make(1) rejects.
        return str(int(props["nproc"]) // max_builds[props["workername"]])
    # conservative fallback when the properties are unavailable
    return "1"
    # Body of the working-directory renderer (its def line is outside this
    # excerpt): prefer an explicit 'builddir' property, fall back to
    # 'workdir'.
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that is true when a forced build selected
    `target` (or "all") via the force scheduler's "architecture" option."""
    def CheckArchitectureProperty(step):
        options = step.getProperty("options")
        # options is a dict for forced builds; anything else means no
        # explicit selection was made
        if type(options) is dict:
            selected_arch = options.get("architecture", "all")
            if selected_arch != "all" and selected_arch != target:
    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign *public* key file content from a base64 secret key.

    Returns the two-line public key text: the comment line (with a trailing
    "secret key" rewritten to "public key") followed by the base64 public
    key material, or None when `seckey` is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except ValueError:
        # binascii.Error (invalid base64/padding) is a ValueError subclass
        return None

    # Slice offsets follow the original code's usign secret-key layout
    # (2-byte algorithm tag, key id at 32..40, public half from 72).
    # NOTE(review): layout assumed from the original slicing — verify
    # against the usign key format before changing.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        # .decode() so we emit text, not a b'...' repr, into key-build.pub
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
def IsSharedWorkdir(step):
    """doStepIf helper: true when the step's worker uses a shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # newest completed, non-skipped build request for this builder
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    complete_at = completed[0]['complete_at']

    # also consider the most recently started build of this builder
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        # prefer whichever completion timestamp is newer
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

        # fetch each builder's newest completion time asynchronously
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))

        (complete_at, bldr) = item
        # naive timestamps are normalized to UTC before comparison
        complete_at = date.replace(tzinfo=tzutc())
        if is_building(bldr):
            # builders currently building are pushed to the back of the queue
            complete_at = date.replace(tzinfo=tzutc())
        return (complete_at, bldr.name)

    # oldest-completed builders first
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)
    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
# Per-worker lock serializing download steps on a given worker.
dlLock = locks.WorkerLock("worker_dl")

# Every configured worker may run any builder.
for worker in c['workers']:
    workerNames.append(worker.workername)

# Factory behind the force scheduler; per-arch trigger steps are appended
# to it while the arch builders are created below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
    # assumes arch[1] is a "target/subtarget" path — ts[0]/ts[1] locate the
    # SDK on the mirror below (TODO confirm against dump-target-info output)
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # fetch the cleanup helper onto the worker
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",

    # "full" wipes previous builds; "single" only cleans this work area
    factory.addStep(ShellCommand(
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Cleaning work area",
        command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
        haltOnFailure = True,

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",

        factory.addStep(ShellCommand(
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))
    # Fetch the SDK archive for this target/subtarget from the mirror.
    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # --checksum: only files that actually changed touch the persistent sdk/
    factory.addStep(ShellCommand(
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # drop absolute host-tool symlinks that point outside /bin and /usr/bin
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # helper makefile that prints the SDK's VERSION_NUMBER
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",

    # store the SDK's release version in the 'release_version' property
    factory.addStep(SetPropertyFromCommand(
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))
    # Install the usign key material: a real public key derived from the
    # configured secret plus placeholder private key / certificate files —
    # actual signing happens on the master (signall step below).
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",

        factory.addStep(StringDownload(
            s = "# fake private key",
            workerdest = "sdk/key-build",

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",

    # share the download cache across builds via $HOME/dl
    factory.addStep(ShellCommand(
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))
    factory.addStep(ShellCommand(
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',

    factory.addStep(ShellCommand(
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # switch feed methods from src-git to src-git-full in the working copy
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    # when SSH cloning is configured: install the deploy key and rewrite
    # feed URLs from https:// to ssh://git@
    factory.addStep(StringDownload(
        name = "dlgitclonekey",
        workerdest = "../git-clone.key",

    factory.addStep(ShellCommand(
        name = "patchfeedsconf",
        description = "Patching feeds.conf to use SSH cloning",
        workdir = "build/sdk",
        command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    # remove the SSH-rewritten feeds.conf again after updating
    factory.addStep(ShellCommand(
        name = "rmfeedsconf",
        description = "Removing feeds.conf",
        workdir = "build/sdk",
        command=["rm", "feeds.conf"],
        haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    # best effort: clear leftovers from a previous run; never fails the build
    factory.addStep(ShellCommand(
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    # main compile step; -j is rendered per worker by GetNumJobs
    factory.addStep(ShellCommand(
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    # record the feed revisions that were actually built
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
644 if ini.has_option("gpg", "key") or usign_key is not None:
645 factory.addStep(MasterShellCommand(
646 name = "signprepare",
647 description = "Preparing temporary signing directory",
648 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
652 factory.addStep(ShellCommand(
654 description = "Packing files to sign",
655 workdir = "build/sdk",
656 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
660 factory.addStep(FileUpload(
661 workersrc = "sdk/sign.tar.gz",
662 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
666 factory.addStep(MasterShellCommand(
668 description = "Signing files",
669 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
670 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
674 factory.addStep(FileDownload(
675 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
676 workerdest = "sdk/sign.tar.gz",
680 factory.addStep(ShellCommand(
682 description = "Unpacking signed files",
683 workdir = "build/sdk",
684 command = ["tar", "-xzf", "sign.tar.gz"],
    # Create the arch-specific directory skeleton on the mirror first, then
    # upload the built packages.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # Failure-log handling is best effort throughout — it never fails the
    # build (haltOnFailure/flunkOnFailure False).
    factory.addStep(ShellCommand(
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # extract failing package names from error.txt and list their log files
    factory.addStep(ShellCommand(
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    # Optionally mirror source tarballs fetched during the build (anything
    # in dl/ newer than the SDK archive we started from).
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,

    # diagnostics only: report disk/space usage on the worker
    factory.addStep(ShellCommand(
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    # Register the per-arch builder and a matching Triggerable scheduler,
    # and append a trigger step to the force-build factory that fires only
    # when this architecture (or "all") was selected.
    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Optional web UI, bound to the address/port given in phase2.status_bind.
if ini.has_option("phase2", "status_bind"):
        'port': ini.get("phase2", "status_bind"),
        'waterfall_view': True,
        'console_view': True,

    # Optional basic auth: the configured status user is granted the
    # "admins" role and may hit any control endpoint.
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.

c['buildbotURL'] = buildbot_url

####### DB URL

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
'db_url' : "sqlite:///state.sqlite",

# opt out of buildbot's usage-data reporting
c['buildbotNetUsageData'] = None