2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
# Record this master process's PID once so init/cron scripts can find it;
# never overwrite an existing pid file.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
# Read the buildmaster configuration (shared with the phase1 master).
# BUILDMASTER_CONFIG overrides the default ./config.ini location.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional [phase2]/[general] overrides.
# NOTE(review): the default assignments for these names fall outside this
# view -- confirm against the full file before editing.
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

# how many builds run on the master besides this one (affects -j calculation)
if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

# build-tree expiry age in seconds; 0/absent disables expiry
if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

# clone feeds over SSH instead of HTTPS when enabled
if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
# Register every phase-2 worker declared in the config file
# ([worker X] sections with phase == 2).
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            # default: one concurrent build per worker (guards the
            # max_builds[name] lookups below when "builds" is unset)
            max_builds.setdefault(name, 1)

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            # single-slot workers can safely share one work directory
            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUGFIX: compare this worker's slot count, not the whole
                # dict -- `max_builds != 1` was always True, so every
                # shared_wd worker raised even with builds = 1.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
c['protocols'] = {'pb': {'port': worker_port}}

# Collapse compatible queued requests for a builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Master-side workspace and helper-script locations.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync endpoint for uploading built packages.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync endpoint for mirroring downloaded source archives.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# SDK download location and archive glob pattern.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch to build from.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign package-signing key; the comment defaults to one derived from the
# branch name (e.g. "untrusted comment: Openwrt 21.02 key").
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Maintain a shallow local clone of the source tree and query it for the
# list of package architectures; one builder is created per architecture.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
# NOTE(review): the `else:` pairing this pull with the clone above appears
# to be missing from this view -- confirm against the full file.
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# NOTE(review): the read-loop header and termination checks around the next
# three statements are outside this view; each output line starts with the
# architecture name followed by target information.
line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])

# Map feed repository URL -> branch we track for change polling.
feedbranches = dict()

c['change_source'] = []
def parse_feed_entry(line):
    """Parse one feeds.conf line; for src-git feeds, record the tracked
    branch in `feedbranches` and register a GitPoller change source.

    NOTE(review): one statement of this branch is missing from this view.
    """
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        # a feed URL may carry an explicit branch as "<url>;<branch>"
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Ask the SDK makefiles for the base feed, then parse it plus every entry
# of feeds.conf.default through parse_feed_entry().
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

# NOTE(review): the surrounding read-loop headers are outside this view.
line = make.stdout.readline()
parse_feed_entry(line)

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    parse_feed_entry(line)
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build
# NOTE(review): several structural lines of these scheduler declarations
# (list initialization, parameter names, closing brackets) are missing from
# this view -- the nesting below is reconstructed.
c['schedulers'].append(SingleBranchScheduler(
    # only react to changes arriving on the branch each feed is tracked at
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger: one virtual builder fans out to the per-arch builders.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],
    # fixed (empty) codebase fields -- the build source is not selectable
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")
    reason = util.StringParameter(
        default = "Trigger build",
    util.NestedParameter(
        label="Build Options",
        # "all" or a single architecture; consumed by IsArchitectureSelected
        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
def GetDirectorySuffix(props):
    """Return a "-YY.MM" upload-directory suffix for release builds.

    Parses the 'release_version' build property (e.g. "21.02.3",
    "21.02-rc1" or "21.02-SNAPSHOT"). Returns "" when the property is
    unset or does not match, so upload paths never contain "None".
    """
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # guard: an unparsable version string must not crash the renderer
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""
def GetNumJobs(props):
    """Return the `make -j` level for a build on this worker.

    Divides the worker's core count (the 'nproc' property) among this
    worker's build slots plus the configured number of other concurrent
    builds, plus one. Falls back to 1 when the properties are not set yet.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # BUGFIX: use floor division -- Python 3 true division returned a
        # float here; the %(jobs)d renderer truncates identically, so the
        # effective job count is unchanged.
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        # properties unavailable (e.g. before the nproc step ran)
        return 1
# NOTE(review): these lines are the body of a property renderer whose `def`
# line falls outside this view (it resolves the build's working directory
# from the builddir/workdir properties) -- confirm the enclosing signature
# before editing.
if props.hasProperty("builddir"):
    return props["builddir"]
elif props.hasProperty("workdir"):
    return props["workdir"]
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that is true when a forced build
    selected either "all" architectures or exactly `target`.

    The selection comes from the force scheduler's nested "options"
    property; any non-dict value (including a missing property) means the
    step should run.
    """
    def CheckArchitectureProperty(step):
        options = step.getProperty("options")
        if type(options) is dict:
            selected_arch = options.get("architecture", "all")
            if selected_arch != "all" and selected_arch != target:
                return False
        # BUGFIX: the predicate must return a truthy value for selected
        # architectures; the visible code fell off the end and returned
        # None, so the step would never run.
        return True

    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key block from a base64-encoded secret key.

    The public key is the 2-byte algorithm tag, the 8-byte key number and
    the 32-byte public part sliced out of the decoded secret key,
    re-encoded as base64 under a matching comment line ("secret key"
    rewritten to "public key"). Returns None if `seckey` is not valid
    base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # BUGFIX: b64encode() returns bytes under Python 3; decode it so the
    # generated key-build.pub does not contain a literal "b'...'" repr.
    pubkey = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
def IsSharedWorkdir(step):
    """doStepIf helper: true when this build's worker was configured with
    a shared work directory (the 'shared_wd' worker property)."""
    shared = step.getProperty("shared_wd")
    return True if shared else False
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    # NOTE(review): this view is missing a few statements of the original
    # (the empty-result guard and the final return of complete_at) --
    # confirm against the full file before relying on this body.
    bldrid = yield bldr.getBuilderId()
    # newest completed, non-skipped build request of this builder
    completed = yield bldr.master.data.get(
            ('builders', bldrid, 'buildrequests'),
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
            order=['-complete_at'], limit=1)

    complete_at = completed[0]['complete_at']

    # the most recently started build may have completed even later
    last_build = yield bldr.master.data.get(
            resultspec.Filter('builderid', 'eq', [bldrid]),
            order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    def is_building(bldr):
        # a builder with running (or stale) builds counts as busy
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the helper `def` lines wrapping the next statements
    # (a deferred-producing bldr_info and a bldr_sort key function) are
    # missing from this view, as are the fallbacks binding `date` --
    # confirm against the full file.
    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item
    complete_at = date.replace(tzinfo=tzutc())

    if is_building(bldr):
        # busy builders sort as most recently used
        complete_at = date.replace(tzinfo=tzutc())

    return (complete_at, bldr.name)

    # gather (complete_at, builder) pairs and sort oldest-first
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
# Hand builder scheduling priority to the LRU sort defined above.
c['prioritizeBuilders'] = prioritizeBuilders

# Worker-scoped lock named "worker_dl" -- presumably serializes downloads
# per worker; confirm at the usage sites outside this view.
dlLock = locks.WorkerLock("worker_dl")

# NOTE(review): the workerNames list initialization is outside this view.
for worker in c['workers']:
    workerNames.append(worker.workername)

# Virtual builder whose only job is to trigger the per-arch builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
    # NOTE(review): this whole span is the body of a per-architecture loop
    # whose header (`for arch in ...:`) is outside this view; arch[0] is the
    # architecture name, arch[1] a "target/subtarget" example. Several
    # structural lines (if-headers, closing brackets, step names) are
    # missing, so the nesting below is reconstructed.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetProperty(
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # fetch the cleanup helper onto the worker
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",

        # full cleanup of the whole builder directory
        factory.addStep(ShellCommand(
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,

        # cleanup of only this build's work area
        factory.addStep(ShellCommand(
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",

        factory.addStep(ShellCommand(
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    # fetch the per-target SDK archive over rsync
    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # sync the unpacked SDK over the persistent sdk/ tree
    factory.addStep(ShellCommand(
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # drop absolute host-tool symlinks that don't point into /bin or /usr/bin
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # helper makefile printing the SDK's VERSION_NUMBER
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",

    factory.addStep(SetProperty(
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install signing key material into the SDK (private key/cert are
    # placeholders -- actual signing happens on the master)
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",

        factory.addStep(StringDownload(
            s = "# fake private key",
            workerdest = "sdk/key-build",

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",

    # share one download cache per worker via $HOME/dl
    factory.addStep(ShellCommand(
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',

    factory.addStep(ShellCommand(
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # clone feeds with full history so per-feed git metadata is usable
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

        # SSH cloning: install the deploy key and rewrite feed URLs
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            workerdest = "../git-clone.key",

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

        # remove the rewritten feeds.conf again after updating
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    # the actual package build
    factory.addStep(ShellCommand(
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    # record the exact feed revisions this build used
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # sign the Packages indexes on the master, then fetch them back
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],

        factory.addStep(ShellCommand(
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),

        factory.addStep(MasterShellCommand(
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",

        factory.addStep(ShellCommand(
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],

    # create the remote per-arch directory, then upload the packages
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # collect and upload per-package failure logs (best effort)
    factory.addStep(ShellCommand(
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

    # mirror freshly-downloaded source archives back to the source mirror
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,

    # diagnostic disk-usage reports
    factory.addStep(ShellCommand(
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # register the per-arch builder, its trigger scheduler, and the force
    # step that fires it when the architecture was selected
    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI, enabled only when a bind address/port is configured.
# NOTE(review): the c['www'] dict literal opening/closing lines are missing
# from this view -- the nesting below is reconstructed.
if ini.has_option("phase2", "status_bind"):
        'port': ini.get("phase2", "status_bind"),
            'waterfall_view': True,
            'console_view': True,

    # optional basic auth; the configured user gets the "admins" role with
    # control over all endpoints
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out

c['buildbotURL'] = buildbot_url

####### DB URL

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the surrounding c['db'] = { ... } lines are missing from
# this view.
    'db_url' : "sqlite:///state.sqlite",

# opt out of sending usage data to the buildbot project
c['buildbotNetUsageData'] = None