2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
# Record the master's PID once so external tooling can locate the
# running process; an existing twistd.pid is left untouched.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as handle:
        handle.write(str(os.getpid()))
# Load the INI configuration; the path may be overridden through the
# BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this master, interpolated into worker-side
# helper invocations (cleanup.sh) and the web UI below.
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional overrides from config.ini. Each name is assumed to carry a
# default assigned earlier in the file -- TODO confirm.
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

# Unrelated builds expected on the same worker; GetNumJobs() divides the
# CPU budget by this plus the worker's own build slots.
if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

# Build-tree expiry threshold handed to expire.sh; only used when > 0.
if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

# Feed cloning over SSH and the private key to use for it.
if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
# Register every phase-2 worker declared in config.ini. A worker section
# must provide name, password and phase == 2; optional keys set the number
# of concurrent build slots ("builds") and the shared-workdir flag.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            # Default: one build slot per worker (also keeps the lookups
            # below from raising KeyError when "builds" is absent).
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

                if max_builds[name] == 1:
                    sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # Fix: compare this worker's slot count, not the whole
                # max_builds dict -- `max_builds != 1` is always True, so
                # every shared_wd worker used to raise here.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
c['protocols'] = {'pb': {'port': worker_port}}

# Merge queued requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the JanitorConfigurator call's remaining arguments and
# closing brackets are not visible in this excerpt.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Master-side work area and the shared helper-scripts checkout.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync target and password used when uploading built packages/logs.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync target for uploading newly fetched source archives.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Filename pattern of the per-target SDK archive fetched for each build.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Main source repository and the branch to track/build.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key; the public-key comment defaults to one derived
# from the branch name ("openwrt-21.02" -> "Openwrt 21.02 key").
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Keep a shallow clone of the source tree on the master; it is only used
# to enumerate architectures and feeds below, not to build from.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# Ask the tree for its package architectures; one builder is created per
# architecture name collected into archnames.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# NOTE(review): the surrounding read loop is elided in this excerpt;
# presumably each line is "<arch> <target/subtarget> ..." -- confirm.
line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])
# Maps feed repository URL -> branch name; consumed by the scheduler's
# change filter to accept only changes on the tracked branch.
feedbranches = dict()

c['change_source'] = []
def parse_feed_entry(line):
    """Register one feed definition as a GitPoller change source.

    `line` has the form "src-git <name> <url>[;<branch>]"; the branch
    defaults to 'master'. Accepts bytes as well as str, because the
    visible callers pass both (the `make val.BASE_FEED` pipe yields
    bytes, feeds.conf.default yields str) -- bytes used to fail the
    "src-git" comparison silently. Blank lines are ignored.
    """
    if isinstance(line, bytes):
        line = line.decode()
    parts = line.strip().split()
    if parts and parts[0] == "src-git":
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# The base feed comes from `make val.BASE_FEED` in the SDK template...
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
parse_feed_entry(line)

# ...and the remaining feeds from feeds.conf.default.
# NOTE(review): as shown this re-parses the same `line` from the pipe
# above; presumably the elided lines iterate over `f` -- confirm.
with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    parse_feed_entry(line)
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Fire every per-arch builder when a change lands on the branch that the
# corresponding feed tracks (see feedbranches above).
# NOTE(review): several bracket/argument lines of these constructor calls
# are elided from this excerpt.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))

# Manual "force build" entry point; the chosen architecture is evaluated
# per trigger step by IsArchitectureSelected().
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    util.CodebaseParameter(
        label = "Repository",
        # All codebase fields are fixed: forced builds always build the
        # configured repository, never an arbitrary one.
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    util.NestedParameter(
        label="Build Options",
        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
def GetDirectorySuffix(props):
    """Map the SDK's release_version property to an upload directory suffix.

    "21.02.3", "21.02.3-rc1" and "21.02-SNAPSHOT" all yield "-21.02".
    Returns "" when the property is unset or does not look like a release
    version, so uploads fall back to the unsuffixed directory.
    """
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # Guard the match: non-release version strings yield m is None,
        # which the unguarded code would dereference.
        if m is not None:
            return "-%02d.%02d" % (int(m.group(1)), int(m.group(2)))
    return ""
def GetNumJobs(props):
    """Parallel make job count for this build on its worker.

    Divides the worker's CPU count by the number of builds that may run
    on it concurrently (this worker's slots plus `other_builds`), plus
    one so a build is never starved. Falls back to 1 when the worker
    properties are unavailable.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Floor division: a fractional value is meaningless for make -j.
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    return 1
def GetCwd(props):
    """Absolute path of the build directory on the worker.

    Prefers the per-build `builddir` property, then `workdir`, and
    finally "/" so WithProperties interpolations of %(cwd)s always get
    a usable path. (NOTE(review): the function header and fallback were
    elided from the excerpt; reconstructed -- confirm.)
    """
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"
def IsArchitectureSelected(target):
    """Build a doStepIf predicate for the per-arch trigger steps.

    The returned callable is true when the force scheduler's "options"
    property selects `target` or "all" -- and also whenever the property
    is missing or malformed, so ordinary (non-forced) builds are never
    skipped.
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            # The force scheduler delivers its NestedParameter as a dict.
            if isinstance(options, dict):
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass
        # Default to building.
        return True
    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    Returns the two-line public key (the comment with "secret key"
    rewritten to "public key", then the base64 key data), or None when
    the input is not decodable base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None
    # Public key = algorithm id (2 bytes) + key id (8 bytes) + key body.
    public = seckey[0:2] + seckey[32:40] + seckey[72:]
    # .decode() is the fix: b64encode returns bytes, so formatting it
    # directly would write a literal "b'...'" into key-build.pub.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           base64.b64encode(public).decode())
def IsSharedWorkdir(step):
    """doStepIf helper: true when this build's worker was registered with
    the `shared_wd` property (single shared work directory)."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped request first; only one row needed.
    # NOTE(review): the filter-list brackets and the empty-result guard
    # appear to be elided from this excerpt -- confirm.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    return completed[0]['complete_at']
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the enclosing helper definitions (bldr_info / bldr_sort)
    # are partially elided from this excerpt; the statements below belong
    # to them -- confirm against the full file.
    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item
    # NOTE(review): `date` is not defined in the visible lines; presumably
    # a naive-datetime normalization to UTC -- confirm.
    complete_at = date.replace(tzinfo=tzutc())
    if is_building(bldr):
        complete_at = date.replace(tzinfo=tzutc())
    return (complete_at, bldr.name)

    # Gather (complete_at, builder) pairs for all builders and sort them.
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
# Per-worker lock around download steps.
# NOTE(review): no acquirer of dlLock is visible in this excerpt.
dlLock = locks.WorkerLock("worker_dl")

# Every per-arch builder may run on every registered worker.
# NOTE(review): the initialization of workerNames is elided here.
for worker in c['workers']:
    workerNames.append(worker.workername)

# 00_force_build only hosts the per-arch Trigger steps appended below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
    # One builder per architecture. arch[1] is a "<target>/<subtarget>"
    # pair used to locate the SDK archive for this arch.
    # NOTE(review): the enclosing `for arch in ...:` header is elided
    # from this excerpt.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetProperty(
        description = "Finding number of CPUs",
        command = ["nproc"]))
    # Fetch the cleanup helper and run it against this build area;
    # "full" wipes everything for the worker, "single" only this build.
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",

    factory.addStep(ShellCommand(
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Cleaning work area",
        command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
        haltOnFailure = True,

    # expire tree if needed
    # NOTE(review): the matching `if` branch for this elif (presumably
    # keyed on `persistent`) is elided from this excerpt.
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",

        factory.addStep(ShellCommand(
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            haltOnFailure = True,
    # Fetch the SDK archive for this target via rsync and unpack it over
    # any existing sdk/ tree.
    factory.addStep(ShellCommand(
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # Remove absolute host-tool symlinks outside /bin and /usr/bin.
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # Derive the SDK's release version into the release_version property,
    # which GetDirectorySuffix() reads for the upload paths.
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",

    factory.addStep(SetProperty(
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))
    # Install the build keys. Only the public key is real; the private
    # key and ucert are placeholders since actual signing happens on the
    # master (see the signall.sh step below).
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",

        factory.addStep(StringDownload(
            s = "# fake private key",
            workerdest = "sdk/key-build",

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
    # Share the download cache across builds via a symlink into $HOME/dl.
    factory.addStep(ShellCommand(
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',

    factory.addStep(ShellCommand(
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    # Optional SSH-based cloning of the feeds (general/git_ssh).
    factory.addStep(StringDownload(
        name = "dlgitclonekey",
        workerdest = "../git-clone.key",

    factory.addStep(ShellCommand(
        name = "patchfeedsconf",
        description = "Patching feeds.conf to use SSH cloning",
        workdir = "build/sdk",
        command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))
    factory.addStep(ShellCommand(
        name = "rmfeedsconf",
        description = "Removing feeds.conf",
        workdir = "build/sdk",
        command=["rm", "feeds.conf"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    # Clear leftovers from the previous run so stale failures are not
    # re-collected by the faillog steps below.
    factory.addStep(ShellCommand(
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False

    # Main compile step; -j is sized per worker by GetNumJobs().
    factory.addStep(ShellCommand(
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    # Record the exact feed revisions used for this build.
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
    # Sign the generated Packages indexes on the master: pack them, upload
    # to the master, run signall.sh there, then fetch and unpack the result.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],

        factory.addStep(ShellCommand(
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),

        factory.addStep(MasterShellCommand(
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",

        factory.addStep(ShellCommand(
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
    # Create the remote per-arch directory, then mirror the built packages
    # to a release-suffixed path (suffix computed by GetDirectorySuffix).
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # Collect the build logs of packages listed in error.txt and upload
    # them; all three steps tolerate failure (no error.txt on clean runs).
    factory.addStep(ShellCommand(
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False

    factory.addStep(ShellCommand(
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False

    factory.addStep(ShellCommand(
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
    # Optionally mirror newly downloaded source tarballs (anything in dl/
    # newer than the SDK archive) to the sources server.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,

    factory.addStep(ShellCommand(
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        haltOnFailure = False,
    # Register the per-arch builder and give the force scheduler a gated
    # trigger step for it (gate: IsArchitectureSelected).
    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
# Optional web UI with waterfall/console views; admin-only control
# endpoints when status_user/status_password are configured.
# NOTE(review): the `c['www'] = {` opener for the dict entries below is
# elided from this excerpt.
if ini.has_option("phase2", "status_bind"):
    'port': ini.get("phase2", "status_bind"),
    'waterfall_view': True,
    'console_view': True,

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out

c['buildbotURL'] = buildbot_url

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the `c['db'] = {` opener for this entry is elided from
# this excerpt.
'db_url' : "sqlite:///state.sqlite",

# Do not report usage data to buildbot.net.
c['buildbotNetUsageData'] = None