phase1, phase2: update master to Debian 10 / Buildbot 2.0.1 (phase2/master.cfg)
# -*- python -*-
# ex: set syntax=python:

import os
import re
import base64
import subprocess
import configparser

from buildbot import locks

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### WORKERS

# The 'workers' list defines the set of recognized workers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# name and password must be configured on the worker.
from buildbot.worker import Worker

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
	slave_port = ini.getint("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
	other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
	tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	git_ssh = False

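# For reference, an illustrative config.ini fragment covering the options read
# above; all values are placeholders, not taken from any real deployment:
#
#   [general]
#   git_ssh = true
#   git_ssh_key = /path/to/git-clone.key
#
#   [phase2]
#   buildbot_url = http://buildbot.example.org/
#   port = 9990
#   persistent = false
#   other_builds = 0
#   expire = 1209600
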
c['workers'] = []
max_builds = dict()

for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

			if max_builds[name] == 1:
				sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))

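# Illustrative worker stanza (placeholder name and password) matching the keys
# read in the loop above; "shared_wd = true" requires "builds = 1":
#
#   [slave example-worker]
#   name = example-worker
#   password = secret
#   phase = 2
#   builds = 1
#   shared_wd = true
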
# 'protocols' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the workers (with their
# --master option)
c['protocols'] = {'pb': {'port': slave_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data (buildHorizon/logHorizon are not accepted by
# Buildbot 2.x)
#c['buildHorizon'] = 30
#c['logHorizon'] = 20

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")

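# Illustrative [rsync], [repo] and [usign] sections consumed above; the rsync
# module names, passwords and the key value are placeholders:
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = master
#
#   [rsync]
#   binary_url = user@example.org::bin-upload
#   binary_password = secret
#   source_url = user@example.org::src-upload
#   source_password = secret
#   sdk_url = downloads.example.org::sdk-download
#   sdk_password = secret
#   sdk_pattern = openwrt-sdk-*.tar.xz
#
#   [usign]
#   key = <base64-encoded usign secret key>
#   comment = untrusted comment: example key
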
# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findarches = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.decode().strip().split()
	arches.append(at)
	archnames.append(at[0])

# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

def parse_feed_entry(line):
	parts = line.strip().split()
	if parts[0] == "src-git":
		feeds.append(parts)
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	parse_feed_entry(line.decode())

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parse_feed_entry(line)

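# parse_feed_entry() expects feeds.conf.default syntax, i.e. lines of the form
# "src-git <name> <url>[;<branch>]", for example (illustrative):
#
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-19.07
#
# Only src-git feeds are polled; the optional ";branch" suffix selects the
# branch watched by the GitPoller, defaulting to master.
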
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off a build on every architecture builder whenever one of the polled
# feeds changes on its configured branch.

def branch_change_filter(change):
	return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
	treeStableTimer=60,
	builderNames=archnames))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

from buildbot.process.factory import BuildFactory
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import StringDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties

def GetDirectorySuffix(props):
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
	return ""

def GetNumJobs(props):
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
	else:
		return 1

def GetCwd(props):
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]
	else:
		return "/"

def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	try:
		seckey = base64.b64decode(seckey)
	except:
		return None

	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())

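# Note on UsignSec2Pub(): a base64-decoded usign secret key starts with a
# two-byte algorithm tag, carries the eight-byte key id at offset 32 and the
# raw public key in its last 32 bytes, so the slicing [0:2] + [32:40] + [72:]
# reassembles the matching public key blob (assumption based on the
# usign/signify on-disk format; verify against the usign sources).
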
def IsSharedWorkdir(step):
	return bool(step.getProperty("shared_wd"))

c['builders'] = []

dlLock = locks.WorkerLock("slave_dl")

slaveNames = [ ]

for slave in c['workers']:
	slaveNames.append(slave.workername)

for arch in arches:
	ts = arch[1].split('/')

	factory = BuildFactory()

	# setup shared work directory if required
	factory.addStep(ShellCommand(
		name = "sharedwd",
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/cleanup.sh',
		workerdest = "../cleanup.sh",
		mode = 0o755))

	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	# expire tree if needed
	elif tree_expire > 0:
		factory.addStep(FileDownload(
			mastersrc = scripts_dir + '/expire.sh',
			workerdest = "../expire.sh",
			mode = 0o755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatesdk",
		description = "Updating SDK",
		command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "cleancmdlinks",
		description = "Sanitizing host command symlinks",
		command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
		haltOnFailure = True))

	factory.addStep(StringDownload(
		name = "writeversionmk",
		s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
		workerdest = "sdk/getversion.mk",
		mode = 0o755))

	factory.addStep(SetProperty(
		name = "getversion",
		property = "release_version",
		description = "Finding SDK release version",
		workdir = "build/sdk",
		command = ["make", "-f", "getversion.mk"]))

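	# getversion.mk above echoes VERSION_NUMBER from include/version.mk; a
	# release version such as "19.07.2" or "19.07-SNAPSHOT" (illustrative) is
	# later mapped by GetDirectorySuffix() to a "-19.07" upload directory
	# suffix, while anything else (e.g. plain snapshot strings) gets no suffix.
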
	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			workerdest = "sdk/key-build.pub",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			workerdest = "sdk/key-build",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			workerdest = "sdk/key-build.ucert",
			mode = 0o600))

	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/ccache.sh',
		workerdest = 'sdk/ccache.sh',
		mode = 0o755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"],
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			workerdest = "../git-clone.key",
			mode = 0o600))

		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf",
			workdir = "build/sdk",
			command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update", "-f"],
		env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			workdir = "build/sdk",
			command=["rm", "feeds.conf"],
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "logclear",
		description = "Clearing failure logs",
		workdir = "build/sdk",
		command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
		haltOnFailure = False
	))

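	# The compile step below builds all feed packages inside the SDK:
	# IGNORE_ERRORS="n m y" keeps going when individual packages fail,
	# BUILD_LOG=1 writes per-package logs under logs/ (used by the logfind and
	# logcollect steps further down), CONFIG_AUTOREMOVE=y frees build
	# directories after packaging, and the empty CONFIG_SIGNED_PACKAGES=
	# skips on-worker index signing since the Packages indexes are signed on
	# the master afterwards.
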
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		timeout = 3600,
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
		env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

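	# Signing round trip: the worker tars up the per-feed Packages indexes,
	# uploads the archive to the master, the master runs scripts/signall.sh
	# over it (which is expected to sign the indexes using the keys referenced
	# in config.ini), and the signed archive is downloaded and unpacked back
	# into bin/packages/ before the upload steps below.
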
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			workerdest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			workdir = "build/sdk",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
				WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], workernames=slaveNames, factory=factory))

####### STATUS TARGETS

# The 'www' dictionary below configures the built-in web UI. In older Buildbot
# releases this section held a list of status targets (web pages, email
# senders, and IRC bots) instead.

from buildbot.plugins import util

if ini.has_option("phase2", "status_bind"):
	c['www'] = {
		'port': ini.get("phase2", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
		c['www']['auth'] = util.UserPasswordAuth([
			(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
		)

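# Illustrative [phase2] keys enabling the web UI above (placeholder values):
#
#   [phase2]
#   status_bind = tcp:8011:interface=127.0.0.1
#   status_user = admin
#   status_password = secret
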
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's home page (linked to the 'titleURL') and is embedded
# in the title bar of the web UI.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible. This typically uses the port number set in
# the 'www' entry above, but with an externally-visible host name which the
# buildbot cannot figure out without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None