phase2: allow overriding config.ini location with env var
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import base64
import subprocess
import ConfigParser

from buildbot import locks

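# Read the master configuration. The location of config.ini can be overridden
# with the BUILDMASTER_CONFIG environment variable; it defaults to ./config.ini
# relative to the master's working directory.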
ini = ConfigParser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False

c['slaves'] = []
max_builds = dict()

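# Each "[slave ...]" section of config.ini describes one buildslave. An
# illustrative entry (hypothetical values; the real config.ini ships
# separately) might look like:
#
#   [slave 1]
#   name = example-slave-01
#   password = secret
#   builds = 2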
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

gpg_key = None
gpg_passphrase = None
gpg_comment = repo_branch.replace("-", " ").title() + " key"

if ini.has_option("gpg", "key"):
    gpg_key = ini.get("gpg", "key")

if ini.has_option("gpg", "passphrase"):
    gpg_passphrase = ini.get("gpg", "passphrase")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")


# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

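# dumpinfo.pl prints one line per package architecture; judging from how the
# fields are used below, each line carries the architecture name followed by a
# "target/subtarget" that can build it. The first field becomes the builder
# name, the second selects which SDK to download.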
findarches = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])


# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

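# Parse one line of a feeds configuration. Only "src-git" feeds are used; the
# URL may carry a ";branch" suffix which selects the branch to poll. Every
# feed gets its own GitPoller so commits on the configured branch show up as
# changes.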
def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0] == "src-git":
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

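# Ask the SDK's makefiles for their BASE_FEED value and, if it is a src-git
# entry, register it as an additional change source alongside the feeds read
# from feeds.conf.default below.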
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(line)

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, a change on a feed's configured branch kicks off a build on every architecture builder.

def branch_change_filter(change):
    return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import StringDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


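# Derive the "-NN.NN" directory suffix for a release SDK (e.g. a
# release_version of "18.06.2" or "18.06-SNAPSHOT" maps to "-18.06");
# builds without a matching release_version get no suffix.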
def GetDirectorySuffix(props):
    verpat = re.compile('^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""

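# Scale make's -j value to the slave: divide its CPU count by the number of
# builds that may run on it concurrently (this master's builds plus any
# configured "other_builds"), then add one.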
def GetNumJobs(props):
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1

def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

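# Reconstruct the usign (signify) public key from a base64-encoded secret key
# by picking the algorithm identifier, the key id and the public key bytes out
# of the decoded blob, and rewrite the comment accordingly.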
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except:
        return None

    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup-phase2.sh',
        slavedest = "cleanup.sh",
        mode = 0755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            slavedest = "../expire.sh",
            mode = 0755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

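    # Drop a tiny helper makefile into the SDK which prints VERSION_NUMBER;
    # the "getversion" step below uses it to set the release_version property
    # that GetDirectorySuffix() turns into the upload directory suffix.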
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        slavedest = "sdk/getversion.mk",
        mode = 0755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            slavedest = "sdk/key-build.pub",
            mode = 0600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            slavedest = "sdk/key-build",
            mode = 0600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            slavedest = "sdk/key-build.ucert",
            mode = 0600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        slavedest = 'sdk/ccache.sh',
        mode = 0755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

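    # If an SSH deploy key is configured, install it on the slave and rewrite
    # feeds.conf to use ssh:// URLs so feeds can be fetched from authenticated
    # remotes; the temporary feeds.conf is removed again after the update.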
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            slavedest = "../git-clone.key",
            mode = 0600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            workdir = "build/sdk",
            command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y"],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

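    # Signing happens on the master: the per-feed Packages indexes are packed
    # on the slave, uploaded to the master, signed there by signall.sh with
    # the configured GPG and/or usign keys, then downloaded and unpacked again.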
    if gpg_key is not None or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = {
                'GPGKEY': gpg_key,
                'GPGPASS': gpg_passphrase,
                'GPGCOMMENT': gpg_comment,
                'USIGNKEY': usign_key,
                'USIGNCOMMENT': usign_comment
            },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

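    # Publish the results via rsync in two passes: "uploadprepare" creates the
    # per-architecture directory skeleton on the mirror, then "packageupload"
    # syncs the actual package tree into it (with --delete, so stale files are
    # removed). The failure logs collected below are published the same way.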
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        alwaysRun = True
    ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}