2 # ex: set syntax=python:
12 from dateutil.tz import tzutc
13 from datetime import datetime, timedelta
15 from twisted.internet import defer
16 from twisted.python import log
18 from buildbot import locks
19 from buildbot.data import resultspec
20 from buildbot.changes import filter
21 from buildbot.changes.gitpoller import GitPoller
22 from buildbot.config import BuilderConfig
23 from buildbot.plugins import schedulers
24 from buildbot.plugins import steps
25 from buildbot.plugins import util
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Property
29 from buildbot.process.properties import WithProperties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetProperty
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Record this process's PID in twistd.pid so standard twistd tooling can
# find the running master. An already-existing pidfile is left untouched.
pid_file = "twistd.pid"
if not os.path.exists(pid_file):
    with open(pid_file, "w") as handle:
        handle.write("{}".format(os.getpid()))
# Load the INI configuration; the path can be overridden through the
# BUILDMASTER_CONFIG environment variable (defaults to ./config.ini).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible base URL of this buildmaster (used by worker-side scripts).
buildbot_url = ini.get("phase2", "buildbot_url")
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional overrides from the [phase2] / [general] INI sections.
# NOTE(review): the default values for these settings are assigned in a
# portion of the file not visible in this excerpt.
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

# Number of unrelated builds assumed to compete for this worker's CPUs
# (used when computing the make -j value).
if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

# Build-tree expiry age in seconds (0 disables expiry).
if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

# Whether to clone feeds over SSH, and the private key to use for it.
if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "[worker ...]" INI section whose phase is 2 as a build
# worker. Per-worker options: 'builds' caps concurrent builds (default 1);
# 'shared_wd' shares one work directory between builders and therefore
# requires builds == 1.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            # Default to one concurrent build; without this, reading
            # max_builds[name] below raises KeyError when the 'builds'
            # option is absent.
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

                if max_builds[name] == 1:
                    sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # Bug fix: compare this worker's build limit, not the whole
                # max_builds dict (a dict never equals 1, so the original
                # check fired for every shared_wd worker).
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option).
c['protocols'] = {'pb': {'port': worker_port}}

# Merge compatible pending build requests into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the remaining JanitorConfigurator keyword arguments and the
# closing brackets of this statement are not visible in this excerpt.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Master-side working directory and location of the helper scripts.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync endpoint/credentials for uploading built packages and logs.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync endpoint for uploading downloaded source archives.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Filename glob used to locate the SDK archive on the rsync server.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch to track.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key and the comment embedded in the derived public key.
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Ensure a checkout of the source tree exists and is current; it is used
# below to discover architectures and feeds.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
# NOTE(review): this pull is presumably the 'else' branch of the check
# above — the 'else:' line is not visible in this excerpt.
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Ask the tree which package architectures it supports.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# NOTE(review): the read loop around the following lines (its 'while'
# header, EOF/termination handling and any extra bookkeeping) is not
# visible in this excerpt; each output line is "<arch> <target> ...".
line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])
# Feed repository URL -> branch being polled, filled by parse_feed_entry().
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    """Parse one feeds.conf entry; register a GitPoller for src-git feeds.

    Records the feed's branch in *feedbranches* keyed by repository URL.
    """
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        # NOTE(review): one line of this branch is not visible in this
        # excerpt. The URL may carry a branch after ';' (url;branch).
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

# The base feed comes from the SDK makefile's val.BASE_FEED target.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

# NOTE(review): the loop reading the make output is only partially visible.
line = make.stdout.readline()
parse_feed_entry(str(line, 'utf-8'))

# Remaining feeds come from the tree's feeds.conf.default.
# NOTE(review): the 'for line in f:' header is not visible in this excerpt.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    parse_feed_entry(line)

# Without any change source the master would never start a build.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Build every architecture when a change lands on the branch that the
# corresponding feed is polled from (see feedbranches above).
# NOTE(review): the scheduler's 'name' argument and some closing brackets
# are not visible in this excerpt.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))
# Manual trigger: a ForceScheduler driving the 00_force_build pseudo-builder,
# with an 'architecture' choice ('all' or one specific architecture).
# NOTE(review): several parameters and closing brackets of this scheduler
# are not visible in this excerpt.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Fixed (empty) codebase — the forced build does not check out sources.
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    util.NestedParameter(
        label="Build Options",

        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

def GetDirectorySuffix(props):
    """Render a "-MM.mm" directory suffix from the 'release_version'
    build property (e.g. "21.02.3" -> "-21.02"); used to pick the
    versioned upload directory."""
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # NOTE(review): the guard on a successful match and the fallback
        # return value are not visible in this excerpt.
        return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
    """Return the number of parallel make jobs for the current worker.

    Divides the worker's CPU count (the 'nproc' property) by the number
    of builds that may run concurrently on it (its own max_builds plus
    the configured other_builds), plus one.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Use floor division: true division ('/') returns a float, while
        # this value is interpolated into a make "-j%d" flag and should
        # be an int.
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
# NOTE(review): these lines are the body of a property helper whose 'def'
# line is not visible in this excerpt; it appears to resolve the build's
# current directory from the 'builddir' or 'workdir' property.
if props.hasProperty("builddir"):
    return props["builddir"]
elif props.hasProperty("workdir"):
    return props["workdir"]
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that is true when the forced build's
    'architecture' option is "all" or matches *target*."""
    def CheckArchitectureProperty(step):
        # NOTE(review): the surrounding error handling and the explicit
        # return statements of this predicate are not visible in this
        # excerpt.
        options = step.getProperty("options")
        if type(options) is dict:
            selected_arch = options.get("architecture", "all")
            if selected_arch != "all" and selected_arch != target:
    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key block from a base64-encoded secret key,
    rewriting "secret key" to "public key" in the comment line."""
    # NOTE(review): lines surrounding this decode are not visible in this
    # excerpt.
    seckey = base64.b64decode(seckey)
    # The public key is the key-type/ID bytes plus the public half of the
    # secret key blob.
    # NOTE(review): b64encode() returns bytes; interpolating it into a str
    # yields a "b'...'" literal in the written key file — confirm whether a
    # .decode() is applied in the non-visible lines.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
def IsSharedWorkdir(step):
    """doStepIf predicate: true when the worker advertises a shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    # NOTE(review): the early return taken when nothing has completed yet
    # is not visible in this excerpt.
    complete_at = completed[0]['complete_at']

    # Also consult the most recently started build; it may have finished
    # more recently than the newest completed request.
    # NOTE(review): the data path tuple of this get() call is not visible
    # in this excerpt.
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    def is_building(bldr):
        # Busy if it has running or stale ("old") builds.
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the definitions of the per-builder info helper and the
    # sort-key helper are only partially visible in this excerpt; the
    # fragments below belong to them.
    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item
    complete_at = date.replace(tzinfo=tzutc())

    # Builders currently building sort as most recently active.
    if is_building(bldr):
        complete_at = date.replace(tzinfo=tzutc())

    return (complete_at, bldr.name)

    # NOTE(review): this local 'results' shadows the imported
    # buildbot.process.results module within this function.
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
# Schedule least-recently-built builders first.
c['prioritizeBuilders'] = prioritizeBuilders

# Lock limiting concurrent downloads per worker.
dlLock = locks.WorkerLock("worker_dl")

# Collect all registered worker names; every builder can run on any worker.
# NOTE(review): the initialization of the workerNames list is not visible
# in this excerpt.
for worker in c['workers']:
    workerNames.append(worker.workername)

# Pseudo-builder backing the ForceScheduler; its only job is to trigger
# the per-architecture builders (steps are appended further below).
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# NOTE(review): everything from here to the Triggerable scheduler below
# runs inside a per-architecture loop whose 'for arch in ...' header is not
# visible in this excerpt; arch[0] is the architecture name, arch[1] a
# "target/subtarget" string. Several step argument lists are truncated.
ts = arch[1].split('/')

factory = BuildFactory()

# setup shared work directory if required
factory.addStep(ShellCommand(
    description = "Setting up shared work directory",
    command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
    haltOnFailure = True,
    doStepIf = IsSharedWorkdir))

# find number of cores
factory.addStep(SetProperty(
    description = "Finding number of CPUs",
    command = ["nproc"]))

# Fetch the master-side cleanup helper onto the worker.
factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/cleanup.sh',
    workerdest = "../cleanup.sh",

factory.addStep(ShellCommand(
    description = "Cleaning previous builds",
    command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Cleaning work area",
    command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
    haltOnFailure = True,

# expire tree if needed
# NOTE(review): the 'if' branch this 'elif' pairs with is not visible in
# this excerpt.
elif tree_expire > 0:
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/expire.sh',
        workerdest = "../expire.sh",

    factory.addStep(ShellCommand(
        description = "Checking for build tree expiry",
        command = ["./expire.sh", str(tree_expire)],
        haltOnFailure = True,
# Download, unpack and refresh the per-target SDK used for building
# packages. (Runs inside the per-architecture loop; see note above the
# first factory step.)
factory.addStep(ShellCommand(
    description = "Preparing SDK directory",
    command = ["mkdir", "-p", "sdk"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "downloadsdk",
    description = "Downloading SDK archive",
    command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
    env={'RSYNC_PASSWORD': rsync_sdk_key},
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Unpacking SDK archive",
    command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
    haltOnFailure = True))

# rsync --checksum keeps file mtimes stable for unchanged files.
factory.addStep(ShellCommand(
    description = "Updating SDK",
    command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
    haltOnFailure = True))

# Drop absolute host-tool symlinks that do not point into /bin or /usr/bin.
factory.addStep(ShellCommand(
    name = "cleancmdlinks",
    description = "Sanitizing host command symlinks",
    command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
    haltOnFailure = True))

# Tiny makefile that prints the SDK's VERSION_NUMBER.
factory.addStep(StringDownload(
    name = "writeversionmk",
    s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
    workerdest = "sdk/getversion.mk",

factory.addStep(SetProperty(
    property = "release_version",
    description = "Finding SDK release version",
    workdir = "build/sdk",
    command = ["make", "-f", "getversion.mk"]))
# Install signing material and prepare the SDK for feed builds. (Runs
# inside the per-architecture loop.)
# If a usign key is configured, install the derived public key plus
# placeholder private key/certificate files expected by the build system;
# real signing happens on the master later.
if usign_key is not None:
    factory.addStep(StringDownload(
        name = "dlkeybuildpub",
        s = UsignSec2Pub(usign_key, usign_comment),
        workerdest = "sdk/key-build.pub",

    factory.addStep(StringDownload(
        s = "# fake private key",
        workerdest = "sdk/key-build",

    factory.addStep(StringDownload(
        name = "dlkeybuilducert",
        s = "# fake certificate",
        workerdest = "sdk/key-build.ucert",

# Share one download cache across builds via $HOME/dl.
factory.addStep(ShellCommand(
    description = "Preparing download directory",
    command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    description = "Preparing SDK configuration",
    workdir = "build/sdk",
    command = ["sh", "-c", "rm -f .config && make defconfig"]))

factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/ccache.sh',
    workerdest = 'sdk/ccache.sh',

factory.addStep(ShellCommand(
    description = "Preparing ccache",
    workdir = "build/sdk",
    command = ["./ccache.sh"],
    haltOnFailure = True))

# Use full git clones for feeds so exact revisions can be pinned later.
factory.addStep(ShellCommand(
    name = "patchfeedsconfgitfull",
    description = "Patching feeds.conf to use src-git-full",
    workdir = "build/sdk",
    command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
    haltOnFailure = True))
# Update/install feeds and build all packages. (Runs inside the
# per-architecture loop; the 'if git_ssh:' guards around the SSH key
# steps are not visible in this excerpt.)
factory.addStep(StringDownload(
    name = "dlgitclonekey",
    workerdest = "../git-clone.key",

factory.addStep(ShellCommand(
    name = "patchfeedsconf",
    description = "Patching feeds.conf to use SSH cloning",
    workdir = "build/sdk",
    command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
    haltOnFailure = True))

# Point git at the downloaded deploy key when SSH cloning is enabled.
factory.addStep(ShellCommand(
    name = "updatefeeds",
    description = "Updating feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "update", "-f"],
    env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "rmfeedsconf",
    description = "Removing feeds.conf",
    workdir = "build/sdk",
    command=["rm", "feeds.conf"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "installfeeds",
    description = "Installing feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "install", "-a"],
    haltOnFailure = True))

# Best effort: stale failure logs must not fail the build.
factory.addStep(ShellCommand(
    description = "Clearing failure logs",
    workdir = "build/sdk",
    command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

# The actual package build; job count derives from nproc (see GetNumJobs).
factory.addStep(ShellCommand(
    description = "Building packages",
    workdir = "build/sdk",
    command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
    env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
    haltOnFailure = True))

# Record the exact feed revisions used, next to the built packages.
factory.addStep(ShellCommand(
    name = "mkfeedsconf",
    description = "Generating pinned feeds.conf",
    workdir = "build/sdk",
    command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# Master-side signing: pack the package indexes, upload them to the
# master, sign there (signall.sh), and download the results back. (Runs
# inside the per-architecture loop; several argument lists are truncated.)
if ini.has_option("gpg", "key") or usign_key is not None:
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],

    factory.addStep(ShellCommand(
        description = "Packing files to sign",
        workdir = "build/sdk",
        command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

    factory.addStep(FileUpload(
        workersrc = "sdk/sign.tar.gz",
        masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),

    factory.addStep(MasterShellCommand(
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

    factory.addStep(FileDownload(
        mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
        workerdest = "sdk/sign.tar.gz",

    factory.addStep(ShellCommand(
        description = "Unpacking signed files",
        workdir = "build/sdk",
        command = ["tar", "-xzf", "sign.tar.gz"],
# Upload built packages and failure logs to the rsync server. (Runs
# inside the per-architecture loop; several argument lists are truncated.)
# First create only this architecture's directory on the remote side.
factory.addStep(ShellCommand(
    name = "uploadprepare",
    description = "Preparing package directory",
    workdir = "build/sdk",
    command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# --delay-updates + --partial-dir makes the remote update near-atomic.
factory.addStep(ShellCommand(
    name = "packageupload",
    description = "Uploading package files",
    workdir = "build/sdk",
    command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Preparing log directory",
    workdir = "build/sdk",
    command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# Collect the logs of packages listed in error.txt; all log handling is
# best effort and never fails the build.
factory.addStep(ShellCommand(
    description = "Finding failure logs",
    workdir = "build/sdk/logs/package/feeds",
    command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

factory.addStep(ShellCommand(
    description = "Collecting failure logs",
    workdir = "build/sdk",
    command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

factory.addStep(ShellCommand(
    description = "Uploading failure logs",
    workdir = "build/sdk",
    command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,
# Optionally mirror freshly downloaded source archives, report disk usage,
# then register the builder and its trigger. (Runs inside the
# per-architecture loop; several argument lists are truncated.)
if rsync_src_url is not None:
    # Only archives newer than the SDK download (i.e. fetched by this
    # build) are uploaded.
    factory.addStep(ShellCommand(
        description = "Finding source archives to upload",
        workdir = "build/sdk",
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        workdir = "build/sdk",
        command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
            WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,

# Purely informational disk-usage reports.
factory.addStep(ShellCommand(
    description = "Reporting disk usage",
    command=["df", "-h", "."],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

factory.addStep(ShellCommand(
    description = "Reporting estimated file space usage",
    command=["du", "-sh", "."],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

# Register the per-architecture builder plus a Triggerable scheduler, and
# hook it into the force-build pseudo-builder.
c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % arch[0],
    description = "Triggering %s build" % arch[0],
    schedulerNames = [ "trigger_%s" % arch[0] ],
    set_properties = { "reason": Property("reason") },
    doStepIf = IsArchitectureSelected(arch[0])
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI: bind address/port and enabled views from the INI file.
# NOTE(review): the opening of the c['www'] dictionary literal is not
# visible in this excerpt.
if ini.has_option("phase2", "status_bind"):
    'port': ini.get("phase2", "status_bind"),
    'waterfall_view': True,
    'console_view': True,

    # Optional basic auth; the configured user gets the "admins" role,
    # which gates all control endpoints.
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.
c['buildbotURL'] = buildbot_url

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing c['db'] = { ... } braces are not visible in
# this excerpt.
'db_url' : "sqlite:///state.sqlite",

# Opt out of Buildbot's anonymous usage reporting.
c['buildbotNetUsageData'] = None