2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Write a pid file if the daemon has not already created one, so helper
# scripts can find the running master process.
41 if not os.path.exists("twistd.pid"):
42 with open("twistd.pid", "w") as pidfile:
43 pidfile.write("{}".format(os.getpid()))
# Load the INI configuration; BUILDMASTER_CONFIG overrides the default path.
45 ini = configparser.ConfigParser()
46 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
# Externally visible URL of this buildmaster (also passed to worker-side scripts).
48 buildbot_url = ini.get("phase2", "buildbot_url")
50 # This is a sample buildmaster config file. It must be installed as
51 # 'master.cfg' in your buildmaster's base directory.
53 # This is the dictionary that the buildmaster pays attention to. We also use
54 # a shorter alias to save typing.
55 c = BuildmasterConfig = {}
59 # The 'workers' list defines the set of recognized buildworkers. Each element is
60 # a Worker object, specifying a unique worker name and password. The same
61 # worker name and password must be configured on the worker.
# Optional overrides from config.ini; the defaults for these variables are
# assigned on lines not visible in this view.
69 if ini.has_option("phase2", "port"):
70 worker_port = ini.get("phase2", "port")
72 if ini.has_option("phase2", "persistent"):
73 persistent = ini.getboolean("phase2", "persistent")
# Build-tree expiry age (seconds); 0/unset disables the expire.sh step below.
75 if ini.has_option("phase2", "expire"):
76 tree_expire = ini.getint("phase2", "expire")
# When git_ssh is enabled, feed updates rewrite https:// URLs to ssh:// and
# use git_ssh_key as the identity file (see the "updatefeeds" step).
78 if ini.has_option("general", "git_ssh"):
79 git_ssh = ini.getboolean("general", "git_ssh")
81 if ini.has_option("general", "git_ssh_key"):
82 git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "[worker ...]" section of config.ini whose phase is 2.
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
93 name = ini.get(section, "name")
94 password = ini.get(section, "password")
# Per-worker properties; shared_wd marks a single shared work directory.
95 sl_props = { 'shared_wd': False }
98 if ini.has_option(section, "builds"):
99 max_builds[name] = ini.getint(section, "builds")
# A worker limited to one concurrent build implicitly shares its workdir.
101 if max_builds[name] == 1:
102 sl_props['shared_wd'] = True
104 if ini.has_option(section, "shared_wd"):
105 sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
# NOTE(review): this compares the whole max_builds dict with 1, which is
# always True — presumably `max_builds[name] != 1` was intended. Confirm
# against the surrounding (not visible) default assignments before fixing.
106 if sl_props['shared_wd'] and (max_builds != 1):
107 raise ValueError('max_builds must be 1 with shared workdir!')
109 c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
111 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
112 # This must match the value configured into the buildworkers (with their
114 c['protocols'] = {'pb': {'port': worker_port}}
# Collapse queued requests for the same builder into one build.
117 c['collapseRequests'] = True
119 # Reduce amount of backlog data
# JanitorConfigurator prunes build logs older than the horizon.
120 c['configurators'] = [util.JanitorConfigurator(
121 logHorizon=timedelta(days=3),
127 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
128 scripts_dir = os.path.abspath("../scripts")
130 rsync_bin_url = ini.get("rsync", "binary_url")
131 rsync_bin_key = ini.get("rsync", "binary_password")
136 if ini.has_option("rsync", "source_url"):
137 rsync_src_url = ini.get("rsync", "source_url")
138 rsync_src_key = ini.get("rsync", "source_password")
142 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
144 if ini.has_option("rsync", "sdk_url"):
145 rsync_sdk_url = ini.get("rsync", "sdk_url")
147 if ini.has_option("rsync", "sdk_password"):
148 rsync_sdk_key = ini.get("rsync", "sdk_password")
150 if ini.has_option("rsync", "sdk_pattern"):
151 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
153 repo_url = ini.get("repo", "url")
154 repo_branch = "master"
156 if ini.has_option("repo", "branch"):
157 repo_branch = ini.get("repo", "branch")
160 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
162 if ini.has_option("usign", "key"):
163 usign_key = ini.get("usign", "key")
165 if ini.has_option("usign", "comment"):
166 usign_comment = ini.get("usign", "comment")
# Maintain a shallow master-side checkout of the source tree; it is only
# used to enumerate architectures and feeds, not to build.
173 if not os.path.isdir(work_dir+'/source.git'):
174 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
176 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
178 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Ask the tree for its package architectures; one builder is created per arch.
179 findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
180 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
183 line = findarches.stdout.readline()
# Each output line is "archname target/subtarget ..."; keep the arch name.
186 at = line.decode().strip().split()
188 archnames.append(at[0])
# Map feed repository URL -> branch, filled in by parse_feed_entry below.
193 feedbranches = dict()
195 c['change_source'] = []
# Parse one feeds.conf line; for src-git entries, record the branch and
# attach a GitPoller change source for that repository.
197 def parse_feed_entry(line):
198 parts = line.strip().split()
# Entry format: "src-git[-full] <name> <url>[;<branch>]".
199 if parts[0].startswith("src-git"):
201 url = parts[2].strip().split(';')
202 branch = url[1] if len(url) > 1 else 'master'
203 feedbranches[url[0]] = branch
# Poll every 5 minutes; one working clone per feed under the master cwd.
204 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Discover the base feed from the SDK makefiles (val.BASE_FEED prints it),
# then add the regular feeds from feeds.conf.default.
206 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
207 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
209 line = make.stdout.readline()
211 parse_feed_entry(str(line, 'utf-8'))
213 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
215 parse_feed_entry(line)
# Without any change sources the master would never trigger builds.
217 if len(c['change_source']) == 0:
218 log.err("FATAL ERROR: no change_sources defined, aborting!")
223 # Configure the Schedulers, which decide how to react to incoming changes. In this
224 # case, just kick off a 'basebuild' build
# Build all architectures when a change lands on a tracked feed branch.
227 c['schedulers'].append(SingleBranchScheduler(
# Only accept changes whose branch matches the branch we poll for that repo.
229 change_filter = filter.ChangeFilter(
230 filter_fn = lambda change: change.branch == feedbranches[change.repository]
232 treeStableTimer = 60,
233 builderNames = archnames))
# Manual trigger via the web UI; actual arch builds are fired through the
# 00_force_build builder's Trigger steps (see end of the factory loop).
235 c['schedulers'].append(ForceScheduler(
237 buttonName = "Force builds",
238 label = "Force build details",
239 builderNames = [ "00_force_build" ],
# Fix the codebase fields so the form does not ask for them.
242 util.CodebaseParameter(
244 label = "Repository",
245 branch = util.FixedParameter(name = "branch", default = ""),
246 revision = util.FixedParameter(name = "revision", default = ""),
247 repository = util.FixedParameter(name = "repository", default = ""),
248 project = util.FixedParameter(name = "project", default = "")
252 reason = util.StringParameter(
255 default = "Trigger build",
261 util.NestedParameter(
263 label="Build Options",
# "all" (the default) triggers every architecture builder.
266 util.ChoiceStringParameter(
267 name = "architecture",
268 label = "Build architecture",
270 choices = [ "all" ] + archnames
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
# Property renderer: map a release_version like "21.02.3" or "21.02-SNAPSHOT"
# to a "-MM.mm" directory suffix for the upload paths ("" for master builds).
284 def GetDirectorySuffix(props):
285 verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
286 if props.hasProperty("release_version"):
287 m = verpat.match(props["release_version"])
# NOTE(review): the guard checking that the match succeeded is not visible
# in this view — confirm `m` is tested before the groups are used.
289 return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
    """Renderer: parallel make jobs for the current build, as a string.

    Divides the worker's reported CPU count (the ``nproc`` property, set by
    the SetPropertyFromCommand step) by the worker's configured ``max_builds``
    so concurrent builds on the same worker share the CPUs fairly.  The value
    is interpolated into ``make -j%s`` by the compile step, so it must be a
    plain integer string.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Floor division: '/' is true division in Python 3 and would yield
        # e.g. "4.0", producing an invalid "make -j4.0" invocation.
        return str(int(props["nproc"]) // max_builds[props["workername"]])
    # NOTE(review): the fallback branch for missing properties is outside
    # this view; callers should expect a default such as "1" there.
# Fragment of the GetCwd renderer (its def line is not visible here):
# prefer the build directory, fall back to the worker's workdir.  Used to
# anchor CCACHE_BASEDIR and the git-clone.key path in later steps.
301 if props.hasProperty("builddir"):
302 return props["builddir"]
303 elif props.hasProperty("workdir"):
304 return props["workdir"]
# Factory for doStepIf predicates: the returned callable lets a Trigger
# step run only when the force-build form selected this target architecture
# (or "all").
308 def IsArchitectureSelected(target):
309 def CheckArchitectureProperty(step):
# "options" is the NestedParameter dict from the ForceScheduler form.
311 options = step.getProperty("options")
312 if type(options) is dict:
313 selected_arch = options.get("architecture", "all")
# NOTE(review): the return statements of this predicate are not visible in
# this view — presumably False here and True otherwise.
314 if selected_arch != "all" and selected_arch != target:
321 return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign *public* key file content from a base64 secret key.

    The slices below pick the algorithm tag (bytes 0..2) plus what is
    presumably the key id and public key material (bytes 32..40 and 72..)
    out of the decoded secret key — TODO confirm against the usign format.
    Returns the two-line public key file: the comment (with "secret key"
    rewritten to "public key") and the base64-encoded key data.
    """
    seckey = base64.b64decode(seckey)
    # b64encode() returns bytes on Python 3; decode it so str.format() emits
    # "comment\nBASE64" instead of "comment\nb'BASE64'" into key-build.pub.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii"))
def IsSharedWorkdir(step):
    # doStepIf predicate: run the shared-workdir setup step only for workers
    # configured with a truthy "shared_wd" property.
    shared = step.getProperty("shared_wd")
    return bool(shared)
335 @defer.inlineCallbacks
336 def getNewestCompleteTime(bldr):
337 """Returns the complete_at of the latest completed and not SKIPPED
338 build request for this builder, or None if there are no such build
339 requests. We need to filter out SKIPPED requests because we're
340 using collapseRequests=True which is unfortunately marking all
341 previous requests as complete when new buildset is created.
343 @returns: datetime instance or None, via Deferred
# Newest completed, non-skipped build request for this builder.
346 bldrid = yield bldr.getBuilderId()
347 completed = yield bldr.master.data.get(
348 ('builders', bldrid, 'buildrequests'),
350 resultspec.Filter('complete', 'eq', [True]),
351 resultspec.Filter('results', 'ne', [results.SKIPPED]),
353 order=['-complete_at'], limit=1)
# NOTE(review): the empty-result guard between these queries is not visible
# in this view — presumably it returns None when `completed` is empty.
357 complete_at = completed[0]['complete_at']
# Also consider the most recently started build; it may have completed
# later than the newest completed *request*.
359 last_build = yield bldr.master.data.get(
362 resultspec.Filter('builderid', 'eq', [bldrid]),
364 order=['-started_at'], limit=1)
366 if last_build and last_build[0]:
367 last_complete_at = last_build[0]['complete_at']
368 if last_complete_at and (last_complete_at > complete_at):
369 return last_complete_at
373 @defer.inlineCallbacks
374 def prioritizeBuilders(master, builders):
375 """Returns sorted list of builders by their last timestamp of completed and
378 @returns: list of sorted builders
# A builder currently running (or cleaning up) a build is de-prioritized.
381 def is_building(bldr):
382 return bool(bldr.building) or bool(bldr.old_building)
# Pair each builder with its newest completion time (async lookup).
385 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
386 d.addCallback(lambda complete_at: (complete_at, bldr))
390 (complete_at, bldr) = item
# Substitute a sentinel timezone-aware timestamp when none is available
# or while the builder is busy, so all sort keys stay comparable.
394 complete_at = date.replace(tzinfo=tzutc())
396 if is_building(bldr):
398 complete_at = date.replace(tzinfo=tzutc())
400 return (complete_at, bldr.name)
# NOTE(review): the local `results` shadows the imported buildbot.process
# results module inside this function (harmless here, but easy to trip on).
402 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
403 results.sort(key=bldr_sort)
406 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
408 return [r[1] for r in results]
# Least-recently-finished builders get picked first.
410 c['prioritizeBuilders'] = prioritizeBuilders
# Worker-scoped lock serializing download steps per worker.
413 dlLock = locks.WorkerLock("worker_dl")
417 for worker in c['workers']:
418 workerNames.append(worker.workername)
# The force builder only holds Trigger steps (added per-arch below).
420 force_factory = BuildFactory()
422 c['builders'].append(BuilderConfig(
423 name = "00_force_build",
424 workernames = workerNames,
425 factory = force_factory))
# Per-architecture builder factory (body of the `for arch in ...` loop whose
# header is not visible here).  arch[0] is the arch name, arch[1] a
# representative "target/subtarget" used to locate a matching SDK.
428 ts = arch[1].split('/')
430 factory = BuildFactory()
432 # setup shared work directory if required
433 factory.addStep(ShellCommand(
435 description = "Setting up shared work directory",
436 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
438 haltOnFailure = True,
439 doStepIf = IsSharedWorkdir))
441 # find number of cores
# Stores the result in the "nproc" property consumed by GetNumJobs.
442 factory.addStep(SetPropertyFromCommand(
445 description = "Finding number of CPUs",
446 command = ["nproc"]))
449 factory.addStep(FileDownload(
450 mastersrc = scripts_dir + '/cleanup.sh',
451 workerdest = "../cleanup.sh",
# Full cleanup wipes everything from previous builds; "single" only the
# current work area.  (Their guarding conditionals are not visible here.)
455 factory.addStep(ShellCommand(
457 description = "Cleaning previous builds",
458 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
460 haltOnFailure = True,
463 factory.addStep(ShellCommand(
465 description = "Cleaning work area",
466 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
468 haltOnFailure = True,
471 # expire tree if needed
472 elif tree_expire > 0:
473 factory.addStep(FileDownload(
474 mastersrc = scripts_dir + '/expire.sh',
475 workerdest = "../expire.sh",
478 factory.addStep(ShellCommand(
480 description = "Checking for build tree expiry",
481 command = ["./expire.sh", str(tree_expire)],
483 haltOnFailure = True,
486 factory.addStep(ShellCommand(
488 description = "Preparing SDK directory",
489 command = ["mkdir", "-p", "sdk"],
490 haltOnFailure = True))
# Fetch the per-target SDK tarball and refresh the local sdk/ tree in place
# (rsync --checksum keeps unchanged files, preserving ccache/feeds state).
492 factory.addStep(ShellCommand(
493 name = "downloadsdk",
494 description = "Downloading SDK archive",
495 command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
496 env={'RSYNC_PASSWORD': rsync_sdk_key},
497 haltOnFailure = True,
500 factory.addStep(ShellCommand(
502 description = "Unpacking SDK archive",
503 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
504 haltOnFailure = True))
506 factory.addStep(ShellCommand(
508 description = "Updating SDK",
509 command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
510 haltOnFailure = True))
# Drop absolute host-tool symlinks that don't point into /bin or /usr/bin —
# they would dangle or leak paths from the SDK build host.
512 factory.addStep(ShellCommand(
513 name = "cleancmdlinks",
514 description = "Sanitizing host command symlinks",
515 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
516 haltOnFailure = True))
# Ask the SDK itself for its release version (sets the release_version
# property consumed by GetDirectorySuffix).
518 factory.addStep(StringDownload(
519 name = "writeversionmk",
520 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
521 workerdest = "sdk/getversion.mk",
524 factory.addStep(SetPropertyFromCommand(
526 property = "release_version",
527 description = "Finding SDK release version",
528 workdir = "build/sdk",
529 command = ["make", "-f", "getversion.mk"]))
# Install the real public key but only placeholder private material; actual
# signing happens on the master (see the signall.sh steps below).
532 if usign_key is not None:
533 factory.addStep(StringDownload(
534 name = "dlkeybuildpub",
535 s = UsignSec2Pub(usign_key, usign_comment),
536 workerdest = "sdk/key-build.pub",
539 factory.addStep(StringDownload(
541 s = "# fake private key",
542 workerdest = "sdk/key-build",
545 factory.addStep(StringDownload(
546 name = "dlkeybuilducert",
547 s = "# fake certificate",
548 workerdest = "sdk/key-build.ucert",
# Share one download cache across all builds on this worker via $HOME/dl.
551 factory.addStep(ShellCommand(
553 description = "Preparing download directory",
554 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
555 haltOnFailure = True))
# Configure the SDK, set up ccache, then update/install feeds and compile.
557 factory.addStep(ShellCommand(
559 description = "Preparing SDK configuration",
560 workdir = "build/sdk",
561 command = ["sh", "-c", "rm -f .config && make defconfig"]))
563 factory.addStep(FileDownload(
564 mastersrc = scripts_dir + '/ccache.sh',
565 workerdest = 'sdk/ccache.sh',
568 factory.addStep(ShellCommand(
570 description = "Preparing ccache",
571 workdir = "build/sdk",
572 command = ["./ccache.sh"],
573 haltOnFailure = True))
# When git_ssh is enabled: install the deploy key and rewrite feed URLs
# from https:// to ssh:// for authenticated fetches.
576 factory.addStep(StringDownload(
577 name = "dlgitclonekey",
579 workerdest = "../git-clone.key",
582 factory.addStep(ShellCommand(
583 name = "patchfeedsconf",
584 description = "Patching feeds.conf",
585 workdir = "build/sdk",
586 command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
587 haltOnFailure = True))
589 factory.addStep(ShellCommand(
590 name = "updatefeeds",
591 description = "Updating feeds",
592 workdir = "build/sdk",
593 command = ["./scripts/feeds", "update", "-f"],
594 env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
595 haltOnFailure = True))
# Remove the patched feeds.conf again so the key/URL rewrite leaves no trace.
598 factory.addStep(ShellCommand(
599 name = "rmfeedsconf",
600 description = "Removing feeds.conf",
601 workdir = "build/sdk",
602 command=["rm", "feeds.conf"],
603 haltOnFailure = True))
605 factory.addStep(ShellCommand(
606 name = "installfeeds",
607 description = "Installing feeds",
608 workdir = "build/sdk",
609 command = ["./scripts/feeds", "install", "-a"],
610 haltOnFailure = True))
# Best-effort: stale failure logs from the previous run must not leak into
# this build's faillog upload.
612 factory.addStep(ShellCommand(
614 description = "Clearing failure logs",
615 workdir = "build/sdk",
616 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
617 haltOnFailure = False,
618 flunkOnFailure = False,
619 warnOnFailure = True,
# The main compile: parallelism from GetNumJobs, per-package logs enabled,
# signing disabled on the worker (done centrally on the master).
622 factory.addStep(ShellCommand(
624 description = "Building packages",
625 workdir = "build/sdk",
627 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
628 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
629 haltOnFailure = True))
631 factory.addStep(ShellCommand(
632 name = "mkfeedsconf",
633 description = "Generating pinned feeds.conf",
634 workdir = "build/sdk",
635 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# Signing round-trip: pack the Packages indexes, upload them to the master,
# sign there with signall.sh, download and unpack the signed result.
637 if ini.has_option("gpg", "key") or usign_key is not None:
638 factory.addStep(MasterShellCommand(
639 name = "signprepare",
640 description = "Preparing temporary signing directory",
641 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
645 factory.addStep(ShellCommand(
647 description = "Packing files to sign",
648 workdir = "build/sdk",
649 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
653 factory.addStep(FileUpload(
654 workersrc = "sdk/sign.tar.gz",
655 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
659 factory.addStep(MasterShellCommand(
661 description = "Signing files",
662 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
663 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
667 factory.addStep(FileDownload(
668 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
669 workerdest = "sdk/sign.tar.gz",
673 factory.addStep(ShellCommand(
675 description = "Unpacking signed files",
676 workdir = "build/sdk",
677 command = ["tar", "-xzf", "sign.tar.gz"],
# Upload: first create only this arch's directory on the server (include
# just /<arch>/, exclude its contents and everything else)...
681 factory.addStep(ShellCommand(
682 name = "uploadprepare",
683 description = "Preparing package directory",
684 workdir = "build/sdk",
685 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
686 env={'RSYNC_PASSWORD': rsync_bin_key},
687 haltOnFailure = True,
# ...then sync the packages with --delete and atomic --delay-updates.
691 factory.addStep(ShellCommand(
692 name = "packageupload",
693 description = "Uploading package files",
694 workdir = "build/sdk",
695 command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
696 env={'RSYNC_PASSWORD': rsync_bin_key},
697 haltOnFailure = True,
# Failure logs follow the same prepare/sync pattern, but never fail the build.
701 factory.addStep(ShellCommand(
703 description = "Preparing log directory",
704 workdir = "build/sdk",
705 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
706 env={'RSYNC_PASSWORD': rsync_bin_key},
707 haltOnFailure = True,
711 factory.addStep(ShellCommand(
713 description = "Finding failure logs",
714 workdir = "build/sdk/logs/package/feeds",
715 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
716 haltOnFailure = False,
717 flunkOnFailure = False,
718 warnOnFailure = True,
721 factory.addStep(ShellCommand(
723 description = "Collecting failure logs",
724 workdir = "build/sdk",
725 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
726 haltOnFailure = False,
727 flunkOnFailure = False,
728 warnOnFailure = True,
731 factory.addStep(ShellCommand(
733 description = "Uploading failure logs",
734 workdir = "build/sdk",
735 command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
736 env={'RSYNC_PASSWORD': rsync_bin_key},
737 haltOnFailure = False,
738 flunkOnFailure = False,
739 warnOnFailure = True,
# Optionally mirror freshly downloaded source tarballs (newer than the SDK
# archive, so only this build's additions) to the sources server.
743 if rsync_src_url is not None:
744 factory.addStep(ShellCommand(
746 description = "Finding source archives to upload",
747 workdir = "build/sdk",
748 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
752 factory.addStep(ShellCommand(
753 name = "sourceupload",
754 description = "Uploading source archives",
755 workdir = "build/sdk",
756 command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
757 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
758 env={'RSYNC_PASSWORD': rsync_src_key},
759 haltOnFailure = False,
760 flunkOnFailure = False,
761 warnOnFailure = True,
# Informational trailing steps; never affect the build result.
765 factory.addStep(ShellCommand(
767 description = "Reporting disk usage",
768 command=["df", "-h", "."],
770 haltOnFailure = False,
771 flunkOnFailure = False,
772 warnOnFailure = False,
776 factory.addStep(ShellCommand(
778 description = "Reporting estimated file space usage",
779 command=["du", "-sh", "."],
781 haltOnFailure = False,
782 flunkOnFailure = False,
783 warnOnFailure = False,
787 factory.addStep(ShellCommand(
789 description = "Reporting ccache stats",
790 command=["ccache", "-s"],
792 haltOnFailure = False,
793 flunkOnFailure = False,
794 warnOnFailure = False,
# Register this arch's builder, its Triggerable scheduler, and the matching
# Trigger step on the force builder (gated by the architecture choice).
798 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
800 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
801 force_factory.addStep(steps.Trigger(
802 name = "trigger_%s" % arch[0],
803 description = "Triggering %s build" % arch[0],
804 schedulerNames = [ "trigger_%s" % arch[0] ],
805 set_properties = { "reason": Property("reason") },
806 doStepIf = IsArchitectureSelected(arch[0])
809 ####### STATUS TARGETS
811 # 'status' is a list of Status targets. The results of each build will be
812 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
813 # including web pages, email senders, and IRC bots.
# Enable the web UI only when a bind address/port is configured.
815 if ini.has_option("phase2", "status_bind"):
817 'port': ini.get("phase2", "status_bind"),
819 'waterfall_view': True,
820 'console_view': True,
# Optional single-user auth; that user gets the "admins" role with full
# control-endpoint access.
825 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
826 c['www']['auth'] = util.UserPasswordAuth([
827 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
829 c['www']['authz'] = util.Authz(
830 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
831 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
834 ####### PROJECT IDENTITY
836 # the 'title' string will appear at the top of this buildbot
837 # installation's html.WebStatus home page (linked to the
838 # 'titleURL') and is embedded in the title of the waterfall HTML page.
840 c['title'] = ini.get("general", "title")
841 c['titleURL'] = ini.get("general", "title_url")
843 # the 'buildbotURL' string should point to the location where the buildbot's
844 # internal web server (usually the html.WebStatus page) is visible. This
845 # typically uses the port number set in the Waterfall 'status' entry, but
846 # with an externally-visible host name which the buildbot cannot figure out
849 c['buildbotURL'] = buildbot_url
854 # This specifies what database buildbot uses to store its state. You can leave
855 # this at its default for all but the largest installations.
# NOTE(review): the enclosing `c['db'] = {` line is not visible in this view.
856 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymous usage data to the buildbot project.
859 c['buildbotNetUsageData'] = None