2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Record our PID so the process can be identified/stopped; only written when
# twistd has not already created its own pid file.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

# All master settings come from an INI file; BUILDMASTER_CONFIG overrides the path.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this buildmaster (mandatory setting).
buildbot_url = ini.get("phase2", "buildbot_url")
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional overrides for the phase2 listen port and persistence behaviour.
# NOTE(review): the defaults for worker_port/persistent (and the c['workers']
# list initialisation) are set on lines not shown in this excerpt - confirm.
if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")
# Register every "worker <x>" INI section whose phase option equals 2.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # shared_wd: worker keeps one shared build tree across builders
            sl_props = { 'shared_wd': True }

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")

            # max_builds=1: at most one concurrent build per worker
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master host:port setting).
c['protocols'] = {'pb': {'port': worker_port}}

# Collapse compatible queued build requests into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the closing arguments/paren of this call are not visible here.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Absolute paths for the master-side work area and helper scripts.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination for built binaries (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync mirror for source tarballs.
# NOTE(review): the None defaults for rsync_src_url/rsync_src_key/usign_key
# appear to be set on lines not shown in this excerpt - confirm.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Filename glob matching the SDK archive to fetch.
rsync_sdk_pat = "openwrt-sdk-*.tar.*"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Options common to every rsync invocation: IPv4 only, verbose, 2 min I/O timeout.
rsync_defopts = ["-4", "-v", "--timeout=120"]

# Source repository and branch this phase builds from.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key; default key comment is derived from the branch name.
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Maintain a shallow master-side clone of the source tree, used only to
# extract metadata (architectures, feeds) - never built here.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Ask the tree which package architectures exist; output is one
# "archname target/subtarget ..." record per line.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
# NOTE(review): the read loop and the archnames list initialisation around
# these three lines are not visible in this excerpt.
line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])

# maps feed repository URL -> branch name, filled by parse_feed_entry below
feedbranches = dict()

c['change_source'] = []
def parse_feed_entry(line):
    """Parse one feeds.conf-style line and register a change source for it.

    For "src-git*" entries this records the feed's branch in the module-level
    `feedbranches` map and appends a GitPoller (5 min poll interval) to
    c['change_source'].  Any other line - including empty or comment lines -
    is ignored.

    :param line: one raw text line from `make val.BASE_FEED` output or
        feeds.conf.default
    """
    parts = line.strip().split()
    # guard against blank/comment lines: parts[0] would raise IndexError
    if parts and parts[0].startswith("src-git"):
        # the URL field may carry a branch as "url;branch"; default to master
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# The effective feed list is the SDK's BASE_FEED (queried via make) plus the
# tree's feeds.conf.default; each entry becomes a GitPoller change source.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
# NOTE(review): the loop constructs around these reads (while over make
# output, for over the file) are not visible in this excerpt.
line = make.stdout.readline()
parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    parse_feed_entry(line)

# A master without change sources is misconfigured - fail loudly.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Automatic scheduler: start all per-arch builders when a watched feed branch
# changes (only changes on the branch we poll for that repository count).
# NOTE(review): several closing brackets/arguments of these scheduler
# definitions are not visible in this excerpt.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))

# Manual scheduler: the 00_force_build builder fans out to selected arches.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],
    # codebase fields are fixed/empty - the build source is configured above
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")
    reason = util.StringParameter(
        default = "Trigger build",
    # "architecture" selects one arch or "all"; checked by IsArchitectureSelected
    util.NestedParameter(
        label="Build Options",
        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames
257 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
258 # what steps, and which workers can execute them. Note that any particular build will
259 # only take place on one worker.
def GetDirectorySuffix(props):
    """Return the upload-directory suffix "-MM.mm" for a release build.

    Reads the "release_version" property (e.g. "21.02.3", "21.02.0-rc1" or
    "23.05-SNAPSHOT") and reduces it to the major.minor part.  Returns the
    empty string when the property is unset or does not look like a release
    version, so snapshot builds upload to the unsuffixed path.
    """
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # match() returns None for unparsable versions - don't dereference it
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    # fallback: no suffix (an implicit None would corrupt the rsync URL)
    return ""
# NOTE(review): this appears to be the body of the renderer used as
# CCACHE_BASEDIR (GetCwd, per its use in the compile step below); its "def"
# line and else-branch are not visible in this excerpt - confirm upstream.
if props.hasProperty("builddir"):
    return props["builddir"]
elif props.hasProperty("workdir"):
    return props["workdir"]
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that tests whether a forced build selected
    `target` (or "all") via the force scheduler's "architecture" option."""
    def CheckArchitectureProperty(step):
        # "options" is populated by the NestedParameter of the ForceScheduler
        options = step.getProperty("options")
        if isinstance(options, dict):
            selected_arch = options.get("architecture", "all")
            # NOTE(review): the True/False return statements of this
            # predicate are not visible in this excerpt.
            if selected_arch != "all" and selected_arch != target:
    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign/signify public key string from a base64 secret key.

    The public key consists of the comment line (with a trailing "secret key"
    rewritten to "public key") followed by the base64 of the algorithm id,
    key number and public key bytes extracted from the secret key blob.

    :param seckey: base64-encoded usign secret key
    :param comment: comment line to reuse for the public key
    :returns: two-line public key string, or None if seckey is not valid base64
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # malformed key material - callers treat None as "no key configured"
        return None

    # b64encode() returns bytes; decode it so the result is a clean str and
    # not a "b'...'" repr embedded in the key file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
def IsSharedWorkdir(step):
    # doStepIf predicate: run the shared-workdir setup only when the worker
    # was registered with a truthy 'shared_wd' property.
    shared = step.getProperty("shared_wd")
    return bool(shared)
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    # NOTE(review): several lines of this coroutine (empty-result guards, the
    # builds path tuple, the fallback returns) are not visible in this excerpt.
    bldrid = yield bldr.getBuilderId()
    # newest completed, non-SKIPPED build request, most recent first
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    complete_at = completed[0]['complete_at']

    # the most recently started build may have finished even later than the
    # newest completed request - prefer its completion time if so
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """
    # NOTE(review): the "def" lines of the bldr_info/bldr_sort helpers and
    # several fallback branches are not visible in this excerpt.
    def is_building(bldr):
        # a builder with a current or gracefully-finishing build counts as busy
        return bool(bldr.building) or bool(bldr.old_building)

    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item
    # normalize naive timestamps to UTC so comparisons are well-defined
    complete_at = date.replace(tzinfo=tzutc())
    if is_building(bldr):
        complete_at = date.replace(tzinfo=tzutc())
    return (complete_at, bldr.name)

    # note: this local name shadows the imported buildbot.process.results
    # module, but only within this function's scope
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)
    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
# Per-worker lock serializing bandwidth-heavy download steps.
dlLock = locks.WorkerLock("worker_dl")

# Every registered worker may run any builder.
# NOTE(review): the initialisation of workerNames is not visible here.
for worker in c['workers']:
    workerNames.append(worker.workername)

# The force builder only hosts the per-arch trigger steps added further below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# NOTE(review): this and the following factory steps run inside the per-arch
# builder loop; its "for arch in ..." header is not visible in this excerpt.
# arch[1] is "target/subtarget"; split for building the SDK download path.
ts = arch[1].split('/')

factory = BuildFactory()

# setup shared work directory if required
factory.addStep(ShellCommand(
    description = "Setting up shared work directory",
    command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
    haltOnFailure = True,
    doStepIf = IsSharedWorkdir))

# find number of cores
factory.addStep(SetPropertyFromCommand(
    description = "Finding number of CPUs",
    command = ["nproc"]))
# fetch the master-side cleanup helper onto the worker
factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/cleanup.sh',
    workerdest = "../cleanup.sh",

# "full": wipe leftovers from previous builds across the whole worker
factory.addStep(ShellCommand(
    description = "Cleaning previous builds",
    command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
    haltOnFailure = True,

# "single": clean only this builder's own work area
factory.addStep(ShellCommand(
    description = "Cleaning work area",
    command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Preparing SDK directory",
    command = ["mkdir", "-p", "sdk"],
    haltOnFailure = True))
# fetch the SDK archive for this target/subtarget via rsync
factory.addStep(ShellCommand(
    name = "downloadsdk",
    description = "Downloading SDK archive",
    command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
    env={'RSYNC_PASSWORD': rsync_sdk_key},
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Unpacking SDK archive",
    command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
    haltOnFailure = True))

# sync the freshly unpacked SDK over the existing one (--checksum: only
# replace files whose content actually changed)
factory.addStep(ShellCommand(
    description = "Updating SDK",
    command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
    haltOnFailure = True))

# remove absolute host-command symlinks pointing outside /bin and /usr/bin
factory.addStep(ShellCommand(
    name = "cleancmdlinks",
    description = "Sanitizing host command symlinks",
    command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
    haltOnFailure = True))
# helper makefile that prints the tree's VERSION_NUMBER
factory.addStep(StringDownload(
    name = "writeversionmk",
    s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
    workerdest = "sdk/getversion.mk",

factory.addStep(SetPropertyFromCommand(
    property = "release_version",
    description = "Finding SDK release version",
    workdir = "build/sdk",
    command = ["make", "-f", "getversion.mk"]))

# install the real public key but only placeholder private key material -
# actual signing is done on the master (signall.sh below)
if usign_key is not None:
    factory.addStep(StringDownload(
        name = "dlkeybuildpub",
        s = UsignSec2Pub(usign_key, usign_comment),
        workerdest = "sdk/key-build.pub",

    factory.addStep(StringDownload(
        s = "# fake private key",
        workerdest = "sdk/key-build",

    factory.addStep(StringDownload(
        name = "dlkeybuilducert",
        s = "# fake certificate",
        workerdest = "sdk/key-build.ucert",
# share one download cache per worker via a $HOME/dl symlink
factory.addStep(ShellCommand(
    description = "Preparing download directory",
    command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    description = "Preparing SDK configuration",
    workdir = "build/sdk",
    command = ["sh", "-c", "rm -f .config && make defconfig"]))

factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/ccache.sh',
    workerdest = 'sdk/ccache.sh',

factory.addStep(ShellCommand(
    description = "Preparing ccache",
    workdir = "build/sdk",
    command = ["./ccache.sh"],
    haltOnFailure = True))

# refresh and install all package feeds inside the SDK
factory.addStep(ShellCommand(
    name = "updatefeeds",
    description = "Updating feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "update", "-f"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "installfeeds",
    description = "Installing feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "install", "-a"],
    haltOnFailure = True))
# best-effort: drop stale failure logs from earlier runs
factory.addStep(ShellCommand(
    description = "Clearing failure logs",
    workdir = "build/sdk",
    command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

# the actual package build; -j comes from the nproc property (default 1),
# packages are left unsigned here (signing happens on the master)
factory.addStep(ShellCommand(
    description = "Building packages",
    workdir = "build/sdk",
    command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
    env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "mkfeedsconf",
    description = "Generating pinned feeds.conf",
    workdir = "build/sdk",
    command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

# produce a sha256sums manifest over everything built for this arch
factory.addStep(ShellCommand(
    description = "Calculating checksums",
    descriptionDone="Checksums calculated",
    workdir = "build/sdk",
    command = "cd bin/packages/%s; " %(arch[0])
        + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | "
        + "sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | "
        + r"sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums",
# master-side signing: pack all Packages indexes, ship them to the master,
# sign there (signall.sh has the private keys), and unpack the result back
if ini.has_option("gpg", "key") or usign_key is not None:
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],

    factory.addStep(ShellCommand(
        description = "Packing files to sign",
        workdir = "build/sdk",
        command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

    factory.addStep(FileUpload(
        workersrc = "sdk/sign.tar.gz",
        masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),

    factory.addStep(MasterShellCommand(
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

    factory.addStep(FileDownload(
        mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
        workerdest = "sdk/sign.tar.gz",

    factory.addStep(ShellCommand(
        description = "Unpacking signed files",
        workdir = "build/sdk",
        command = ["tar", "-xzf", "sign.tar.gz"],
# download remote sha256sums to 'target-sha256sums'
# (best-effort: absent on a first upload, hence no failure flags)
factory.addStep(ShellCommand(
    name = "target-sha256sums",
    description = "Fetching remote sha256sums for arch",
    command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

# rsync wrapper with retry handling, plus a helper that diffs local against
# remote sha256sums to produce the minimal upload list
factory.addStep(FileDownload(
    mastersrc = scripts_dir + "/rsync.sh",
    workerdest = "../rsync.sh",

factory.addStep(FileDownload(
    name = "dlsha2rsyncpl",
    mastersrc = scripts_dir + "/sha2rsync.pl",
    workerdest = "../sha2rsync.pl",

factory.addStep(ShellCommand(
    description = "Building list of files to upload",
    workdir = "build/sdk",
    command = ["../../sha2rsync.pl", "../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
    haltOnFailure = True,
# create the remote per-arch directory without touching sibling arches
factory.addStep(ShellCommand(
    name = "uploadprepare",
    description = "Preparing package directory",
    workdir = "build/sdk",
    command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# upload only files named in rsynclist; --delay-updates keeps the remote
# tree consistent until the transfer finishes
factory.addStep(ShellCommand(
    name = "packageupload",
    description = "Uploading package files",
    workdir = "build/sdk",
    command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# second pass deletes remote files that no longer exist locally
# (--existing --ignore-existing: transfer nothing, only delete)
factory.addStep(ShellCommand(
    name = "packageprune",
    description = "Pruning package files",
    workdir = "build/sdk",
    command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
# mirror the per-arch faillogs directory layout on the server
factory.addStep(ShellCommand(
    description = "Preparing log directory",
    workdir = "build/sdk",
    command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# extract the names of failed packages from error.txt and list their logs
factory.addStep(ShellCommand(
    description = "Finding failure logs",
    workdir = "build/sdk/logs/package/feeds",
    command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

factory.addStep(ShellCommand(
    description = "Collecting failure logs",
    workdir = "build/sdk",
    command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,

# failure logs are informational - never fail the build over them
factory.addStep(ShellCommand(
    description = "Uploading failure logs",
    workdir = "build/sdk",
    command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = True,
# optionally mirror source archives downloaded during this build
# (only files newer than the SDK archive, i.e. fetched by this run)
if rsync_src_url is not None:
    factory.addStep(ShellCommand(
        description = "Finding source archives to upload",
        workdir = "build/sdk",
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        workdir = "build/sdk",
        command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
            Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
# purely diagnostic steps: disk usage and ccache statistics (never fatal)
factory.addStep(ShellCommand(
    description = "Reporting disk usage",
    command=["df", "-h", "."],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

factory.addStep(ShellCommand(
    description = "Reporting estimated file space usage",
    command=["du", "-sh", "."],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

factory.addStep(ShellCommand(
    description = "Reporting ccache stats",
    command=["ccache", "-s"],
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,

# register the per-arch builder plus a Triggerable scheduler for it, and
# wire a trigger step into the force builder (guarded by arch selection)
c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % arch[0],
    description = "Triggering %s build" % arch[0],
    schedulerNames = [ "trigger_%s" % arch[0] ],
    set_properties = { "reason": Property("reason") },
    doStepIf = IsArchitectureSelected(arch[0])
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# web UI: only enabled when a bind address/port is configured
# NOTE(review): the enclosing c['www'] assignment is not visible here.
if ini.has_option("phase2", "status_bind"):
    'port': ini.get("phase2", "status_bind"),
    'waterfall_view': True,
    'console_view': True,

# basic auth plus an "admins" role when credentials are configured; only
# admins may reach control endpoints
if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
    c['www']['auth'] = util.UserPasswordAuth([
        (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
    c['www']['authz'] = util.Authz(
        allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
        roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out

c['buildbotURL'] = buildbot_url

####### DB URL

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing c['db'] dict literal is not visible here.
'db_url' : "sqlite:///state.sqlite",

# opt out of sending usage data to the buildbot project
c['buildbotNetUsageData'] = None