2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Record this process's PID once so the usual twistd tooling can find and
# stop the master; an already-existing twistd.pid is left untouched.
41 if not os.path.exists("twistd.pid"):
42 with open("twistd.pid", "w") as pidfile:
43 pidfile.write("{}".format(os.getpid()))
# All deployment-specific settings live in an INI file; its path can be
# overridden via the BUILDMASTER_CONFIG environment variable.
45 ini = configparser.ConfigParser()
46 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
48 buildbot_url = ini.get("phase2", "buildbot_url")
50 # This is a sample buildmaster config file. It must be installed as
51 # 'master.cfg' in your buildmaster's base directory.
53 # This is the dictionary that the buildmaster pays attention to. We also use
54 # a shorter alias to save typing.
55 c = BuildmasterConfig = {}
59 # The 'workers' list defines the set of recognized buildworkers. Each element is
60 # a Worker object, specifying a unique worker name and password. The same
61 # worker name and password must be configured on the worker.
# Optional overrides from config.ini; the corresponding defaults are assigned
# on lines not visible in this extract — NOTE(review): confirm each variable
# (worker_port, persistent, tree_expire, git_ssh, git_ssh_key) has a default
# before these conditional reads.
69 if ini.has_option("phase2", "port"):
70 worker_port = ini.get("phase2", "port")
72 if ini.has_option("phase2", "persistent"):
73 persistent = ini.getboolean("phase2", "persistent")
75 if ini.has_option("phase2", "expire"):
76 tree_expire = ini.getint("phase2", "expire")
78 if ini.has_option("general", "git_ssh"):
79 git_ssh = ini.getboolean("general", "git_ssh")
81 if ini.has_option("general", "git_ssh_key"):
82 git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "[worker ...]" section that has credentials and is tagged
# as a phase-2 worker.
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
93 name = ini.get(section, "name")
94 password = ini.get(section, "password")
95 sl_props = { 'shared_wd': False }
98 if ini.has_option(section, "builds"):
99 max_builds[name] = ini.getint(section, "builds")
# A worker limited to one concurrent build can safely share one
# work directory across builders.
101 if max_builds[name] == 1:
102 sl_props['shared_wd'] = True
104 if ini.has_option(section, "shared_wd"):
105 sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
# NOTE(review): max_builds is used as a mapping elsewhere
# (max_builds[name] on the lines above/below), so
# "max_builds != 1" compares a dict with an int and is always
# true — this raises whenever shared_wd is enabled. It likely
# should be "max_builds[name] != 1"; verify against intent.
106 if sl_props['shared_wd'] and (max_builds != 1):
107 raise ValueError('max_builds must be 1 with shared workdir!')
109 c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
111 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
112 # This must match the value configured into the buildworkers (with their
114 c['protocols'] = {'pb': {'port': worker_port}}
# Collapse compatible pending build requests into a single build.
117 c['collapseRequests'] = True
119 # Reduce amount of backlog data
# JanitorConfigurator prunes old log data; the call is truncated in this
# extract (its closing arguments/parenthesis are on lines not visible here).
120 c['configurators'] = [util.JanitorConfigurator(
121 logHorizon=timedelta(days=3),
125 ####### CHANGESOURCES
# Working/scripts directories plus the rsync endpoints used to publish
# binaries, sources and the SDK. Credentials come straight from config.ini.
127 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
128 scripts_dir = os.path.abspath("../scripts")
130 rsync_bin_url = ini.get("rsync", "binary_url")
131 rsync_bin_key = ini.get("rsync", "binary_password")
136 if ini.has_option("rsync", "source_url"):
137 rsync_src_url = ini.get("rsync", "source_url")
138 rsync_src_key = ini.get("rsync", "source_password")
# Glob used to pick the SDK tarball out of the per-target download dir.
142 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
144 if ini.has_option("rsync", "sdk_url"):
145 rsync_sdk_url = ini.get("rsync", "sdk_url")
147 if ini.has_option("rsync", "sdk_password"):
148 rsync_sdk_key = ini.get("rsync", "sdk_password")
150 if ini.has_option("rsync", "sdk_pattern"):
151 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
# Common rsync switches: force IPv4, verbose, and a hard I/O timeout.
153 rsync_defopts = ["-4", "-v", "--timeout=120"]
155 repo_url = ini.get("repo", "url")
156 repo_branch = "master"
158 if ini.has_option("repo", "branch"):
159 repo_branch = ini.get("repo", "branch")
# Default usign comment is derived from the branch name, e.g. "Master key".
162 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
164 if ini.has_option("usign", "key"):
165 usign_key = ini.get("usign", "key")
167 if ini.has_option("usign", "comment"):
168 usign_comment = ini.get("usign", "comment")
# Keep a shallow clone of the source tree on the master; it is only used to
# enumerate architectures and feeds, not to build.
175 if not os.path.isdir(work_dir+'/source.git'):
176 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
178 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
180 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# dump-target-info.pl prints one architecture per line; its output populates
# archnames (and presumably an arches list — the enclosing read loop and
# several lines are not visible in this extract; verify before relying on it).
181 findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
182 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
185 line = findarches.stdout.readline()
188 at = line.decode().strip().split()
190 archnames.append(at[0])
# Map feed repository URL -> branch, filled in by parse_feed_entry() below.
195 feedbranches = dict()
197 c['change_source'] = []
# Parse one feeds.conf line; for "src-git"-style feeds, record the branch and
# attach a GitPoller change source for that repository.
# NOTE(review): original line 202 is missing from this extract; the visible
# logic assumes a non-empty, whitespace-split feed line of the form
# "src-git <name> <url>[;<branch>]".
199 def parse_feed_entry(line):
200 parts = line.strip().split()
201 if parts[0].startswith("src-git"):
# The URL may carry a ";branch" suffix; default to 'master'.
203 url = parts[2].strip().split(';')
204 branch = url[1] if len(url) > 1 else 'master'
205 feedbranches[url[0]] = branch
206 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Ask the SDK makefiles for the base feed definition, then add every feed
# listed in feeds.conf.default. Both are fed through parse_feed_entry().
208 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
209 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
211 line = make.stdout.readline()
213 parse_feed_entry(str(line, 'utf-8'))
215 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
217 parse_feed_entry(line)
# Without at least one change source the master cannot react to commits.
219 if len(c['change_source']) == 0:
220 log.err("FATAL ERROR: no change_sources defined, aborting!")
225 # Configure the Schedulers, which decide how to react to incoming changes. In this
226 # case, just kick off a 'basebuild' build
# Build an architecture only when the change arrived on the branch we poll
# for that repository (see feedbranches above).
229 c['schedulers'].append(SingleBranchScheduler(
231 change_filter = filter.ChangeFilter(
232 filter_fn = lambda change: change.branch == feedbranches[change.repository]
234 treeStableTimer = 60,
235 builderNames = archnames))
# Manual trigger: a force scheduler bound to the 00_force_build builder,
# with an architecture selector ("all" or one specific arch).
# NOTE(review): several parameter-list lines of this call are missing from
# this extract; the nesting shown here is incomplete.
237 c['schedulers'].append(ForceScheduler(
239 buttonName = "Force builds",
240 label = "Force build details",
241 builderNames = [ "00_force_build" ],
244 util.CodebaseParameter(
246 label = "Repository",
247 branch = util.FixedParameter(name = "branch", default = ""),
248 revision = util.FixedParameter(name = "revision", default = ""),
249 repository = util.FixedParameter(name = "repository", default = ""),
250 project = util.FixedParameter(name = "project", default = "")
254 reason = util.StringParameter(
257 default = "Trigger build",
263 util.NestedParameter(
265 label="Build Options",
268 util.ChoiceStringParameter(
269 name = "architecture",
270 label = "Build architecture",
272 choices = [ "all" ] + archnames
281 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
282 # what steps, and which workers can execute them. Note that any particular build will
283 # only take place on one worker.
# Renderer: map a release version like "21.02.3" or "22.03-SNAPSHOT" to a
# "-MM.mm" directory suffix used in upload paths; presumably returns an empty
# suffix for snapshot/unversioned builds on lines not visible here.
# NOTE(review): original line 290 (likely an "if m is not None:" guard) and
# the renderer decorator are missing from this extract — verify.
286 def GetDirectorySuffix(props):
287 verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
288 if props.hasProperty("release_version"):
289 m = verpat.match(props["release_version"])
291 return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
	"""Renderer for make's -j value: divide the worker's CPU count evenly
	between the builds it may run concurrently.

	Uses the 'nproc' property (set by the nproc step) and the per-worker
	max_builds limit. Returns a string because it is interpolated into a
	"-j%s" make argument.
	"""
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		# Integer division: true division ("/") yields a float, and
		# "make -j8.0" is rejected by make.
		return str(int(props["nproc"]) // max_builds[props["workername"]])
	# Properties unavailable (e.g. nproc step skipped): fall back to one job.
	return "1"
# Body of a property renderer whose "def" line (original line 302, presumably
# GetCwd) is missing from this extract: prefer the explicit builddir
# property, then workdir; the final fallback return is also not visible.
303 if props.hasProperty("builddir"):
304 return props["builddir"]
305 elif props.hasProperty("workdir"):
306 return props["workdir"]
# Factory for a doStepIf predicate: the returned checker inspects the force
# scheduler's nested "options" property and skips the step when a specific
# architecture other than `target` was selected.
# NOTE(review): the early-return lines of CheckArchitectureProperty
# (originals 312, 317-322 — including its True/False returns and the
# exception handling suggested by the numbering gaps) are missing here.
310 def IsArchitectureSelected(target):
311 def CheckArchitectureProperty(step):
313 options = step.getProperty("options")
314 if type(options) is dict:
315 selected_arch = options.get("architecture", "all")
316 if selected_arch != "all" and selected_arch != target:
323 return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public-key text block from a base64-encoded secret key.

	Only bytes 0:2, 32:40 and 72: of the decoded secret key are carried over
	into the public key. The trailing "secret key" in the comment line is
	rewritten to "public key".

	Returns the two-line public key file contents as a str.
	"""
	seckey = base64.b64decode(seckey)
	# b64encode() returns bytes; decode to str so the second line is plain
	# base64 text instead of a Python "b'...'" repr, which would produce an
	# invalid key-build.pub file.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
def IsSharedWorkdir(step):
	"""doStepIf predicate: True when the step's worker carries the
	'shared_wd' property (i.e. builders share one work directory)."""
	shared_wd = step.getProperty("shared_wd")
	return bool(shared_wd)
337 @defer.inlineCallbacks
338 def getNewestCompleteTime(bldr):
339 """Returns the complete_at of the latest completed and not SKIPPED
340 build request for this builder, or None if there are no such build
341 requests. We need to filter out SKIPPED requests because we're
342 using collapseRequests=True which is unfortunately marking all
343 previous requests as complete when new buildset is created.
345 @returns: datetime instance or None, via Deferred
# Newest completed, non-skipped build request for this builder.
348 bldrid = yield bldr.getBuilderId()
349 completed = yield bldr.master.data.get(
350 ('builders', bldrid, 'buildrequests'),
352 resultspec.Filter('complete', 'eq', [True]),
353 resultspec.Filter('results', 'ne', [results.SKIPPED]),
355 order=['-complete_at'], limit=1)
# NOTE(review): several lines are missing from this extract (including the
# early return when no completed request exists and the data path of the
# second query) — the control flow shown here is incomplete.
359 complete_at = completed[0]['complete_at']
# Also consider the most recently started build; prefer its completion time
# when it is newer than the request's.
361 last_build = yield bldr.master.data.get(
364 resultspec.Filter('builderid', 'eq', [bldrid]),
366 order=['-started_at'], limit=1)
368 if last_build and last_build[0]:
369 last_complete_at = last_build[0]['complete_at']
370 if last_complete_at and (last_complete_at > complete_at):
371 return last_complete_at
375 @defer.inlineCallbacks
376 def prioritizeBuilders(master, builders):
377 """Returns sorted list of builders by their last timestamp of completed and
380 @returns: list of sorted builders
# Builders currently building are de-prioritized.
383 def is_building(bldr):
384 return bool(bldr.building) or bool(bldr.old_building)
# NOTE(review): the enclosing def of the next two lines (presumably a
# per-builder info helper) is on a line missing from this extract.
387 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
388 d.addCallback(lambda complete_at: (complete_at, bldr))
# Sort key: completion timestamp, normalized to an aware datetime; the
# assignments of `date` for the never-built and currently-building cases sit
# on lines not visible here.
392 (complete_at, bldr) = item
396 complete_at = date.replace(tzinfo=tzutc())
398 if is_building(bldr):
400 complete_at = date.replace(tzinfo=tzutc())
402 return (complete_at, bldr.name)
# NOTE(review): this local `results` shadows the imported
# buildbot.process.results module within this function.
404 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
405 results.sort(key=bldr_sort)
408 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
410 return [r[1] for r in results]
# Least-recently-finished builders run first.
412 c['prioritizeBuilders'] = prioritizeBuilders
# Lock serializing download-heavy steps per worker.
415 dlLock = locks.WorkerLock("worker_dl")
419 for worker in c['workers']:
420 workerNames.append(worker.workername)
# 00_force_build only triggers the per-architecture builders (steps are
# appended inside the loop below).
422 force_factory = BuildFactory()
424 c['builders'].append(BuilderConfig(
425 name = "00_force_build",
426 workernames = workerNames,
427 factory = force_factory))
# Interior of the per-architecture loop (its "for arch in ..." header is on a
# line not visible in this extract). arch[0] is the architecture name,
# arch[1] a "target/subtarget" path.
430 ts = arch[1].split('/')
432 factory = BuildFactory()
434 # setup shared work directory if required
435 factory.addStep(ShellCommand(
437 description = "Setting up shared work directory",
438 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
440 haltOnFailure = True,
441 doStepIf = IsSharedWorkdir))
443 # find number of cores
444 factory.addStep(SetPropertyFromCommand(
447 description = "Finding number of CPUs",
448 command = ["nproc"]))
# cleanup.sh wipes previous build state; "full" vs "single" selects how much
# is removed (which variant runs is gated on lines not visible here).
451 factory.addStep(FileDownload(
452 mastersrc = scripts_dir + '/cleanup.sh',
453 workerdest = "../cleanup.sh",
457 factory.addStep(ShellCommand(
459 description = "Cleaning previous builds",
460 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
462 haltOnFailure = True,
465 factory.addStep(ShellCommand(
467 description = "Cleaning work area",
468 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
470 haltOnFailure = True,
473 # expire tree if needed
474 elif tree_expire > 0:
475 factory.addStep(FileDownload(
476 mastersrc = scripts_dir + '/expire.sh',
477 workerdest = "../expire.sh",
480 factory.addStep(ShellCommand(
482 description = "Checking for build tree expiry",
483 command = ["./expire.sh", str(tree_expire)],
485 haltOnFailure = True,
# Fetch the SDK tarball for this target via authenticated rsync, unpack it
# into a staging dir, then rsync it over the persistent sdk/ tree so only
# changed files are replaced.
488 factory.addStep(ShellCommand(
490 description = "Preparing SDK directory",
491 command = ["mkdir", "-p", "sdk"],
492 haltOnFailure = True))
494 factory.addStep(ShellCommand(
495 name = "downloadsdk",
496 description = "Downloading SDK archive",
497 command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
498 env={'RSYNC_PASSWORD': rsync_sdk_key},
499 haltOnFailure = True,
502 factory.addStep(ShellCommand(
504 description = "Unpacking SDK archive",
505 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
506 haltOnFailure = True))
508 factory.addStep(ShellCommand(
510 description = "Updating SDK",
511 command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
512 haltOnFailure = True))
# Drop absolute host-tool symlinks that don't point into /bin or /usr/bin
# (they would dangle or escape the SDK's staging dir).
514 factory.addStep(ShellCommand(
515 name = "cleancmdlinks",
516 description = "Sanitizing host command symlinks",
517 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
518 haltOnFailure = True))
# Drop a tiny makefile into the SDK that prints VERSION_NUMBER, then capture
# it into the release_version property (used by GetDirectorySuffix).
520 factory.addStep(StringDownload(
521 name = "writeversionmk",
522 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
523 workerdest = "sdk/getversion.mk",
526 factory.addStep(SetPropertyFromCommand(
528 property = "release_version",
529 description = "Finding SDK release version",
530 workdir = "build/sdk",
531 command = ["make", "-f", "getversion.mk"]))
# Install the usign public key when configured; the private key and ucert are
# dummies — real signing happens on the master (signall.sh below).
534 if usign_key is not None:
535 factory.addStep(StringDownload(
536 name = "dlkeybuildpub",
537 s = UsignSec2Pub(usign_key, usign_comment),
538 workerdest = "sdk/key-build.pub",
541 factory.addStep(StringDownload(
543 s = "# fake private key",
544 workerdest = "sdk/key-build",
547 factory.addStep(StringDownload(
548 name = "dlkeybuilducert",
549 s = "# fake certificate",
550 workerdest = "sdk/key-build.ucert",
# Share one download cache per worker by symlinking sdk/dl to $HOME/dl.
553 factory.addStep(ShellCommand(
555 description = "Preparing download directory",
556 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
557 haltOnFailure = True))
# Reset the SDK .config to defaults, then set up ccache inside the SDK.
559 factory.addStep(ShellCommand(
561 description = "Preparing SDK configuration",
562 workdir = "build/sdk",
563 command = ["sh", "-c", "rm -f .config && make defconfig"]))
565 factory.addStep(FileDownload(
566 mastersrc = scripts_dir + '/ccache.sh',
567 workerdest = 'sdk/ccache.sh',
570 factory.addStep(ShellCommand(
572 description = "Preparing ccache",
573 workdir = "build/sdk",
574 command = ["./ccache.sh"],
575 haltOnFailure = True))
# When git_ssh is enabled: install the deploy key and rewrite feed URLs from
# https:// to ssh://git@ so feed updates go over SSH (the key is removed and
# feeds.conf restored by the rmfeedsconf step below).
578 factory.addStep(StringDownload(
579 name = "dlgitclonekey",
581 workerdest = "../git-clone.key",
584 factory.addStep(ShellCommand(
585 name = "patchfeedsconf",
586 description = "Patching feeds.conf",
587 workdir = "build/sdk",
588 command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
589 haltOnFailure = True))
591 factory.addStep(ShellCommand(
592 name = "updatefeeds",
593 description = "Updating feeds",
594 workdir = "build/sdk",
# Host-key checking is disabled for the scripted clone; the identity file is
# the deploy key downloaded above.
595 env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
596 haltOnFailure = True))
600 factory.addStep(ShellCommand(
601 name = "rmfeedsconf",
602 description = "Removing feeds.conf",
603 workdir = "build/sdk",
604 command=["rm", "feeds.conf"],
605 haltOnFailure = True))
607 factory.addStep(ShellCommand(
608 name = "installfeeds",
609 description = "Installing feeds",
610 workdir = "build/sdk",
611 command = ["./scripts/feeds", "install", "-a"],
612 haltOnFailure = True))
# Clear stale failure logs, then run the package build itself.
614 factory.addStep(ShellCommand(
616 description = "Clearing failure logs",
617 workdir = "build/sdk",
618 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
619 haltOnFailure = False,
620 flunkOnFailure = False,
621 warnOnFailure = True,
# IGNORE_ERRORS lets individual package failures continue the build; signing
# is disabled here because packages are signed on the master afterwards.
624 factory.addStep(ShellCommand(
626 description = "Building packages",
627 workdir = "build/sdk",
629 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
630 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
631 haltOnFailure = True))
# Pin the feed revisions that produced this package set.
633 factory.addStep(ShellCommand(
634 name = "mkfeedsconf",
635 description = "Generating pinned feeds.conf",
636 workdir = "build/sdk",
637 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
639 factory.addStep(ShellCommand(
641 description = "Calculating checksums",
642 descriptionDone="Checksums calculated",
643 workdir = "build/sdk",
# NOTE(review): this command string looks transcription-damaged: "\1"/"\2"
# in a non-raw Python string are octal escapes (control characters), "%P\n"
# embeds a literal newline, and "sha256sums)" carries an unmatched ")".
# The intended shell very likely used doubled backslashes — verify against
# the canonical phase2 master.cfg before trusting this step.
644 command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums)",
# Signing round-trip: pack the Packages indexes, upload them to the master,
# sign there with signall.sh, download and unpack the signed result.
648 if ini.has_option("gpg", "key") or usign_key is not None:
649 factory.addStep(MasterShellCommand(
650 name = "signprepare",
651 description = "Preparing temporary signing directory",
652 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
656 factory.addStep(ShellCommand(
658 description = "Packing files to sign",
659 workdir = "build/sdk",
660 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
664 factory.addStep(FileUpload(
665 workersrc = "sdk/sign.tar.gz",
666 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
670 factory.addStep(MasterShellCommand(
672 description = "Signing files",
673 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
674 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
678 factory.addStep(FileDownload(
679 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
680 workerdest = "sdk/sign.tar.gz",
684 factory.addStep(ShellCommand(
686 description = "Unpacking signed files",
687 workdir = "build/sdk",
688 command = ["tar", "-xzf", "sign.tar.gz"],
# Publish packages: first create the per-arch directory skeleton on the
# remote (uploadprepare), then mirror the package tree (packageupload).
692 factory.addStep(FileDownload(
694 mastersrc = scripts_dir + "/rsync.sh",
695 workerdest = "../rsync.sh",
699 factory.addStep(ShellCommand(
700 name = "uploadprepare",
701 description = "Preparing package directory",
702 workdir = "build/sdk",
# The include/exclude combination transfers only the empty /<arch>/
# directory itself, not its contents.
703 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
704 env={'RSYNC_PASSWORD': rsync_bin_key},
705 haltOnFailure = True,
709 factory.addStep(ShellCommand(
710 name = "packageupload",
711 description = "Uploading package files",
712 workdir = "build/sdk",
713 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
714 env={'RSYNC_PASSWORD': rsync_bin_key},
715 haltOnFailure = True,
# Same skeleton-then-content pattern for the failure-log area.
719 factory.addStep(ShellCommand(
721 description = "Preparing log directory",
722 workdir = "build/sdk",
723 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
724 env={'RSYNC_PASSWORD': rsync_bin_key},
725 haltOnFailure = True,
# Collect logs only for packages listed in error.txt, stage them under
# faillogs/, then upload. All three steps are best-effort (warn, don't fail).
729 factory.addStep(ShellCommand(
731 description = "Finding failure logs",
732 workdir = "build/sdk/logs/package/feeds",
733 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
734 haltOnFailure = False,
735 flunkOnFailure = False,
736 warnOnFailure = True,
739 factory.addStep(ShellCommand(
741 description = "Collecting failure logs",
742 workdir = "build/sdk",
743 command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
744 haltOnFailure = False,
745 flunkOnFailure = False,
746 warnOnFailure = True,
749 factory.addStep(ShellCommand(
751 description = "Uploading failure logs",
752 workdir = "build/sdk",
753 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
754 env={'RSYNC_PASSWORD': rsync_bin_key},
755 haltOnFailure = False,
756 flunkOnFailure = False,
757 warnOnFailure = True,
# Optionally mirror newly-downloaded source tarballs: only regular files in
# dl/ newer than the SDK archive (i.e. fetched by this build) are listed.
761 if rsync_src_url is not None:
762 factory.addStep(ShellCommand(
764 description = "Finding source archives to upload",
765 workdir = "build/sdk",
766 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
770 factory.addStep(ShellCommand(
771 name = "sourceupload",
772 description = "Uploading source archives",
773 workdir = "build/sdk",
# Worker-unique partial-dir avoids clashes between concurrent uploaders.
774 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--checksum", "--delay-updates",
775 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
776 env={'RSYNC_PASSWORD': rsync_src_key},
777 haltOnFailure = False,
778 flunkOnFailure = False,
779 warnOnFailure = True,
# Purely informational diagnostics — never affect the build result.
783 factory.addStep(ShellCommand(
785 description = "Reporting disk usage",
786 command=["df", "-h", "."],
788 haltOnFailure = False,
789 flunkOnFailure = False,
790 warnOnFailure = False,
794 factory.addStep(ShellCommand(
796 description = "Reporting estimated file space usage",
797 command=["du", "-sh", "."],
799 haltOnFailure = False,
800 flunkOnFailure = False,
801 warnOnFailure = False,
805 factory.addStep(ShellCommand(
807 description = "Reporting ccache stats",
808 command=["ccache", "-s"],
810 haltOnFailure = False,
811 flunkOnFailure = False,
812 warnOnFailure = False,
# Register the per-arch builder, a Triggerable scheduler for it, and a
# conditional trigger step on the force-build factory.
816 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
818 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
819 force_factory.addStep(steps.Trigger(
820 name = "trigger_%s" % arch[0],
821 description = "Triggering %s build" % arch[0],
822 schedulerNames = [ "trigger_%s" % arch[0] ],
823 set_properties = { "reason": Property("reason") },
824 doStepIf = IsArchitectureSelected(arch[0])
827 ####### STATUS arches
829 # 'status' is a list of status targets. The results of each build will be
830 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
831 # including web pages, email senders, and IRC bots.
# Web UI: bind address/port from config, waterfall and console views enabled.
833 if ini.has_option("phase2", "status_bind"):
835 'port': ini.get("phase2", "status_bind"),
837 'waterfall_view': True,
838 'console_view': True,
# Optional basic auth; the configured user gets the "admins" role, which is
# required for any control endpoint (forcing builds, stopping, etc.).
843 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
844 c['www']['auth'] = util.UserPasswordAuth([
845 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
847 c['www']['authz'] = util.Authz(
848 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
849 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
852 ####### PROJECT IDENTITY
854 # the 'title' string will appear at the top of this buildbot
855 # installation's html.WebStatus home page (linked to the
856 # 'titleURL') and is embedded in the title of the waterfall HTML page.
858 c['title'] = ini.get("general", "title")
859 c['titleURL'] = ini.get("general", "title_url")
861 # the 'buildbotURL' string should point to the location where the buildbot's
862 # internal web server (usually the html.WebStatus page) is visible. This
863 # typically uses the port number set in the Waterfall 'status' entry, but
864 # with an externally-visible host name which the buildbot cannot figure out
867 c['buildbotURL'] = buildbot_url
872 # This specifies what database buildbot uses to store its state. You can leave
873 # this at its default for all but the largest installations.
874 'db_url' : "sqlite:///state.sqlite",
# Opt out of reporting anonymized usage data to the buildbot project.
877 c['buildbotNetUsageData'] = None