2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# Record our PID so the process can be located later; never clobber an
# existing pidfile left by a running instance.
if not os.path.exists("twistd.pid"):
	with open("twistd.pid", "w") as pidfile:
		pidfile.write(str(os.getpid()))
# Load the master configuration; BUILDMASTER_CONFIG overrides the default
# ./config.ini location.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this (phase2) buildmaster.
buildbot_url = ini.get("phase2", "buildbot_url")
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
BuildmasterConfig = {}
c = BuildmasterConfig
59 # The 'workers' list defines the set of recognized buildworkers. Each element is
60 # a Worker object, specifying a unique worker name and password. The same
61 # worker name and password must be configured on the worker.
# Optional overrides for the worker (PB) listen port and for persistent
# work directories; defaults are assigned earlier in this file.
if ini.has_option("phase2", "port"):
	worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")
# Register every "[worker ...]" section of the ini file that belongs to
# phase 2 as a build worker.
for section in ini.sections():
	if section.startswith("worker "):
		if (ini.has_option(section, "name") and ini.has_option(section, "password") and
		    ini.has_option(section, "phase") and ini.getint(section, "phase") == 2):
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

				# A worker limited to one concurrent build can safely share
				# a single work directory between all builders.
				if max_builds[name] == 1:
					sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				# Bug fix: compare this worker's build count, not the whole
				# max_builds dict — a dict never equals 1, so the original
				# check raised for every worker with shared_wd enabled.
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option).
c['protocols'] = {'pb': {'port': worker_port}}

# Merge queued build requests for the same builder into a single build.
c['collapseRequests'] = True
# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
	logHorizon=timedelta(days=3),
	hour=6,  # NOTE(review): closing arguments reconstructed — confirm janitor schedule
)]
####### CHANGESOURCES

# Master-side working area and the helper scripts shipped next to this config.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination and credentials for uploading built packages.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
# Optional rsync target for uploading source tarballs.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# Location, credentials and filename pattern of the SDK archives the
# builders unpack and build against.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
# Options passed to every rsync invocation.
rsync_defopts = ["-4", "-v", "--timeout=120"]

# Source repository to build from (branch defaults to master).
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# Optional usign signing key, plus the comment embedded in the public key.
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")
# Make sure a local checkout of the source tree exists and is up to date.
if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# Ask the tree which package architectures exist; one builder per arch.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

arches = []
archnames = []

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.decode().strip().split()
	arches.append(at)
	archnames.append(at[0])
# Maps feed URL -> branch so the change filter can match incoming changes.
feedbranches = dict()

c['change_source'] = []
def parse_feed_entry(line):
	"""Parse one feeds.conf line and register a GitPoller for src-git feeds.

	Records the feed in `feeds`, remembers its branch in `feedbranches`,
	and appends a GitPoller to c['change_source']. Non-src-git lines are
	ignored.
	"""
	parts = line.strip().split()
	# Robustness fix: blank lines used to raise IndexError on parts[0].
	if not parts:
		return
	if parts[0].startswith("src-git"):
		feeds.append(parts)
		# The URL may carry a ";branch" suffix; default to master.
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Collect feed definitions from the SDK makefiles and feeds.conf.default.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

while True:
	line = make.stdout.readline()
	if not line:
		break
	parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
	for line in f:
		parse_feed_entry(line)
# Without any pollers this master would never build anything — bail out.
if not c['change_source']:
	log.err("FATAL ERROR: no change_sources defined, aborting!")
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []

# NOTE(review): scheduler name reconstructed — confirm against deployed config.
c['schedulers'].append(SingleBranchScheduler(
	name            = "all",
	change_filter   = filter.ChangeFilter(
		# Only react to changes on the branch each feed was registered with.
		filter_fn   = lambda change: change.branch == feedbranches[change.repository]
	),
	treeStableTimer = 60,
	builderNames    = archnames))
223 c['schedulers'].append(ForceScheduler(
225 buttonName = "Force builds",
226 label = "Force build details",
227 builderNames = [ "00_force_build" ],
230 util.CodebaseParameter(
232 label = "Repository",
233 branch = util.FixedParameter(name = "branch", default = ""),
234 revision = util.FixedParameter(name = "revision", default = ""),
235 repository = util.FixedParameter(name = "repository", default = ""),
236 project = util.FixedParameter(name = "project", default = "")
240 reason = util.StringParameter(
243 default = "Trigger build",
249 util.NestedParameter(
251 label="Build Options",
254 util.ChoiceStringParameter(
255 name = "architecture",
256 label = "Build architecture",
258 choices = [ "all" ] + archnames
267 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
268 # what steps, and which workers can execute them. Note that any particular build will
269 # only take place on one worker.
def GetDirectorySuffix(props):
	"""Render a '-YY.MM' directory suffix from the 'release_version'
	build property (e.g. '21.02.3' or '23.05-SNAPSHOT'); only the
	major.minor part is used."""
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
	"""Render the parallelism for 'make -j': the worker's CPU count
	divided by its concurrent build slots."""
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		# Bug fix: use floor division — true division yields a float whose
		# str() is e.g. "4.0", which is not a valid 'make -j' argument.
		return str(int(props["nproc"]) // max_builds[props["workername"]])
289 if props.hasProperty("builddir"):
290 return props["builddir"]
291 elif props.hasProperty("workdir"):
292 return props["workdir"]
def IsArchitectureSelected(target):
	"""Build a doStepIf predicate that is true when a forced build selected
	this architecture (or 'all' / no selection at all)."""
	def CheckArchitectureProperty(step):
		try:
			options = step.getProperty("options")
			if type(options) is dict:
				selected_arch = options.get("architecture", "all")
				if selected_arch != "all" and selected_arch != target:
					return False
		except KeyError:
			pass

		return True

	return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key file content from a base64 secret key.

	Returns the two-line public key text (the comment with 'secret key'
	rewritten to 'public key', then the base64 key material), or None if
	the input is not valid base64.
	"""
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		return None

	# Bug fix: b64encode() returns bytes — decode to str so the public key
	# is not written out as a "b'...'" repr in key-build.pub.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
def IsSharedWorkdir(step):
	"""doStepIf predicate: true when the worker advertises a shared workdir."""
	shared = step.getProperty("shared_wd")
	return bool(shared)
323 @defer.inlineCallbacks
324 def getNewestCompleteTime(bldr):
325 """Returns the complete_at of the latest completed and not SKIPPED
326 build request for this builder, or None if there are no such build
327 requests. We need to filter out SKIPPED requests because we're
328 using collapseRequests=True which is unfortunately marking all
329 previous requests as complete when new buildset is created.
331 @returns: datetime instance or None, via Deferred
334 bldrid = yield bldr.getBuilderId()
335 completed = yield bldr.master.data.get(
336 ('builders', bldrid, 'buildrequests'),
338 resultspec.Filter('complete', 'eq', [True]),
339 resultspec.Filter('results', 'ne', [results.SKIPPED]),
341 order=['-complete_at'], limit=1)
345 complete_at = completed[0]['complete_at']
347 last_build = yield bldr.master.data.get(
350 resultspec.Filter('builderid', 'eq', [bldrid]),
352 order=['-started_at'], limit=1)
354 if last_build and last_build[0]:
355 last_complete_at = last_build[0]['complete_at']
356 if last_complete_at and (last_complete_at > complete_at):
357 return last_complete_at
361 @defer.inlineCallbacks
362 def prioritizeBuilders(master, builders):
363 """Returns sorted list of builders by their last timestamp of completed and
366 @returns: list of sorted builders
369 def is_building(bldr):
370 return bool(bldr.building) or bool(bldr.old_building)
373 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
374 d.addCallback(lambda complete_at: (complete_at, bldr))
378 (complete_at, bldr) = item
382 complete_at = date.replace(tzinfo=tzutc())
384 if is_building(bldr):
386 complete_at = date.replace(tzinfo=tzutc())
388 return (complete_at, bldr.name)
390 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
391 results.sort(key=bldr_sort)
394 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
396 return [r[1] for r in results]
c['prioritizeBuilders'] = prioritizeBuilders

####### BUILDERS

# Serialize heavy downloads on a per-worker basis.
dlLock = locks.WorkerLock("worker_dl")

c['builders'] = []

workerNames = [ ]

for worker in c['workers']:
	workerNames.append(worker.workername)
# The force scheduler triggers the per-arch builders through this single
# hidden builder; its factory is filled with Trigger steps further below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = workerNames,
	factory = force_factory))
416 ts = arch[1].split('/')
418 factory = BuildFactory()
420 # setup shared work directory if required
421 factory.addStep(ShellCommand(
423 description = "Setting up shared work directory",
424 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
426 haltOnFailure = True,
427 doStepIf = IsSharedWorkdir))
429 # find number of cores
430 factory.addStep(SetPropertyFromCommand(
433 description = "Finding number of CPUs",
434 command = ["nproc"]))
437 factory.addStep(FileDownload(
438 mastersrc = scripts_dir + '/cleanup.sh',
439 workerdest = "../cleanup.sh",
443 factory.addStep(ShellCommand(
445 description = "Cleaning previous builds",
446 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
448 haltOnFailure = True,
451 factory.addStep(ShellCommand(
453 description = "Cleaning work area",
454 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
456 haltOnFailure = True,
459 factory.addStep(ShellCommand(
461 description = "Preparing SDK directory",
462 command = ["mkdir", "-p", "sdk"],
463 haltOnFailure = True))
465 factory.addStep(ShellCommand(
466 name = "downloadsdk",
467 description = "Downloading SDK archive",
468 command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
469 env={'RSYNC_PASSWORD': rsync_sdk_key},
470 haltOnFailure = True,
473 factory.addStep(ShellCommand(
475 description = "Unpacking SDK archive",
476 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
477 haltOnFailure = True))
479 factory.addStep(ShellCommand(
481 description = "Updating SDK",
482 command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
483 haltOnFailure = True))
485 factory.addStep(ShellCommand(
486 name = "cleancmdlinks",
487 description = "Sanitizing host command symlinks",
488 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
489 haltOnFailure = True))
491 factory.addStep(StringDownload(
492 name = "writeversionmk",
493 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
494 workerdest = "sdk/getversion.mk",
497 factory.addStep(SetPropertyFromCommand(
499 property = "release_version",
500 description = "Finding SDK release version",
501 workdir = "build/sdk",
502 command = ["make", "-f", "getversion.mk"]))
505 if usign_key is not None:
506 factory.addStep(StringDownload(
507 name = "dlkeybuildpub",
508 s = UsignSec2Pub(usign_key, usign_comment),
509 workerdest = "sdk/key-build.pub",
512 factory.addStep(StringDownload(
514 s = "# fake private key",
515 workerdest = "sdk/key-build",
518 factory.addStep(StringDownload(
519 name = "dlkeybuilducert",
520 s = "# fake certificate",
521 workerdest = "sdk/key-build.ucert",
524 factory.addStep(ShellCommand(
526 description = "Preparing download directory",
527 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
528 haltOnFailure = True))
530 factory.addStep(ShellCommand(
532 description = "Preparing SDK configuration",
533 workdir = "build/sdk",
534 command = ["sh", "-c", "rm -f .config && make defconfig"]))
536 factory.addStep(FileDownload(
537 mastersrc = scripts_dir + '/ccache.sh',
538 workerdest = 'sdk/ccache.sh',
541 factory.addStep(ShellCommand(
543 description = "Preparing ccache",
544 workdir = "build/sdk",
545 command = ["./ccache.sh"],
546 haltOnFailure = True))
548 factory.addStep(ShellCommand(
549 name = "updatefeeds",
550 description = "Updating feeds",
551 workdir = "build/sdk",
552 command = ["./scripts/feeds", "update", "-f"],
553 haltOnFailure = True))
555 factory.addStep(ShellCommand(
556 name = "installfeeds",
557 description = "Installing feeds",
558 workdir = "build/sdk",
559 command = ["./scripts/feeds", "install", "-a"],
560 haltOnFailure = True))
562 factory.addStep(ShellCommand(
564 description = "Clearing failure logs",
565 workdir = "build/sdk",
566 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
567 haltOnFailure = False,
568 flunkOnFailure = False,
569 warnOnFailure = True,
572 factory.addStep(ShellCommand(
574 description = "Building packages",
575 workdir = "build/sdk",
577 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
578 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
579 haltOnFailure = True))
581 factory.addStep(ShellCommand(
582 name = "mkfeedsconf",
583 description = "Generating pinned feeds.conf",
584 workdir = "build/sdk",
585 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
587 factory.addStep(ShellCommand(
589 description = "Calculating checksums",
590 descriptionDone="Checksums calculated",
591 workdir = "build/sdk",
592 command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums)",
596 if ini.has_option("gpg", "key") or usign_key is not None:
597 factory.addStep(MasterShellCommand(
598 name = "signprepare",
599 description = "Preparing temporary signing directory",
600 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
604 factory.addStep(ShellCommand(
606 description = "Packing files to sign",
607 workdir = "build/sdk",
608 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
612 factory.addStep(FileUpload(
613 workersrc = "sdk/sign.tar.gz",
614 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
618 factory.addStep(MasterShellCommand(
620 description = "Signing files",
621 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
622 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
626 factory.addStep(FileDownload(
627 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
628 workerdest = "sdk/sign.tar.gz",
632 factory.addStep(ShellCommand(
634 description = "Unpacking signed files",
635 workdir = "build/sdk",
636 command = ["tar", "-xzf", "sign.tar.gz"],
640 # download remote sha256sums to 'target-sha256sums'
641 factory.addStep(ShellCommand(
642 name = "target-sha256sums",
643 description = "Fetching remote sha256sums for arch",
644 command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
645 env={'RSYNC_PASSWORD': rsync_bin_key},
647 haltOnFailure = False,
648 flunkOnFailure = False,
649 warnOnFailure = False,
652 factory.addStep(FileDownload(
654 mastersrc = scripts_dir + "/rsync.sh",
655 workerdest = "../rsync.sh",
659 factory.addStep(FileDownload(
660 name = "dlsha2rsyncpl",
661 mastersrc = "sha2rsync.pl",
662 workerdest = "../sha2rsync.pl",
666 factory.addStep(ShellCommand(
668 description = "Building list of files to upload",
669 workdir = "build/sdk",
670 command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
671 haltOnFailure = True,
674 factory.addStep(ShellCommand(
675 name = "uploadprepare",
676 description = "Preparing package directory",
677 workdir = "build/sdk",
678 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
679 env={'RSYNC_PASSWORD': rsync_bin_key},
680 haltOnFailure = True,
684 factory.addStep(ShellCommand(
685 name = "packageupload",
686 description = "Uploading package files",
687 workdir = "build/sdk",
688 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
689 env={'RSYNC_PASSWORD': rsync_bin_key},
690 haltOnFailure = True,
694 factory.addStep(ShellCommand(
695 name = "packageprune",
696 description = "Pruning package files",
697 workdir = "build/sdk",
698 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
699 env={'RSYNC_PASSWORD': rsync_bin_key},
700 haltOnFailure = True,
704 factory.addStep(ShellCommand(
706 description = "Preparing log directory",
707 workdir = "build/sdk",
708 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
709 env={'RSYNC_PASSWORD': rsync_bin_key},
710 haltOnFailure = True,
714 factory.addStep(ShellCommand(
716 description = "Finding failure logs",
717 workdir = "build/sdk/logs/package/feeds",
718 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
719 haltOnFailure = False,
720 flunkOnFailure = False,
721 warnOnFailure = True,
724 factory.addStep(ShellCommand(
726 description = "Collecting failure logs",
727 workdir = "build/sdk",
728 command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
729 haltOnFailure = False,
730 flunkOnFailure = False,
731 warnOnFailure = True,
734 factory.addStep(ShellCommand(
736 description = "Uploading failure logs",
737 workdir = "build/sdk",
738 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
739 env={'RSYNC_PASSWORD': rsync_bin_key},
740 haltOnFailure = False,
741 flunkOnFailure = False,
742 warnOnFailure = True,
746 if rsync_src_url is not None:
747 factory.addStep(ShellCommand(
749 description = "Finding source archives to upload",
750 workdir = "build/sdk",
751 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
755 factory.addStep(ShellCommand(
756 name = "sourceupload",
757 description = "Uploading source archives",
758 workdir = "build/sdk",
759 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
760 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
761 env={'RSYNC_PASSWORD': rsync_src_key},
762 haltOnFailure = False,
763 flunkOnFailure = False,
764 warnOnFailure = True,
768 factory.addStep(ShellCommand(
770 description = "Reporting disk usage",
771 command=["df", "-h", "."],
773 haltOnFailure = False,
774 flunkOnFailure = False,
775 warnOnFailure = False,
779 factory.addStep(ShellCommand(
781 description = "Reporting estimated file space usage",
782 command=["du", "-sh", "."],
784 haltOnFailure = False,
785 flunkOnFailure = False,
786 warnOnFailure = False,
790 factory.addStep(ShellCommand(
792 description = "Reporting ccache stats",
793 command=["ccache", "-s"],
795 haltOnFailure = False,
796 flunkOnFailure = False,
797 warnOnFailure = False,
801 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
803 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
804 force_factory.addStep(steps.Trigger(
805 name = "trigger_%s" % arch[0],
806 description = "Triggering %s build" % arch[0],
807 schedulerNames = [ "trigger_%s" % arch[0] ],
808 set_properties = { "reason": Property("reason") },
809 doStepIf = IsArchitectureSelected(arch[0])
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
818 if ini.has_option("phase2", "status_bind"):
820 'port': ini.get("phase2", "status_bind"),
822 'waterfall_view': True,
823 'console_view': True,
828 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
829 c['www']['auth'] = util.UserPasswordAuth([
830 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
832 c['www']['authz'] = util.Authz(
833 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
834 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url
857 # This specifies what database buildbot uses to store its state. You can leave
858 # this at its default for all but the largest installations.
859 'db_url' : "sqlite:///state.sqlite",
862 c['buildbotNetUsageData'] = None