2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
# --- Bootstrap: pid file, config.ini, master dictionary ---------------------
# NOTE(review): this dump is line-number-prefixed and has gaps; the defaults
# assigned before these `if has_option` guards (worker_port, persistent,
# git_ssh, git_ssh_key) are on lines not visible here — verify in full file.
41 if not os.path.exists("twistd.pid"):
42 with open("twistd.pid", "w") as pidfile:
43 pidfile.write("{}".format(os.getpid()))
# Read the master configuration; BUILDMASTER_CONFIG overrides ./config.ini.
45 ini = configparser.ConfigParser()
46 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
48 buildbot_url = ini.get("phase2", "buildbot_url")
50 # This is a sample buildmaster config file. It must be installed as
51 # 'master.cfg' in your buildmaster's base directory.
53 # This is the dictionary that the buildmaster pays attention to. We also use
54 # a shorter alias to save typing.
55 c = BuildmasterConfig = {}
59 # The 'workers' list defines the set of recognized buildworkers. Each element is
60 # a Worker object, specifying a unique worker name and password. The same
61 # worker name and password must be configured on the worker.
68 if ini.has_option("phase2", "port"):
69 worker_port = ini.get("phase2", "port")
71 if ini.has_option("phase2", "persistent"):
72 persistent = ini.getboolean("phase2", "persistent")
74 if ini.has_option("general", "git_ssh"):
75 git_ssh = ini.getboolean("general", "git_ssh")
77 if ini.has_option("general", "git_ssh_key"):
78 git_ssh_key = ini.get("general", "git_ssh_key")
# Register every phase-2 worker declared in config.ini ("worker <n>" sections).
# Side effects: fills max_builds[name] and appends Worker objects to c['workers'].
# NOTE(review): the "max_builds[name] = 1" default is reconstructed — the
# original line is not visible in this dump, but without it the later
# max_builds[name] lookups raise KeyError for workers lacking a "builds" option.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = {'shared_wd': False}
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

                # A single-build worker can safely share its work directory.
                if max_builds[name] == 1:
                    sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUG FIX: compare this worker's build count, not the whole dict.
                # The original "(max_builds != 1)" was always True, so every
                # worker with shared_wd enabled raised ValueError.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds=max_builds[name], properties=sl_props))
107 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
108 # This must match the value configured into the buildworkers (with their
110 c['protocols'] = {'pb': {'port': worker_port}}
# Collapse queued compatible build requests into one build.
113 c['collapseRequests'] = True
115 # Reduce amount of backlog data
116 c['configurators'] = [util.JanitorConfigurator(
117 logHorizon=timedelta(days=3),
121 ####### CHANGESOURCES
# Working/scripts directories and rsync upload targets from config.ini.
123 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
124 scripts_dir = os.path.abspath("../scripts")
126 rsync_bin_url = ini.get("rsync", "binary_url")
127 rsync_bin_key = ini.get("rsync", "binary_password")
132 if ini.has_option("rsync", "source_url"):
133 rsync_src_url = ini.get("rsync", "source_url")
134 rsync_src_key = ini.get("rsync", "source_password")
138 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
140 if ini.has_option("rsync", "sdk_url"):
141 rsync_sdk_url = ini.get("rsync", "sdk_url")
143 if ini.has_option("rsync", "sdk_password"):
144 rsync_sdk_key = ini.get("rsync", "sdk_password")
146 if ini.has_option("rsync", "sdk_pattern"):
147 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
# Common rsync flags: IPv4 only, verbose, 120s I/O timeout.
149 rsync_defopts = ["-4", "-v", "--timeout=120"]
151 repo_url = ini.get("repo", "url")
152 repo_branch = "master"
154 if ini.has_option("repo", "branch"):
155 repo_branch = ini.get("repo", "branch")
# Default usign comment is derived from the branch name, e.g. "Openwrt 21.02 key".
158 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
160 if ini.has_option("usign", "key"):
161 usign_key = ini.get("usign", "key")
163 if ini.has_option("usign", "comment"):
164 usign_comment = ini.get("usign", "comment")
# Clone or update the source tree, then ask dump-target-info.pl for the
# list of package architectures; one builder per architecture is created later.
171 if not os.path.isdir(work_dir+'/source.git'):
172 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
174 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
176 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
177 findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
178 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
# NOTE(review): the loop driving these reads is on lines missing from this dump.
181 line = findarches.stdout.readline()
184 at = line.decode().strip().split()
186 archnames.append(at[0])
# Map feed URL -> branch, filled by parse_feed_entry below.
191 feedbranches = dict()
193 c['change_source'] = []
# Parse one feeds.conf line; for "src-git" entries, remember the feed's
# branch and register a GitPoller change source (5-minute poll interval).
# NOTE(review): original line 198 (between the "if" and "url =") is missing
# from this dump — verify against the full file before editing this function.
195 def parse_feed_entry(line):
196 parts = line.strip().split()
197 if parts[0].startswith("src-git"):
# Feed URLs may carry a branch after ';', e.g. "proto://host/repo;branch".
199 url = parts[2].strip().split(';')
200 branch = url[1] if len(url) > 1 else 'master'
201 feedbranches[url[0]] = branch
202 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Resolve the SDK's base feed via make, then parse feeds.conf.default;
# every src-git feed becomes a GitPoller change source.
204 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
205 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
# NOTE(review): the loops driving these reads are on lines missing from this dump.
207 line = make.stdout.readline()
209 parse_feed_entry(str(line, 'utf-8'))
211 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
213 parse_feed_entry(line)
# Refuse to start with no change sources at all.
215 if len(c['change_source']) == 0:
216 log.err("FATAL ERROR: no change_sources defined, aborting!")
221 # Configure the Schedulers, which decide how to react to incoming changes. In this
222 # case, just kick off a 'basebuild' build
# Only changes on the branch each feed was registered with trigger builds.
225 c['schedulers'].append(SingleBranchScheduler(
227 change_filter = filter.ChangeFilter(
228 filter_fn = lambda change: change.branch == feedbranches[change.repository]
230 treeStableTimer = 60,
231 builderNames = archnames))
# Manual trigger via the web UI; architecture selectable below.
233 c['schedulers'].append(ForceScheduler(
235 buttonName = "Force builds",
236 label = "Force build details",
237 builderNames = [ "00_force_build" ],
240 util.CodebaseParameter(
242 label = "Repository",
243 branch = util.FixedParameter(name = "branch", default = ""),
244 revision = util.FixedParameter(name = "revision", default = ""),
245 repository = util.FixedParameter(name = "repository", default = ""),
246 project = util.FixedParameter(name = "project", default = "")
250 reason = util.StringParameter(
253 default = "Trigger build",
259 util.NestedParameter(
261 label="Build Options",
264 util.ChoiceStringParameter(
265 name = "architecture",
266 label = "Build architecture",
268 choices = [ "all" ] + archnames
277 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
278 # what steps, and which workers can execute them. Note that any particular build will
279 # only take place on one worker.
# Renderer: map a release_version property like "21.02.3" or "21.02-SNAPSHOT"
# to a directory suffix "-21.02"; presumably returns "" otherwise (the guard
# on the regex match and the fallback return are on lines missing from this
# dump — TODO confirm against the full file).
282 def GetDirectorySuffix(props):
283 verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
284 if props.hasProperty("release_version"):
285 m = verpat.match(props["release_version"])
287 return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
    """Renderer: how many parallel make jobs this build may use.

    Divides the worker's reported CPU count (the 'nproc' property) by the
    number of concurrent builds allowed on that worker (max_builds), so
    concurrent builds share the machine fairly.

    Returns the job count as a string (it is interpolated into "-j%s").
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # BUG FIX: use floor division — "/" is true division in Python 3
        # and produced values like "4.0", which make rejects for -j.
        return str(int(props["nproc"]) // max_builds[props["workername"]])
    else:
        # NOTE(review): fallback reconstructed (original line not in this
        # dump); a single job is the safe default when properties are absent.
        return "1"
# NOTE(review): orphaned body of a property renderer (per the surrounding
# code, likely "GetCwd") whose `def` line is missing from this dump.
# Prefers the 'builddir' property, then 'workdir'; the final fallback
# branch is also not visible here — confirm against the full file.
299 if props.hasProperty("builddir"):
300 return props["builddir"]
301 elif props.hasProperty("workdir"):
302 return props["workdir"]
# Factory for a doStepIf predicate: the returned closure lets a step run
# unless the force-build form selected a specific architecture other than
# `target`. NOTE(review): the try/except around getProperty and the
# explicit return values are on lines missing from this dump.
306 def IsArchitectureSelected(target):
307 def CheckArchitectureProperty(step):
309 options = step.getProperty("options")
310 if type(options) is dict:
# "all" (the default) means: run for every architecture.
311 selected_arch = options.get("architecture", "all")
312 if selected_arch != "all" and selected_arch != target:
319 return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    seckey  -- base64-encoded usign secret key material
    comment -- "untrusted comment:" header line; a trailing "secret key"
               is rewritten to "public key"

    Returns the two-line public key text, or None when the input is not
    valid base64 (reconstructed guard — the original try/except lines are
    not visible in this dump).
    """
    try:
        rawkey = base64.b64decode(seckey)
    except Exception:
        return None

    # Public portion = algorithm tag (2 bytes) + key id (8 bytes) + public key data.
    # BUG FIX: b64encode() returns bytes; the original interpolated the bytes
    # object directly, writing "b'...'" into the generated key-build.pub.
    pubdata = base64.b64encode(rawkey[0:2] + rawkey[32:40] + rawkey[72:]).decode()
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubdata)
def IsSharedWorkdir(step):
    """doStepIf helper: report whether this build's worker runs with a
    shared work directory (the 'shared_wd' worker property)."""
    shared = step.getProperty("shared_wd")
    if shared:
        return True
    return False
# Coroutine used by prioritizeBuilders below. Several statements (guards,
# the early returns, the path tuple of the second data.get) are on lines
# missing from this dump — do not edit without the full file.
333 @defer.inlineCallbacks
334 def getNewestCompleteTime(bldr):
335 """Returns the complete_at of the latest completed and not SKIPPED
336 build request for this builder, or None if there are no such build
337 requests. We need to filter out SKIPPED requests because we're
338 using collapseRequests=True which is unfortunately marking all
339 previous requests as complete when new buildset is created.
341 @returns: datetime instance or None, via Deferred
# Newest completed, non-skipped build request for this builder.
344 bldrid = yield bldr.getBuilderId()
345 completed = yield bldr.master.data.get(
346 ('builders', bldrid, 'buildrequests'),
348 resultspec.Filter('complete', 'eq', [True]),
349 resultspec.Filter('results', 'ne', [results.SKIPPED]),
351 order=['-complete_at'], limit=1)
355 complete_at = completed[0]['complete_at']
# Also consider the most recently started build; its completion time may
# be newer than the newest completed build request's.
357 last_build = yield bldr.master.data.get(
360 resultspec.Filter('builderid', 'eq', [bldrid]),
362 order=['-started_at'], limit=1)
364 if last_build and last_build[0]:
365 last_complete_at = last_build[0]['complete_at']
366 if last_complete_at and (last_complete_at > complete_at):
367 return last_complete_at
# Builder priority hook: builders idle the longest get scheduled first.
# NOTE(review): the bldr_info/bldr_sort definitions and the branches that
# pick `date` are partially on lines missing from this dump.
371 @defer.inlineCallbacks
372 def prioritizeBuilders(master, builders):
373 """Returns sorted list of builders by their last timestamp of completed and
376 @returns: list of sorted builders
# A builder currently running (or cleaning up) a build is de-prioritized.
379 def is_building(bldr):
380 return bool(bldr.building) or bool(bldr.old_building)
383 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
384 d.addCallback(lambda complete_at: (complete_at, bldr))
388 (complete_at, bldr) = item
# `date` is normalized to an aware UTC datetime for safe comparison.
392 complete_at = date.replace(tzinfo=tzutc())
394 if is_building(bldr):
396 complete_at = date.replace(tzinfo=tzutc())
398 return (complete_at, bldr.name)
# Gather (complete_at, builder) pairs concurrently, then sort oldest-first.
400 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
401 results.sort(key=bldr_sort)
404 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
406 return [r[1] for r in results]
408 c['prioritizeBuilders'] = prioritizeBuilders
# Per-worker lock serializing download steps.
411 dlLock = locks.WorkerLock("worker_dl")
# Collect all registered worker names; every builder may run on any worker.
415 for worker in c['workers']:
416 workerNames.append(worker.workername)
# The force builder only triggers the per-architecture builders (see the
# Trigger steps appended to it further below).
418 force_factory = BuildFactory()
420 c['builders'].append(BuilderConfig(
421 name = "00_force_build",
422 workernames = workerNames,
423 factory = force_factory))
426 ts = arch[1].split('/')
428 factory = BuildFactory()
430 # setup shared work directory if required
431 factory.addStep(ShellCommand(
433 description = "Setting up shared work directory",
434 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
436 haltOnFailure = True,
437 doStepIf = IsSharedWorkdir))
439 # find number of cores
440 factory.addStep(SetPropertyFromCommand(
443 description = "Finding number of CPUs",
444 command = ["nproc"]))
447 factory.addStep(FileDownload(
448 mastersrc = scripts_dir + '/cleanup.sh',
449 workerdest = "../cleanup.sh",
453 factory.addStep(ShellCommand(
455 description = "Cleaning previous builds",
456 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
458 haltOnFailure = True,
461 factory.addStep(ShellCommand(
463 description = "Cleaning work area",
464 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
466 haltOnFailure = True,
469 factory.addStep(ShellCommand(
471 description = "Preparing SDK directory",
472 command = ["mkdir", "-p", "sdk"],
473 haltOnFailure = True))
475 factory.addStep(ShellCommand(
476 name = "downloadsdk",
477 description = "Downloading SDK archive",
478 command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
479 env={'RSYNC_PASSWORD': rsync_sdk_key},
480 haltOnFailure = True,
483 factory.addStep(ShellCommand(
485 description = "Unpacking SDK archive",
486 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
487 haltOnFailure = True))
489 factory.addStep(ShellCommand(
491 description = "Updating SDK",
492 command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
493 haltOnFailure = True))
495 factory.addStep(ShellCommand(
496 name = "cleancmdlinks",
497 description = "Sanitizing host command symlinks",
498 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
499 haltOnFailure = True))
501 factory.addStep(StringDownload(
502 name = "writeversionmk",
503 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
504 workerdest = "sdk/getversion.mk",
507 factory.addStep(SetPropertyFromCommand(
509 property = "release_version",
510 description = "Finding SDK release version",
511 workdir = "build/sdk",
512 command = ["make", "-f", "getversion.mk"]))
515 if usign_key is not None:
516 factory.addStep(StringDownload(
517 name = "dlkeybuildpub",
518 s = UsignSec2Pub(usign_key, usign_comment),
519 workerdest = "sdk/key-build.pub",
522 factory.addStep(StringDownload(
524 s = "# fake private key",
525 workerdest = "sdk/key-build",
528 factory.addStep(StringDownload(
529 name = "dlkeybuilducert",
530 s = "# fake certificate",
531 workerdest = "sdk/key-build.ucert",
534 factory.addStep(ShellCommand(
536 description = "Preparing download directory",
537 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
538 haltOnFailure = True))
540 factory.addStep(ShellCommand(
542 description = "Preparing SDK configuration",
543 workdir = "build/sdk",
544 command = ["sh", "-c", "rm -f .config && make defconfig"]))
546 factory.addStep(FileDownload(
547 mastersrc = scripts_dir + '/ccache.sh',
548 workerdest = 'sdk/ccache.sh',
551 factory.addStep(ShellCommand(
553 description = "Preparing ccache",
554 workdir = "build/sdk",
555 command = ["./ccache.sh"],
556 haltOnFailure = True))
559 factory.addStep(StringDownload(
560 name = "dlgitclonekey",
562 workerdest = "../git-clone.key",
565 factory.addStep(ShellCommand(
566 name = "patchfeedsconf",
567 description = "Patching feeds.conf",
568 workdir = "build/sdk",
569 command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
570 haltOnFailure = True))
572 factory.addStep(ShellCommand(
573 name = "updatefeeds",
574 description = "Updating feeds",
575 workdir = "build/sdk",
576 command = ["./scripts/feeds", "update", "-f"],
577 env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
578 haltOnFailure = True))
581 factory.addStep(ShellCommand(
582 name = "rmfeedsconf",
583 description = "Removing feeds.conf",
584 workdir = "build/sdk",
585 command=["rm", "feeds.conf"],
586 haltOnFailure = True))
588 factory.addStep(ShellCommand(
589 name = "installfeeds",
590 description = "Installing feeds",
591 workdir = "build/sdk",
592 command = ["./scripts/feeds", "install", "-a"],
593 haltOnFailure = True))
595 factory.addStep(ShellCommand(
597 description = "Clearing failure logs",
598 workdir = "build/sdk",
599 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
600 haltOnFailure = False,
601 flunkOnFailure = False,
602 warnOnFailure = True,
605 factory.addStep(ShellCommand(
607 description = "Building packages",
608 workdir = "build/sdk",
610 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
611 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
612 haltOnFailure = True))
614 factory.addStep(ShellCommand(
615 name = "mkfeedsconf",
616 description = "Generating pinned feeds.conf",
617 workdir = "build/sdk",
618 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
620 factory.addStep(ShellCommand(
622 description = "Calculating checksums",
623 descriptionDone="Checksums calculated",
624 workdir = "build/sdk",
625 command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums)",
629 if ini.has_option("gpg", "key") or usign_key is not None:
630 factory.addStep(MasterShellCommand(
631 name = "signprepare",
632 description = "Preparing temporary signing directory",
633 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
637 factory.addStep(ShellCommand(
639 description = "Packing files to sign",
640 workdir = "build/sdk",
641 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
645 factory.addStep(FileUpload(
646 workersrc = "sdk/sign.tar.gz",
647 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
651 factory.addStep(MasterShellCommand(
653 description = "Signing files",
654 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
655 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
659 factory.addStep(FileDownload(
660 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
661 workerdest = "sdk/sign.tar.gz",
665 factory.addStep(ShellCommand(
667 description = "Unpacking signed files",
668 workdir = "build/sdk",
669 command = ["tar", "-xzf", "sign.tar.gz"],
673 # download remote sha256sums to 'target-sha256sums'
674 factory.addStep(ShellCommand(
675 name = "target-sha256sums",
676 description = "Fetching remote sha256sums for arch",
677 command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
678 env={'RSYNC_PASSWORD': rsync_bin_key},
680 haltOnFailure = False,
681 flunkOnFailure = False,
682 warnOnFailure = False,
685 factory.addStep(FileDownload(
687 mastersrc = scripts_dir + "/rsync.sh",
688 workerdest = "../rsync.sh",
692 factory.addStep(FileDownload(
693 name = "dlsha2rsyncpl",
694 mastersrc = "sha2rsync.pl",
695 workerdest = "../sha2rsync.pl",
699 factory.addStep(ShellCommand(
701 description = "Building list of files to upload",
702 workdir = "build/sdk",
703 command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
704 haltOnFailure = True,
707 factory.addStep(ShellCommand(
708 name = "uploadprepare",
709 description = "Preparing package directory",
710 workdir = "build/sdk",
711 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
712 env={'RSYNC_PASSWORD': rsync_bin_key},
713 haltOnFailure = True,
717 factory.addStep(ShellCommand(
718 name = "packageupload",
719 description = "Uploading package files",
720 workdir = "build/sdk",
721 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
722 env={'RSYNC_PASSWORD': rsync_bin_key},
723 haltOnFailure = True,
727 factory.addStep(ShellCommand(
728 name = "packageprune",
729 description = "Pruning package files",
730 workdir = "build/sdk",
731 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
732 env={'RSYNC_PASSWORD': rsync_bin_key},
733 haltOnFailure = True,
737 factory.addStep(ShellCommand(
739 description = "Preparing log directory",
740 workdir = "build/sdk",
741 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
742 env={'RSYNC_PASSWORD': rsync_bin_key},
743 haltOnFailure = True,
747 factory.addStep(ShellCommand(
749 description = "Finding failure logs",
750 workdir = "build/sdk/logs/package/feeds",
751 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
752 haltOnFailure = False,
753 flunkOnFailure = False,
754 warnOnFailure = True,
757 factory.addStep(ShellCommand(
759 description = "Collecting failure logs",
760 workdir = "build/sdk",
761 command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
762 haltOnFailure = False,
763 flunkOnFailure = False,
764 warnOnFailure = True,
767 factory.addStep(ShellCommand(
769 description = "Uploading failure logs",
770 workdir = "build/sdk",
771 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
772 env={'RSYNC_PASSWORD': rsync_bin_key},
773 haltOnFailure = False,
774 flunkOnFailure = False,
775 warnOnFailure = True,
779 if rsync_src_url is not None:
780 factory.addStep(ShellCommand(
782 description = "Finding source archives to upload",
783 workdir = "build/sdk",
784 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
788 factory.addStep(ShellCommand(
789 name = "sourceupload",
790 description = "Uploading source archives",
791 workdir = "build/sdk",
792 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
793 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
794 env={'RSYNC_PASSWORD': rsync_src_key},
795 haltOnFailure = False,
796 flunkOnFailure = False,
797 warnOnFailure = True,
801 factory.addStep(ShellCommand(
803 description = "Reporting disk usage",
804 command=["df", "-h", "."],
806 haltOnFailure = False,
807 flunkOnFailure = False,
808 warnOnFailure = False,
812 factory.addStep(ShellCommand(
814 description = "Reporting estimated file space usage",
815 command=["du", "-sh", "."],
817 haltOnFailure = False,
818 flunkOnFailure = False,
819 warnOnFailure = False,
823 factory.addStep(ShellCommand(
825 description = "Reporting ccache stats",
826 command=["ccache", "-s"],
828 haltOnFailure = False,
829 flunkOnFailure = False,
830 warnOnFailure = False,
834 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
836 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
837 force_factory.addStep(steps.Trigger(
838 name = "trigger_%s" % arch[0],
839 description = "Triggering %s build" % arch[0],
840 schedulerNames = [ "trigger_%s" % arch[0] ],
841 set_properties = { "reason": Property("reason") },
842 doStepIf = IsArchitectureSelected(arch[0])
845 ####### STATUS arches
847 # 'status' is a list of Status arches. The results of each build will be
848 # pushed to these arches. buildbot/status/*.py has a variety to choose from,
849 # including web pages, email senders, and IRC bots.
# NOTE(review): the "c['www'] = {" opener for this dict literal is on a line
# missing from this dump.
851 if ini.has_option("phase2", "status_bind"):
853 'port': ini.get("phase2", "status_bind"),
855 'waterfall_view': True,
856 'console_view': True,
# Optional basic auth for the web UI; the configured user gets the
# "admins" role and full control-endpoint access.
861 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
862 c['www']['auth'] = util.UserPasswordAuth([
863 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
865 c['www']['authz'] = util.Authz(
866 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
867 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
870 ####### PROJECT IDENTITY
872 # the 'title' string will appear at the top of this buildbot
873 # installation's html.WebStatus home page (linked to the
874 # 'titleURL') and is embedded in the title of the waterfall HTML page.
876 c['title'] = ini.get("general", "title")
877 c['titleURL'] = ini.get("general", "title_url")
879 # the 'buildbotURL' string should point to the location where the buildbot's
880 # internal web server (usually the html.WebStatus page) is visible. This
881 # typically uses the port number set in the Waterfall 'status' entry, but
882 # with an externally-visible host name which the buildbot cannot figure out
885 c['buildbotURL'] = buildbot_url
890 # This specifies what database buildbot uses to store its state. You can leave
891 # this at its default for all but the largest installations.
# NOTE(review): the "c['db'] = {" opener for this entry is on a missing line.
892 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymized usage data to the buildbot project.
895 c['buildbotNetUsageData'] = None