2 # ex: set syntax=python:
9 from buildbot import locks
11 # This is a sample buildmaster config file. It must be installed as
12 # 'master.cfg' in your buildmaster's base directory.
# All deployment-specific settings (slaves, URLs, rsync/gpg credentials, …)
# live in config.ini next to this master.cfg; parse it once up front.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")
17 # This is the dictionary that the buildmaster pays attention to. We also use
18 # a shorter alias to save typing.
19 c = BuildmasterConfig = {}
21 ####### PROJECT IDENTITY
23 # the 'title' string will appear at the top of this buildbot
24 # installation's html.WebStatus home page (linked to the
25 # 'titleURL') and is embedded in the title of the waterfall HTML page.
27 c['title'] = ini.get("general", "title")
28 c['titleURL'] = ini.get("general", "title_url")
30 # the 'buildbotURL' string should point to the location where the buildbot's
31 # internal web server (usually the html.WebStatus page) is visible. This
32 # typically uses the port number set in the Waterfall 'status' entry, but
33 # with an externally-visible host name which the buildbot cannot figure out
36 c['buildbotURL'] = ini.get("general", "buildbot_url")
40 # The 'slaves' list defines the set of recognized buildslaves. Each element is
41 # a BuildSlave object, specifying a unique slave name and password. The same
42 # slave name and password must be configured on the slave.
43 from buildbot.buildslave import BuildSlave
47 if ini.has_option("general", "port"):
48 slave_port = ini.getint("general", "port")
# Register every "[slave <id>]" section from config.ini as a BuildSlave.
# Each section must provide at least "name" and "password"; the optional
# keys tune per-slave behaviour and are exported as slave properties:
#   builds    - max concurrent builds on this slave (default 1)
#   cleanup   - boolean, request full cleanup before building
#   dl_lock   - named master lock serializing downloads
#   ul_lock   - named master lock serializing uploads
#   shared_wd - boolean, slave shares one working directory (forces builds=1)
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Default before the optional "builds" override; the local is
            # also read below (shared_wd check, BuildSlave kwarg), so it
            # must be bound even when the option is absent.
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # Fixed copy-paste bug: this branch previously fetched the
                # "dl_lock" option again, so a configured upload lock was
                # silently replaced by the download lock's name.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
82 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
83 # This must match the value configured into the buildslaves (with their
85 c['slavePortnum'] = slave_port
88 c['mergeRequests'] = True
90 # Reduce amount of backlog data
91 c['buildHorizon'] = 30
96 home_dir = os.path.abspath(ini.get("general", "homedir"))
104 if ini.has_option("general", "expire"):
105 tree_expire = ini.getint("general", "expire")
107 if ini.has_option("general", "other_builds"):
108 other_builds = ini.getint("general", "other_builds")
110 if ini.has_option("general", "cc_version"):
111 cc_version = ini.get("general", "cc_version").split()
112 if len(cc_version) == 1:
113 cc_version = ["eq", cc_version[0]]
115 repo_url = ini.get("repo", "url")
116 repo_branch = "master"
118 if ini.has_option("repo", "branch"):
119 repo_branch = ini.get("repo", "branch")
121 rsync_bin_url = ini.get("rsync", "binary_url")
122 rsync_bin_key = ini.get("rsync", "binary_password")
127 if ini.has_option("rsync", "source_url"):
128 rsync_src_url = ini.get("rsync", "source_url")
129 rsync_src_key = ini.get("rsync", "source_password")
131 rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]
133 gpg_home = "~/.gnupg"
135 gpg_comment = "Unattended build signature"
136 gpg_passfile = "/dev/null"
138 if ini.has_option("gpg", "home"):
139 gpg_home = ini.get("gpg", "home")
141 if ini.has_option("gpg", "keyid"):
142 gpg_keyid = ini.get("gpg", "keyid")
144 if ini.has_option("gpg", "comment"):
145 gpg_comment = ini.get("gpg", "comment")
147 if ini.has_option("gpg", "passfile"):
148 gpg_passfile = ini.get("gpg", "passfile")
150 enable_kmod_archive = True
156 if not os.path.isdir(home_dir+'/source.git'):
157 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
159 subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
161 findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
162 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
165 line = findtargets.stdout.readline()
168 ta = line.strip().split(' ')
169 targets.append(ta[0])
172 # the 'change_source' setting tells the buildmaster how it should find out
173 # about source code changes. Here we point to the buildbot clone of pyflakes.
175 from buildbot.changes.gitpoller import GitPoller
176 c['change_source'] = []
177 c['change_source'].append(GitPoller(
179 workdir=home_dir+'/work.git', branch=repo_branch,
184 # Configure the Schedulers, which decide how to react to incoming changes. In this
185 # case, just kick off a 'basebuild' build
187 from buildbot.schedulers.basic import SingleBranchScheduler
188 from buildbot.schedulers.forcesched import ForceScheduler
189 from buildbot.changes import filter
191 c['schedulers'].append(SingleBranchScheduler(
193 change_filter=filter.ChangeFilter(branch=repo_branch),
195 builderNames=targets))
197 c['schedulers'].append(ForceScheduler(
199 builderNames=targets))
203 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
204 # what steps, and which slaves can execute them. Note that any particular build will
205 # only take place on one slave.
207 from buildbot.process.factory import BuildFactory
208 from buildbot.steps.source.git import Git
209 from buildbot.steps.shell import ShellCommand
210 from buildbot.steps.shell import SetPropertyFromCommand
211 from buildbot.steps.transfer import FileUpload
212 from buildbot.steps.transfer import FileDownload
213 from buildbot.steps.transfer import StringDownload
214 from buildbot.steps.master import MasterShellCommand
215 from buildbot.process.properties import Interpolate
216 from buildbot.process import properties
220 [ "tools", "tools/clean" ],
221 [ "chain", "toolchain/clean" ],
222 [ "linux", "target/linux/clean" ],
223 [ "dir", "dirclean" ],
224 [ "dist", "distclean" ]
def IsMakeCleanRequested(pattern):
    # Factory for doStepIf predicates: the returned closure fires when the
    # build's "clean" property matches the given regex pattern (one of the
    # keys in CleanTargetMap, e.g. "tools", "dist").
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        if val and re.match(pattern, val):
    return CheckCleanProperty
237 def IsCleanupRequested(step):
238 shared_wd = step.getProperty("shared_wd")
241 do_cleanup = step.getProperty("do_cleanup")
247 def IsExpireRequested(step):
248 shared_wd = step.getProperty("shared_wd")
252 return not IsCleanupRequested(step)
254 def IsGitFreshRequested(step):
255 do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
    # Complement of IsGitFreshRequested: a build gets the plain "clean"
    # checkout method exactly when the aggressive "fresh" one was not asked.
    if IsGitFreshRequested(step):
        return False
    return True
264 def IsTaggingRequested(step):
265 val = step.getProperty("tag")
266 if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
    # Inverse of IsTaggingRequested: gates steps that must be skipped on
    # tagged release builds.
    if IsTaggingRequested(step):
        return False
    return True
def IsNoMasterBuild(step):
    # doStepIf predicate: true whenever the configured repo branch is
    # anything other than "master" (the step argument is unused).
    is_master = repo_branch == "master"
    return not is_master
277 def GetBaseVersion():
278 if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
279 return repo_branch.split('-')[1]
284 def GetVersionPrefix(props):
285 basever = GetBaseVersion()
286 if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
287 return "%s/" % props["tag"]
288 elif basever != "master":
289 return "%s-SNAPSHOT/" % basever
294 def GetNumJobs(props):
295 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
296 return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
302 if props.hasProperty("cc_command"):
303 return props["cc_command"]
309 if props.hasProperty("cxx_command"):
310 return props["cxx_command"]
316 if props.hasProperty("builddir"):
317 return props["builddir"]
318 elif props.hasProperty("workdir"):
319 return props["workdir"]
324 def GetCCache(props):
325 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
326 return props["ccache_command"]
330 def GetNextBuild(builder, requests):
332 if r.properties and r.properties.hasProperty("tag"):
336 def MakeEnv(overrides=None, tryccache=False):
338 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
339 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
342 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
343 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
344 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
346 env['CC'] = env['CCC']
347 env['CXX'] = env['CCXX']
349 if overrides is not None:
350 env.update(overrides)
354 def NetLockDl(props):
356 if props.hasProperty("dl_lock"):
357 lock = NetLocks[props["dl_lock"]]
359 return [lock.access('exclusive')]
364 def NetLockUl(props):
366 if props.hasProperty("ul_lock"):
367 lock = NetLocks[props["ul_lock"]]
369 return [lock.access('exclusive')]
375 dlLock = locks.SlaveLock("slave_dl")
377 checkBuiltin = re.sub('[\t\n ]+', ' ', """
379 local symbol op path file;
380 for file in $CHANGED_FILES; do
386 while read symbol op path; do
387 case "$symbol" in package-*)
388 symbol="${symbol##*(}";
389 symbol="${symbol%)}";
390 for file in $CHANGED_FILES; do
391 case "$file" in "package/$path/"*)
392 grep -qsx "$symbol=y" .config && return 0
396 done < tmp/.packagedeps;
402 class IfBuiltinShellCommand(ShellCommand):
403 def _quote(self, str):
404 if re.search("[^a-zA-Z0-9/_.-]", str):
405 return "'%s'" %(re.sub("'", "'\"'\"'", str))
408 def setCommand(self, command):
409 if not isinstance(command, (str, unicode)):
410 command = ' '.join(map(self._quote, command))
413 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
416 def setupEnvironment(self, cmd):
417 slaveEnv = self.slaveEnvironment
421 for request in self.build.requests:
422 for source in request.sources:
423 for change in source.changes:
424 for file in change.files:
425 changedFiles[file] = True
426 fullSlaveEnv = slaveEnv.copy()
427 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
428 cmd.args['env'] = fullSlaveEnv
432 for slave in c['slaves']:
433 slaveNames.append(slave.slavename)
435 for target in targets:
436 ts = target.split('/')
438 factory = BuildFactory()
440 # find number of cores
441 factory.addStep(SetPropertyFromCommand(
444 description = "Finding number of CPUs",
445 command = ["nproc"]))
447 # find gcc and g++ compilers
448 if cc_version is not None:
449 factory.addStep(FileDownload(
450 name = "dlfindbinpl",
451 mastersrc = "findbin.pl",
452 slavedest = "../findbin.pl",
455 factory.addStep(SetPropertyFromCommand(
457 property = "cc_command",
458 description = "Finding gcc command",
459 command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
460 haltOnFailure = True))
462 factory.addStep(SetPropertyFromCommand(
464 property = "cxx_command",
465 description = "Finding g++ command",
466 command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
467 haltOnFailure = True))
469 # see if ccache is available
470 factory.addStep(SetPropertyFromCommand(
471 property = "ccache_command",
472 command = ["which", "ccache"],
473 description = "Testing for ccache command",
474 haltOnFailure = False,
475 flunkOnFailure = False,
476 warnOnFailure = False,
479 # expire tree if needed
481 factory.addStep(FileDownload(
483 doStepIf = IsExpireRequested,
484 mastersrc = "expire.sh",
485 slavedest = "../expire.sh",
488 factory.addStep(ShellCommand(
490 description = "Checking for build tree expiry",
491 command = ["./expire.sh", str(tree_expire)],
493 haltOnFailure = True,
494 doStepIf = IsExpireRequested,
497 # cleanup.sh if needed
498 factory.addStep(FileDownload(
499 name = "dlcleanupsh",
500 mastersrc = "cleanup.sh",
501 slavedest = "../cleanup.sh",
503 doStepIf = IsCleanupRequested))
505 factory.addStep(ShellCommand(
507 description = "Cleaning previous builds",
508 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
510 haltOnFailure = True,
511 doStepIf = IsCleanupRequested,
514 factory.addStep(ShellCommand(
516 description = "Cleaning work area",
517 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
519 haltOnFailure = True,
520 doStepIf = IsCleanupRequested,
523 # user-requested clean targets
524 for tuple in CleanTargetMap:
525 factory.addStep(ShellCommand(
527 description = 'User-requested "make %s"' % tuple[1],
528 command = ["make", tuple[1], "V=s"],
530 doStepIf = IsMakeCleanRequested(tuple[0])
533 # check out the source
535 # if repo doesn't exist: 'git clone repourl'
536 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
537 # 'git fetch -t repourl branch; git reset --hard revision'
538 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
539 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
543 branch = repo_branch,
546 haltOnFailure = True,
547 doStepIf = IsGitCleanRequested,
553 branch = repo_branch,
556 haltOnFailure = True,
557 doStepIf = IsGitFreshRequested,
561 factory.addStep(ShellCommand(
563 description = "Fetching Git remote refs",
564 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
569 factory.addStep(ShellCommand(
571 description = "Checking out Git tag",
572 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
573 haltOnFailure = True,
574 doStepIf = IsTaggingRequested
577 factory.addStep(ShellCommand(
579 description = "Remove tmp folder",
580 command=["rm", "-rf", "tmp/"]))
583 # factory.addStep(ShellCommand(
584 # name = "feedsconf",
585 # description = "Copy the feeds.conf",
586 # command='''cp ~/feeds.conf ./feeds.conf''' ))
589 factory.addStep(ShellCommand(
590 name = "rmfeedlinks",
591 description = "Remove feed symlinks",
592 command=["rm", "-rf", "package/feeds/"]))
594 factory.addStep(StringDownload(
596 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
597 slavedest = "../ccache_cc.sh",
601 factory.addStep(StringDownload(
603 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
604 slavedest = "../ccache_cxx.sh",
609 factory.addStep(ShellCommand(
610 name = "updatefeeds",
611 description = "Updating feeds",
612 command=["./scripts/feeds", "update"],
613 env = MakeEnv(tryccache=True),
617 factory.addStep(ShellCommand(
618 name = "installfeeds",
619 description = "Installing feeds",
620 command=["./scripts/feeds", "install", "-a"],
621 env = MakeEnv(tryccache=True)))
624 factory.addStep(FileDownload(
625 name = "dlconfigseed",
626 mastersrc = "config.seed",
627 slavedest = ".config",
632 factory.addStep(ShellCommand(
634 description = "Seeding .config",
635 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
638 factory.addStep(ShellCommand(
640 description = "Removing output directory",
641 command = ["rm", "-rf", "bin/"]
644 factory.addStep(ShellCommand(
646 description = "Populating .config",
647 command = ["make", "defconfig"],
652 factory.addStep(ShellCommand(
654 description = "Checking architecture",
655 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
663 factory.addStep(SetPropertyFromCommand(
666 description = "Finding libc suffix",
667 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
670 factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
671 factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
674 factory.addStep(ShellCommand(
676 description = "Preparing dl/",
677 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
683 factory.addStep(ShellCommand(
685 description = "Building and installing GNU tar",
686 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
687 env = MakeEnv(tryccache=True),
692 factory.addStep(ShellCommand(
694 description = "Populating dl/",
695 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
698 locks = [dlLock.access('exclusive')],
701 factory.addStep(ShellCommand(
703 description = "Cleaning base-files",
704 command=["make", "package/base-files/clean", "V=s"]
708 factory.addStep(ShellCommand(
710 description = "Building and installing tools",
711 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
712 env = MakeEnv(tryccache=True),
716 factory.addStep(ShellCommand(
718 description = "Building and installing toolchain",
719 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
724 factory.addStep(ShellCommand(
726 description = "Building kmods",
727 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
729 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
733 # find kernel version
734 factory.addStep(SetPropertyFromCommand(
735 name = "kernelversion",
736 property = "kernelversion",
737 description = "Finding the effective Kernel version",
738 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
739 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
742 factory.addStep(ShellCommand(
744 description = "Cleaning up package build",
745 command=["make", "package/cleanup", "V=s"]
748 factory.addStep(ShellCommand(
750 description = "Building packages",
751 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
753 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
757 # factory.addStep(IfBuiltinShellCommand(
758 factory.addStep(ShellCommand(
760 description = "Installing packages",
761 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
766 factory.addStep(ShellCommand(
768 description = "Indexing packages",
769 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
774 if enable_kmod_archive:
775 # embed kmod repository. Must happen before 'images'
777 # find rootfs staging directory
778 factory.addStep(SetPropertyFromCommand(
780 property = "stageroot",
781 description = "Finding the rootfs staging directory",
782 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
783 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
786 factory.addStep(ShellCommand(
788 description = "Creating file overlay directory",
789 command=["mkdir", "-p", "files/etc/opkg"],
793 factory.addStep(ShellCommand(
795 description = "Embedding kmod repository configuration",
796 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
797 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
801 #factory.addStep(IfBuiltinShellCommand(
802 factory.addStep(ShellCommand(
804 description = "Building and installing images",
805 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
810 factory.addStep(ShellCommand(
812 description = "Generating config.seed",
813 command=["make", "-j1", "diffconfig", "V=s"],
818 factory.addStep(ShellCommand(
820 description = "Calculating checksums",
821 command=["make", "-j1", "checksum", "V=s"],
826 if enable_kmod_archive:
827 factory.addStep(ShellCommand(
829 description = "Creating kmod directory",
830 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
834 factory.addStep(ShellCommand(
835 name = "kmodprepare",
836 description = "Preparing kmod archive",
837 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
838 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
839 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
843 factory.addStep(ShellCommand(
845 description = "Indexing kmod archive",
846 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
847 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
853 if gpg_keyid is not None:
854 factory.addStep(MasterShellCommand(
855 name = "signprepare",
856 description = "Preparing temporary signing directory",
857 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
861 factory.addStep(ShellCommand(
863 description = "Packing files to sign",
864 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
868 factory.addStep(FileUpload(
869 slavesrc = "sign.tar.gz",
870 masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
874 factory.addStep(MasterShellCommand(
876 description = "Signing files",
877 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
878 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
882 factory.addStep(FileDownload(
883 name = "dlsigntargz",
884 mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
885 slavedest = "sign.tar.gz",
889 factory.addStep(ShellCommand(
891 description = "Unpacking signed files",
892 command = ["tar", "-xzf", "sign.tar.gz"],
897 factory.addStep(ShellCommand(
899 description = "Preparing upload directory structure",
900 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
904 factory.addStep(ShellCommand(
905 name = "linkprepare",
906 description = "Preparing repository symlink",
907 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
908 doStepIf = IsNoMasterBuild,
912 if enable_kmod_archive:
913 factory.addStep(ShellCommand(
914 name = "kmoddirprepare",
915 description = "Preparing kmod archive upload directory",
916 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
920 factory.addStep(ShellCommand(
922 description = "Uploading directory structure",
923 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
924 env={'RSYNC_PASSWORD': rsync_bin_key},
925 haltOnFailure = True,
929 # download remote sha256sums to 'target-sha256sums'
930 factory.addStep(ShellCommand(
931 name = "target-sha256sums",
932 description = "Fetching remote sha256sums for target",
933 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/targets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1]), "target-sha256sums"],
934 env={'RSYNC_PASSWORD': rsync_bin_key},
936 haltOnFailure = False,
937 flunkOnFailure = False,
938 warnOnFailure = False,
941 # build list of files to upload
942 factory.addStep(FileDownload(
943 name = "dlsha2rsyncpl",
944 mastersrc = "sha2rsync.pl",
945 slavedest = "../sha2rsync.pl",
949 factory.addStep(ShellCommand(
951 description = "Building list of files to upload",
952 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
953 haltOnFailure = True,
956 factory.addStep(FileDownload(
958 mastersrc = "rsync.sh",
959 slavedest = "../rsync.sh",
963 # upload new files and update existing ones
964 factory.addStep(ShellCommand(
965 name = "targetupload",
966 description = "Uploading target files",
967 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
968 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
969 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
970 env={'RSYNC_PASSWORD': rsync_bin_key},
971 haltOnFailure = True,
975 # delete files which don't exist locally
976 factory.addStep(ShellCommand(
977 name = "targetprune",
978 description = "Pruning target files",
979 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
980 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
981 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
982 env={'RSYNC_PASSWORD': rsync_bin_key},
983 haltOnFailure = True,
987 if enable_kmod_archive:
988 factory.addStep(ShellCommand(
990 description = "Uploading kmod archive",
991 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
992 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
993 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
994 env={'RSYNC_PASSWORD': rsync_bin_key},
995 haltOnFailure = True,
999 if rsync_src_url is not None:
1000 factory.addStep(ShellCommand(
1001 name = "sourceupload",
1002 description = "Uploading source archives",
1003 command=["../rsync.sh", "--size-only", "--delay-updates"] + rsync_defopts +
1004 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1005 env={'RSYNC_PASSWORD': rsync_src_key},
1006 haltOnFailure = True,
1011 factory.addStep(ShellCommand(
1012 name = "packageupload",
1013 description = "Uploading package files",
1014 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1015 env={'RSYNC_PASSWORD': rsync_bin_key},
1016 haltOnFailure = False,
1022 factory.addStep(ShellCommand(
1024 description = "Uploading logs",
1025 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1026 env={'RSYNC_PASSWORD': rsync_bin_key},
1027 haltOnFailure = False,
1032 factory.addStep(ShellCommand(
1034 description = "Reporting disk usage",
1035 command=["df", "-h", "."],
1036 env={'LC_ALL': 'C'},
1037 haltOnFailure = False,
1041 factory.addStep(ShellCommand(
1042 name = "ccachestat",
1043 description = "Reporting ccache stats",
1044 command=["ccache", "-s"],
1045 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1046 want_stderr = False,
1047 haltOnFailure = False,
1048 flunkOnFailure = False,
1049 warnOnFailure = False,
1053 from buildbot.config import BuilderConfig
1055 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1058 ####### STATUS TARGETS
1060 # 'status' is a list of Status Targets. The results of each build will be
1061 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1062 # including web pages, email senders, and IRC bots.
1066 from buildbot.status import html
1067 from buildbot.status.web import authz, auth
1069 if ini.has_option("status", "bind"):
1070 if ini.has_option("status", "user") and ini.has_option("status", "password"):
1071 authz_cfg=authz.Authz(
1072 # change any of these to True to enable; see the manual for more
1074 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
1075 gracefulShutdown = 'auth',
1076 forceBuild = 'auth', # use this to test your slave once it is set up
1077 forceAllBuilds = 'auth',
1078 pingBuilder = False,
1080 stopAllBuilds = 'auth',
1081 cancelPendingBuild = 'auth',
1083 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
1085 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
1088 from buildbot.status import words
1090 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1091 irc_host = ini.get("irc", "host")
1093 irc_chan = ini.get("irc", "channel")
1094 irc_nick = ini.get("irc", "nickname")
1097 if ini.has_option("irc", "port"):
1098 irc_port = ini.getint("irc", "port")
1100 if ini.has_option("irc", "password"):
1101 irc_pass = ini.get("irc", "password")
1103 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1104 channels = [{ "channel": irc_chan }],
1107 'successToFailure': 1,
1108 'failureToSuccess': 1
1112 c['status'].append(irc)
1117 # This specifies what database buildbot uses to store its state. You can leave
1118 # this at its default for all but the largest installations.
1119 'db_url' : "sqlite:///state.sqlite",