phase1: regroup common rsync options
phase1/master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import subprocess
7 import ConfigParser
8
9 from buildbot import locks
10
11 # This is the buildmaster configuration for the phase1 builders. It must be
12 # installed as 'master.cfg' in the buildmaster's base directory.
13
14 ini = ConfigParser.ConfigParser()
15 ini.read("./config.ini")
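# For orientation, a minimal config.ini sketch (all values below are
# placeholders, not a recommendation; the section and option names are the
# ones this file actually reads further down):
#
#   [general]
#   title = example builds
#   title_url = http://buildbot.example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = .
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = upload@downloads.example.org::bin
#   binary_password = secret
#
#   [slave 1]
#   name = example-slave
#   password = secret
#   builds = 1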
16
17 # This is the dictionary that the buildmaster pays attention to. We also use
18 # a shorter alias to save typing.
19 c = BuildmasterConfig = {}
20
21 ####### PROJECT IDENTITY
22
23 # the 'title' string will appear at the top of this buildbot
24 # installation's html.WebStatus home page (linked to the
25 # 'titleURL') and is embedded in the title of the waterfall HTML page.
26
27 c['title'] = ini.get("general", "title")
28 c['titleURL'] = ini.get("general", "title_url")
29
30 # the 'buildbotURL' string should point to the location where the buildbot's
31 # internal web server (usually the html.WebStatus page) is visible. This
32 # typically uses the port number set in the Waterfall 'status' entry, but
33 # with an externally-visible host name which the buildbot cannot figure out
34 # without some help.
35
36 c['buildbotURL'] = ini.get("general", "buildbot_url")
37
38 ####### BUILDSLAVES
39
40 # The 'slaves' list defines the set of recognized buildslaves. Each element is
41 # a BuildSlave object, specifying a unique slave name and password. The same
42 # slave name and password must be configured on the slave.
43 from buildbot.buildslave import BuildSlave
44
45 slave_port = 9989
46
47 if ini.has_option("general", "port"):
48 slave_port = ini.getint("general", "port")
49
50 c['slaves'] = []
51 NetLocks = dict()
52
53 for section in ini.sections():
54 if section.startswith("slave "):
55 if ini.has_option(section, "name") and ini.has_option(section, "password"):
56 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
57 name = ini.get(section, "name")
58 password = ini.get(section, "password")
59 max_builds = 1
60 if ini.has_option(section, "builds"):
61 max_builds = ini.getint(section, "builds")
62 sl_props['max_builds'] = max_builds
63 if ini.has_option(section, "cleanup"):
64 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
65 if ini.has_option(section, "dl_lock"):
66 lockname = ini.get(section, "dl_lock")
67 sl_props['dl_lock'] = lockname
68 if lockname not in NetLocks:
69 NetLocks[lockname] = locks.MasterLock(lockname)
70 if ini.has_option(section, "ul_lock"):
71 lockname = ini.get(section, "ul_lock")
72 sl_props['ul_lock'] = lockname
73 if lockname not in NetLocks:
74 NetLocks[lockname] = locks.MasterLock(lockname)
75 if ini.has_option(section, "shared_wd"):
76 shared_wd = ini.getboolean(section, "shared_wd")
77 sl_props['shared_wd'] = shared_wd
78 if shared_wd and (max_builds != 1):
79 raise ValueError('max_builds must be 1 with shared workdir!')
80 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
81
82 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
83 # This must match the value configured into the buildslaves (with their
84 # --master option)
85 c['slavePortnum'] = slave_port
86
87 # coalesce builds
88 c['mergeRequests'] = True
89
90 # Reduce amount of backlog data
91 c['buildHorizon'] = 30
92 c['logHorizon'] = 20
93
94 ####### CHANGESOURCES
95
96 home_dir = os.path.abspath(ini.get("general", "homedir"))
97 tree_expire = 0
98 other_builds = 0
99 cc_version = None
100
101 cc_command = "gcc"
102 cxx_command = "g++"
103
104 if ini.has_option("general", "expire"):
105 tree_expire = ini.getint("general", "expire")
106
107 if ini.has_option("general", "other_builds"):
108 other_builds = ini.getint("general", "other_builds")
109
110 if ini.has_option("general", "cc_version"):
111 cc_version = ini.get("general", "cc_version").split()
112 if len(cc_version) == 1:
113 cc_version = ["eq", cc_version[0]]
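# A single value such as "7.3.0" is normalized to ["eq", "7.3.0"]; a two-token
# value keeps its first token as the comparison operator. Both elements are
# later passed to findbin.pl together with the compiler name.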
114
115 repo_url = ini.get("repo", "url")
116 repo_branch = "master"
117
118 if ini.has_option("repo", "branch"):
119 repo_branch = ini.get("repo", "branch")
120
121 rsync_bin_url = ini.get("rsync", "binary_url")
122 rsync_bin_key = ini.get("rsync", "binary_password")
123
124 rsync_src_url = None
125 rsync_src_key = None
126
127 if ini.has_option("rsync", "source_url"):
128 rsync_src_url = ini.get("rsync", "source_url")
129 rsync_src_key = ini.get("rsync", "source_password")
130
131 rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]
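# Options shared by every rsync invocation below: force IPv4, verbose output,
# and give up on stalled connections/transfers instead of hanging a build.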
132
133 gpg_home = "~/.gnupg"
134 gpg_keyid = None
135 gpg_comment = "Unattended build signature"
136 gpg_passfile = "/dev/null"
137
138 if ini.has_option("gpg", "home"):
139 gpg_home = ini.get("gpg", "home")
140
141 if ini.has_option("gpg", "keyid"):
142 gpg_keyid = ini.get("gpg", "keyid")
143
144 if ini.has_option("gpg", "comment"):
145 gpg_comment = ini.get("gpg", "comment")
146
147 if ini.has_option("gpg", "passfile"):
148 gpg_passfile = ini.get("gpg", "passfile")
149
150 enable_kmod_archive = True
151
152
153 # find targets
154 targets = [ ]
155
156 if not os.path.isdir(home_dir+'/source.git'):
157 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
158 else:
159 subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
160
161 findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
162 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
163
164 while True:
165 line = findtargets.stdout.readline()
166 if not line:
167 break
168 ta = line.strip().split(' ')
169 targets.append(ta[0])
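# Each line printed by "dumpinfo.pl targets" is expected to start with a
# "target/subtarget" token (e.g. "ar71xx/generic ..."); only that first token
# is kept and later used both as builder name and for the .config seeding.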
170
171
172 # the 'change_source' setting tells the buildmaster how it should find out
173 # about source code changes. Here we poll the Git repository configured in config.ini.
174
175 from buildbot.changes.gitpoller import GitPoller
176 c['change_source'] = []
177 c['change_source'].append(GitPoller(
178 repo_url,
179 workdir=home_dir+'/work.git', branch=repo_branch,
180 pollinterval=300))
181
182 ####### SCHEDULERS
183
184 # Configure the Schedulers, which decide how to react to incoming changes. In this
185 # case, kick off a build on every target builder whenever the tracked branch changes.
186
187 from buildbot.schedulers.basic import SingleBranchScheduler
188 from buildbot.schedulers.forcesched import ForceScheduler
189 from buildbot.changes import filter
190 c['schedulers'] = []
191 c['schedulers'].append(SingleBranchScheduler(
192 name="all",
193 change_filter=filter.ChangeFilter(branch=repo_branch),
194 treeStableTimer=60,
195 builderNames=targets))
196
197 c['schedulers'].append(ForceScheduler(
198 name="force",
199 builderNames=targets))
200
201 ####### BUILDERS
202
203 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
204 # what steps, and which slaves can execute them. Note that any particular build will
205 # only take place on one slave.
206
207 from buildbot.process.factory import BuildFactory
208 from buildbot.steps.source.git import Git
209 from buildbot.steps.shell import ShellCommand
210 from buildbot.steps.shell import SetPropertyFromCommand
211 from buildbot.steps.transfer import FileUpload
212 from buildbot.steps.transfer import FileDownload
213 from buildbot.steps.transfer import StringDownload
214 from buildbot.steps.master import MasterShellCommand
215 from buildbot.process.properties import Interpolate
216 from buildbot.process import properties
217
218
219 CleanTargetMap = [
220 [ "tools", "tools/clean" ],
221 [ "chain", "toolchain/clean" ],
222 [ "linux", "target/linux/clean" ],
223 [ "dir", "dirclean" ],
224 [ "dist", "distclean" ]
225 ]
226
227 def IsMakeCleanRequested(pattern):
228 def CheckCleanProperty(step):
229 val = step.getProperty("clean")
230 if val and re.match(pattern, val):
231 return True
232 else:
233 return False
234
235 return CheckCleanProperty
236
237 def IsCleanupRequested(step):
238 shared_wd = step.getProperty("shared_wd")
239 if shared_wd:
240 return False
241 do_cleanup = step.getProperty("do_cleanup")
242 if do_cleanup:
243 return True
244 else:
245 return False
246
247 def IsExpireRequested(step):
248 shared_wd = step.getProperty("shared_wd")
249 if shared_wd:
250 return False
251 else:
252 return not IsCleanupRequested(step)
253
254 def IsGitFreshRequested(step):
255 do_cleanup = step.getProperty("do_cleanup")
256 if do_cleanup:
257 return True
258 else:
259 return False
260
261 def IsGitCleanRequested(step):
262 return not IsGitFreshRequested(step)
263
264 def IsTaggingRequested(step):
265 val = step.getProperty("tag")
266 if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
267 return True
268 else:
269 return False
270
271 def IsNoTaggingRequested(step):
272 return not IsTaggingRequested(step)
273
274 def IsNoMasterBuild(step):
275 return repo_branch != "master"
276
277 def GetBaseVersion():
278 if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
279 return repo_branch.split('-')[1]
280 else:
281 return "master"
282
283 @properties.renderer
284 def GetVersionPrefix(props):
285 basever = GetBaseVersion()
286 if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
287 return "%s/" % props["tag"]
288 elif basever != "master":
289 return "%s-SNAPSHOT/" % basever
290 else:
291 return ""
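# Illustrative values: a tagged build (e.g. "tag" set to "17.01.4") uploads
# under "17.01.4/", a build on a branch named "<name>-17.01" under
# "17.01-SNAPSHOT/", and a master build directly into the top-level directories.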
292
293 @properties.renderer
294 def GetNumJobs(props):
295 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
296 return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
297 else:
298 return "1"
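# Example: nproc=8, max_builds=2 and other_builds=0 yields "-j4" for the make
# invocations below, i.e. the CPUs are split evenly between concurrent builds.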
299
300 @properties.renderer
301 def GetCC(props):
302 if props.hasProperty("cc_command"):
303 return props["cc_command"]
304 else:
305 return "gcc"
306
307 @properties.renderer
308 def GetCXX(props):
309 if props.hasProperty("cxx_command"):
310 return props["cxx_command"]
311 else:
312 return "g++"
313
314 @properties.renderer
315 def GetCwd(props):
316 if props.hasProperty("builddir"):
317 return props["builddir"]
318 elif props.hasProperty("workdir"):
319 return props["workdir"]
320 else:
321 return "/"
322
323 @properties.renderer
324 def GetCCache(props):
325 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
326 return props["ccache_command"]
327 else:
328 return ""
329
330 def GetNextBuild(builder, requests):
331 for r in requests:
332 if r.properties and r.properties.hasProperty("tag"):
333 return r
334 return requests[0]
335
336 def MakeEnv(overrides=None, tryccache=False):
337 if tryccache:
338 envcc = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
339 envcxx = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
340 envccache = Interpolate("%(kw:ccache)s", ccache=GetCCache)
341 envccc = Interpolate("%(kw:cc)s", cc=GetCC)
342 envccxx = Interpolate("%(kw:cxx)s", cxx=GetCXX)
343 else:
344 envcc = Interpolate("%(kw:cc)s", cc=GetCC)
345 envcxx = Interpolate("%(kw:cxx)s", cxx=GetCXX)
346 envccache = ""
347 envccc = ""
348 envccxx = ""
349 env = {
350 'CC': envcc,
351 'CXX': envcxx,
352 'CCACHE': envccache,
353 'CCC': envccc,
354 'CCXX': envccxx,
355 }
356 if overrides is not None:
357 env.update(overrides)
358 return env
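# With tryccache=True, CC/CXX point at the ccache_cc.sh / ccache_cxx.sh
# wrapper scripts installed further below, which exec "${CCACHE} ${CCC}" and
# "${CCACHE} ${CCXX}" respectively; when no ccache binary is found, CCACHE is
# empty and the wrappers degrade to calling the plain compiler.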
359
360 @properties.renderer
361 def NetLockDl(props):
362 lock = None
363 if props.hasProperty("dl_lock"):
364 lock = NetLocks[props["dl_lock"]]
365 if lock is not None:
366 return [lock.access('exclusive')]
367 else:
368 return []
369
370 @properties.renderer
371 def NetLockUl(props):
372 lock = None
373 if props.hasProperty("ul_lock"):
374 lock = NetLocks[props["ul_lock"]]
375 if lock is not None:
376 return [lock.access('exclusive')]
377 else:
378 return []
379
380 c['builders'] = []
381
382 dlLock = locks.SlaveLock("slave_dl")
383
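# checkBuiltin() succeeds (letting the wrapped command run) if the change set
# touches anything outside package/*/*, or if one of the touched packages is
# selected as built-in (=y) in .config according to tmp/.packagedeps;
# otherwise it returns 1 and the wrapped command is skipped.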
384 checkBuiltin = re.sub('[\t\n ]+', ' ', """
385 checkBuiltin() {
386 local symbol op path file;
387 for file in $CHANGED_FILES; do
388 case "$file" in
389 package/*/*) : ;;
390 *) return 0 ;;
391 esac;
392 done;
393 while read symbol op path; do
394 case "$symbol" in package-*)
395 symbol="${symbol##*(}";
396 symbol="${symbol%)}";
397 for file in $CHANGED_FILES; do
398 case "$file" in "package/$path/"*)
399 grep -qsx "$symbol=y" .config && return 0
400 ;; esac;
401 done;
402 esac;
403 done < tmp/.packagedeps;
404 return 1;
405 }
406 """).strip()
407
408
409 class IfBuiltinShellCommand(ShellCommand):
410 def _quote(self, str):
411 if re.search("[^a-zA-Z0-9/_.-]", str):
412 return "'%s'" %(re.sub("'", "'\"'\"'", str))
413 return str
414
415 def setCommand(self, command):
416 if not isinstance(command, (str, unicode)):
417 command = ' '.join(map(self._quote, command))
418 self.command = [
419 '/bin/sh', '-c',
420 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
421 ]
422
423 def setupEnvironment(self, cmd):
424 slaveEnv = self.slaveEnvironment
425 if slaveEnv is None:
426 slaveEnv = { }
427 changedFiles = { }
428 for request in self.build.requests:
429 for source in request.sources:
430 for change in source.changes:
431 for file in change.files:
432 changedFiles[file] = True
433 fullSlaveEnv = slaveEnv.copy()
434 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
435 cmd.args['env'] = fullSlaveEnv
436
437 slaveNames = [ ]
438
439 for slave in c['slaves']:
440 slaveNames.append(slave.slavename)
441
442 for target in targets:
443 ts = target.split('/')
444
445 factory = BuildFactory()
446
447 # find number of cores
448 factory.addStep(SetPropertyFromCommand(
449 name = "nproc",
450 property = "nproc",
451 description = "Finding number of CPUs",
452 command = ["nproc"]))
453
454 # find gcc and g++ compilers
455 if cc_version is not None:
456 factory.addStep(FileDownload(
457 name = "dlfindbinpl",
458 mastersrc = "findbin.pl",
459 slavedest = "../findbin.pl",
460 mode = 0755))
461
462 factory.addStep(SetPropertyFromCommand(
463 name = "gcc",
464 property = "cc_command",
465 description = "Finding gcc command",
466 command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
467 haltOnFailure = True))
468
469 factory.addStep(SetPropertyFromCommand(
470 name = "g++",
471 property = "cxx_command",
472 description = "Finding g++ command",
473 command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
474 haltOnFailure = True))
475
476 # see if ccache is available
477 factory.addStep(SetPropertyFromCommand(
478 property = "ccache_command",
479 command = ["which", "ccache"],
480 description = "Testing for ccache command",
481 haltOnFailure = False,
482 flunkOnFailure = False,
483 warnOnFailure = False,
484 ))
485
486 # expire tree if needed
487 if tree_expire > 0:
488 factory.addStep(FileDownload(
489 name = "dlexpiresh",
490 doStepIf = IsExpireRequested,
491 mastersrc = "expire.sh",
492 slavedest = "../expire.sh",
493 mode = 0755))
494
495 factory.addStep(ShellCommand(
496 name = "expire",
497 description = "Checking for build tree expiry",
498 command = ["./expire.sh", str(tree_expire)],
499 workdir = ".",
500 haltOnFailure = True,
501 doStepIf = IsExpireRequested,
502 timeout = 2400))
503
504 # cleanup.sh if needed
505 factory.addStep(FileDownload(
506 name = "dlcleanupsh",
507 mastersrc = "cleanup.sh",
508 slavedest = "../cleanup.sh",
509 mode = 0755,
510 doStepIf = IsCleanupRequested))
511
512 factory.addStep(ShellCommand(
513 name = "cleanold",
514 description = "Cleaning previous builds",
515 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
516 workdir = ".",
517 haltOnFailure = True,
518 doStepIf = IsCleanupRequested,
519 timeout = 2400))
520
521 factory.addStep(ShellCommand(
522 name = "cleanup",
523 description = "Cleaning work area",
524 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
525 workdir = ".",
526 haltOnFailure = True,
527 doStepIf = IsCleanupRequested,
528 timeout = 2400))
529
530 # user-requested clean targets
531 for tuple in CleanTargetMap:
532 factory.addStep(ShellCommand(
533 name = tuple[1],
534 description = 'User-requested "make %s"' % tuple[1],
535 command = ["make", tuple[1], "V=s"],
536 env = MakeEnv(),
537 doStepIf = IsMakeCleanRequested(tuple[0])
538 ))
539
540 # check out the source
541 # Git() runs:
542 # if repo doesn't exist: 'git clone repourl'
543 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only work with mode='full'
544 # 'git fetch -t repourl branch; git reset --hard revision'
545 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
546 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
547 factory.addStep(Git(
548 name = "gitclean",
549 repourl = repo_url,
550 branch = repo_branch,
551 mode = 'full',
552 method = 'clean',
553 haltOnFailure = True,
554 doStepIf = IsGitCleanRequested,
555 ))
556
557 factory.addStep(Git(
558 name = "gitfresh",
559 repourl = repo_url,
560 branch = repo_branch,
561 mode = 'full',
562 method = 'fresh',
563 haltOnFailure = True,
564 doStepIf = IsGitFreshRequested,
565 ))
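# On buildbot >= 0.8.10 the two steps above could be collapsed into a single
# one, since 'method' may then be a renderable. A sketch (not enabled here):
#
# factory.addStep(Git(
#     name = "git",
#     repourl = repo_url,
#     branch = repo_branch,
#     mode = 'full',
#     method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s'),
#     haltOnFailure = True,
# ))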
566
567 # update remote refs
568 factory.addStep(ShellCommand(
569 name = "fetchrefs",
570 description = "Fetching Git remote refs",
571 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
572 haltOnFailure = True
573 ))
574
575 # switch to tag
576 factory.addStep(ShellCommand(
577 name = "switchtag",
578 description = "Checking out Git tag",
579 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
580 haltOnFailure = True,
581 doStepIf = IsTaggingRequested
582 ))
583
584 factory.addStep(ShellCommand(
585 name = "rmtmp",
586 description = "Remove tmp folder",
587 command=["rm", "-rf", "tmp/"]))
588
589 # feed
590 # factory.addStep(ShellCommand(
591 # name = "feedsconf",
592 # description = "Copy the feeds.conf",
593 # command='''cp ~/feeds.conf ./feeds.conf''' ))
594
595 # feed
596 factory.addStep(ShellCommand(
597 name = "rmfeedlinks",
598 description = "Remove feed symlinks",
599 command=["rm", "-rf", "package/feeds/"]))
600
601 # feed
602 factory.addStep(ShellCommand(
603 name = "updatefeeds",
604 description = "Updating feeds",
605 command=["./scripts/feeds", "update"],
606 env = MakeEnv(),
607 ))
608
609 # feed
610 factory.addStep(ShellCommand(
611 name = "installfeeds",
612 description = "Installing feeds",
613 command=["./scripts/feeds", "install", "-a"],
614 env = MakeEnv()))
615
616 # seed config
617 factory.addStep(FileDownload(
618 name = "dlconfigseed",
619 mastersrc = "config.seed",
620 slavedest = ".config",
621 mode = 0644
622 ))
623
624 # configure
625 factory.addStep(ShellCommand(
626 name = "newconfig",
627 description = "Seeding .config",
628 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
629 ))
630
631 factory.addStep(ShellCommand(
632 name = "delbin",
633 description = "Removing output directory",
634 command = ["rm", "-rf", "bin/"]
635 ))
636
637 factory.addStep(ShellCommand(
638 name = "defconfig",
639 description = "Populating .config",
640 command = ["make", "defconfig"],
641 env = MakeEnv()
642 ))
643
644 # check arch
645 factory.addStep(ShellCommand(
646 name = "checkarch",
647 description = "Checking architecture",
648 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
649 logEnviron = False,
650 want_stdout = False,
651 want_stderr = False,
652 haltOnFailure = True
653 ))
654
655 # find libc suffix
656 factory.addStep(SetPropertyFromCommand(
657 name = "libc",
658 property = "libc",
659 description = "Finding libc suffix",
660 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
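# The sed expression above turns CONFIG_LIBC="musl" into an empty suffix and
# any other libc (e.g. "glibc") into "-glibc", matching the
# bin/targets/<target>/<subtarget><suffix> directory layout used below.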
661
662 # install build key
663 factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
664 factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
665
666 # prepare dl
667 factory.addStep(ShellCommand(
668 name = "dldir",
669 description = "Preparing dl/",
670 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
671 logEnviron = False,
672 want_stdout = False
673 ))
674
675 factory.addStep(StringDownload(
676 name = "ccachecc",
677 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
678 slavedest = "../ccache_cc.sh",
679 mode = 0755,
680 ))
681
682 factory.addStep(StringDownload(
683 name = "ccachecxx",
684 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
685 slavedest = "../ccache_cxx.sh",
686 mode = 0755,
687 ))
688
689 # prepare tar
690 factory.addStep(ShellCommand(
691 name = "dltar",
692 description = "Building and installing GNU tar",
693 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
694 env = MakeEnv(tryccache=True),
695 haltOnFailure = True
696 ))
697
698 # populate dl
699 factory.addStep(ShellCommand(
700 name = "dlrun",
701 description = "Populating dl/",
702 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
703 env = MakeEnv(),
704 logEnviron = False,
705 locks = [dlLock.access('exclusive')],
706 ))
707
708 factory.addStep(ShellCommand(
709 name = "cleanbase",
710 description = "Cleaning base-files",
711 command=["make", "package/base-files/clean", "V=s"]
712 ))
713
714 # build
715 factory.addStep(ShellCommand(
716 name = "tools",
717 description = "Building and installing tools",
718 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
719 env = MakeEnv(tryccache=True),
720 haltOnFailure = True
721 ))
722
723 factory.addStep(ShellCommand(
724 name = "toolchain",
725 description = "Building and installing toolchain",
726 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
727 env = MakeEnv(),
728 haltOnFailure = True
729 ))
730
731 factory.addStep(ShellCommand(
732 name = "kmods",
733 description = "Building kmods",
734 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
735 env = MakeEnv(),
736 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
737 haltOnFailure = True
738 ))
739
740 # find kernel version
741 factory.addStep(SetPropertyFromCommand(
742 name = "kernelversion",
743 property = "kernelversion",
744 description = "Finding the effective Kernel version",
745 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
746 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
747 ))
748
749 factory.addStep(ShellCommand(
750 name = "pkgclean",
751 description = "Cleaning up package build",
752 command=["make", "package/cleanup", "V=s"]
753 ))
754
755 factory.addStep(ShellCommand(
756 name = "pkgbuild",
757 description = "Building packages",
758 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
759 env = MakeEnv(),
760 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
761 haltOnFailure = True
762 ))
763
764 # factory.addStep(IfBuiltinShellCommand(
765 factory.addStep(ShellCommand(
766 name = "pkginstall",
767 description = "Installing packages",
768 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
769 env = MakeEnv(),
770 haltOnFailure = True
771 ))
772
773 factory.addStep(ShellCommand(
774 name = "pkgindex",
775 description = "Indexing packages",
776 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
777 env = MakeEnv(),
778 haltOnFailure = True
779 ))
780
781 if enable_kmod_archive:
782 # embed kmod repository. Must happen before 'images'
783
784 # find rootfs staging directory
785 factory.addStep(SetPropertyFromCommand(
786 name = "stageroot",
787 property = "stageroot",
788 description = "Finding the rootfs staging directory",
789 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
790 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
791 ))
792
793 factory.addStep(ShellCommand(
794 name = "filesdir",
795 description = "Creating file overlay directory",
796 command=["mkdir", "-p", "files/etc/opkg"],
797 haltOnFailure = True
798 ))
799
800 factory.addStep(ShellCommand(
801 name = "kmodconfig",
802 description = "Embedding kmod repository configuration",
803 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
804 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
805 haltOnFailure = True
806 ))
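# The sed above duplicates the "src/gz ..._core .../packages" line from the
# staged distfeeds.conf and adds a matching "..._kmods" feed pointing at
# kmods/<kernelversion>, so the images get the per-kernel kmod repository
# preconfigured via the files/ overlay.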
807
808 #factory.addStep(IfBuiltinShellCommand(
809 factory.addStep(ShellCommand(
810 name = "images",
811 description = "Building and installing images",
812 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
813 env = MakeEnv(),
814 haltOnFailure = True
815 ))
816
817 factory.addStep(ShellCommand(
818 name = "diffconfig",
819 description = "Generating config.seed",
820 command=["make", "-j1", "diffconfig", "V=s"],
821 env = MakeEnv(),
822 haltOnFailure = True
823 ))
824
825 factory.addStep(ShellCommand(
826 name = "checksums",
827 description = "Calculating checksums",
828 command=["make", "-j1", "checksum", "V=s"],
829 env = MakeEnv(),
830 haltOnFailure = True
831 ))
832
833 if enable_kmod_archive:
834 factory.addStep(ShellCommand(
835 name = "kmoddir",
836 description = "Creating kmod directory",
837 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
838 haltOnFailure = True
839 ))
840
841 factory.addStep(ShellCommand(
842 name = "kmodprepare",
843 description = "Preparing kmod archive",
844 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
845 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
846 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
847 haltOnFailure = True
848 ))
849
850 factory.addStep(ShellCommand(
851 name = "kmodindex",
852 description = "Indexing kmod archive",
853 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
854 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
855 env = MakeEnv(),
856 haltOnFailure = True
857 ))
858
859 # sign
860 if gpg_keyid is not None:
861 factory.addStep(MasterShellCommand(
862 name = "signprepare",
863 description = "Preparing temporary signing directory",
864 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
865 haltOnFailure = True
866 ))
867
868 factory.addStep(ShellCommand(
869 name = "signpack",
870 description = "Packing files to sign",
871 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
872 haltOnFailure = True
873 ))
874
875 factory.addStep(FileUpload(
876 slavesrc = "sign.tar.gz",
877 masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
878 haltOnFailure = True
879 ))
880
881 factory.addStep(MasterShellCommand(
882 name = "signfiles",
883 description = "Signing files",
884 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
885 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
886 haltOnFailure = True
887 ))
888
889 factory.addStep(FileDownload(
890 name = "dlsigntargz",
891 mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
892 slavedest = "sign.tar.gz",
893 haltOnFailure = True
894 ))
895
896 factory.addStep(ShellCommand(
897 name = "signunpack",
898 description = "Unpacking signed files",
899 command = ["tar", "-xzf", "sign.tar.gz"],
900 haltOnFailure = True
901 ))
902
903 # upload
904 factory.addStep(ShellCommand(
905 name = "dirprepare",
906 description = "Preparing upload directory structure",
907 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
908 haltOnFailure = True
909 ))
910
911 factory.addStep(ShellCommand(
912 name = "linkprepare",
913 description = "Preparing repository symlink",
914 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
915 doStepIf = IsNoMasterBuild,
916 haltOnFailure = True
917 ))
918
919 if enable_kmod_archive:
920 factory.addStep(ShellCommand(
921 name = "kmoddirprepare",
922 description = "Preparing kmod archive upload directory",
923 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
924 haltOnFailure = True
925 ))
926
927 factory.addStep(ShellCommand(
928 name = "dirupload",
929 description = "Uploading directory structure",
930 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
931 env={'RSYNC_PASSWORD': rsync_bin_key},
932 haltOnFailure = True,
933 logEnviron = False,
934 ))
935
936 # download remote sha256sums to 'target-sha256sums'
937 factory.addStep(ShellCommand(
938 name = "target-sha256sums",
939 description = "Fetching remote sha256sums for target",
940 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/targets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1]), "target-sha256sums"],
941 env={'RSYNC_PASSWORD': rsync_bin_key},
942 logEnviron = False,
943 haltOnFailure = False,
944 flunkOnFailure = False,
945 warnOnFailure = False,
946 ))
947
948 # build list of files to upload
949 factory.addStep(FileDownload(
950 name = "dlsha2rsyncpl",
951 mastersrc = "sha2rsync.pl",
952 slavedest = "../sha2rsync.pl",
953 mode = 0755,
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "buildlist",
958 description = "Building list of files to upload",
959 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
960 haltOnFailure = True,
961 ))
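# sha2rsync.pl compares the sha256sums fetched from the server with the
# freshly built ones and writes "rsynclist"; the upload step below passes that
# list to rsync via --files-from so only new or changed files are transferred.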
962
963 factory.addStep(FileDownload(
964 name = "dlrsync.sh",
965 mastersrc = "rsync.sh",
966 slavedest = "../rsync.sh",
967 mode = 0755
968 ))
969
970 # upload new files and update existing ones
971 factory.addStep(ShellCommand(
972 name = "targetupload",
973 description = "Uploading target files",
974 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
975 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
976 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
977 env={'RSYNC_PASSWORD': rsync_bin_key},
978 haltOnFailure = True,
979 logEnviron = False,
980 ))
981
982 # delete files which don't exist locally
983 factory.addStep(ShellCommand(
984 name = "targetprune",
985 description = "Pruning target files",
986 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
987 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
988 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
989 env={'RSYNC_PASSWORD': rsync_bin_key},
990 haltOnFailure = True,
991 logEnviron = False,
992 ))
993
994 if enable_kmod_archive:
995 factory.addStep(ShellCommand(
996 name = "kmodupload",
997 description = "Uploading kmod archive",
998 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
999 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1000 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1001 env={'RSYNC_PASSWORD': rsync_bin_key},
1002 haltOnFailure = True,
1003 logEnviron = False,
1004 ))
1005
1006 if rsync_src_url is not None:
1007 factory.addStep(ShellCommand(
1008 name = "sourceupload",
1009 description = "Uploading source archives",
1010 command=["../rsync.sh", "--size-only", "--delay-updates"] + rsync_defopts +
1011 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1012 env={'RSYNC_PASSWORD': rsync_src_key},
1013 haltOnFailure = True,
1014 logEnviron = False,
1015 ))
1016
1017 if False:
1018 factory.addStep(ShellCommand(
1019 name = "packageupload",
1020 description = "Uploading package files",
1021 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1022 env={'RSYNC_PASSWORD': rsync_bin_key},
1023 haltOnFailure = False,
1024 logEnviron = False,
1025 ))
1026
1027 # logs
1028 if False:
1029 factory.addStep(ShellCommand(
1030 name = "upload",
1031 description = "Uploading logs",
1032 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1033 env={'RSYNC_PASSWORD': rsync_bin_key},
1034 haltOnFailure = False,
1035 alwaysRun = True,
1036 logEnviron = False,
1037 ))
1038
1039 factory.addStep(ShellCommand(
1040 name = "df",
1041 description = "Reporting disk usage",
1042 command=["df", "-h", "."],
1043 env={'LC_ALL': 'C'},
1044 haltOnFailure = False,
1045 alwaysRun = True
1046 ))
1047
1048 factory.addStep(ShellCommand(
1049 name = "ccachestat",
1050 description = "Reporting ccache stats",
1051 command=["ccache", "-s"],
1052 env = MakeEnv(overrides={ 'PATH': ["./staging_dir/host/bin", "${PATH}"] }),
1053 want_stderr = False,
1054 haltOnFailure = False,
1055 flunkOnFailure = False,
1056 warnOnFailure = False,
1057 alwaysRun = True,
1058 ))
1059
1060 from buildbot.config import BuilderConfig
1061
1062 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1063
1064
1065 ####### STATUS TARGETS
1066
1067 # 'status' is a list of Status Targets. The results of each build will be
1068 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1069 # including web pages, email senders, and IRC bots.
1070
1071 c['status'] = []
1072
1073 from buildbot.status import html
1074 from buildbot.status.web import authz, auth
1075
1076 if ini.has_option("status", "bind"):
1077 if ini.has_option("status", "user") and ini.has_option("status", "password"):
1078 authz_cfg=authz.Authz(
1079 # change any of these to True to enable; see the manual for more
1080 # options
1081 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
1082 gracefulShutdown = 'auth',
1083 forceBuild = 'auth', # use this to test your slave once it is set up
1084 forceAllBuilds = 'auth',
1085 pingBuilder = False,
1086 stopBuild = 'auth',
1087 stopAllBuilds = 'auth',
1088 cancelPendingBuild = 'auth',
1089 )
1090 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
1091 else:
1092 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
1093
1094
1095 from buildbot.status import words
1096
1097 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1098 irc_host = ini.get("irc", "host")
1099 irc_port = 6667
1100 irc_chan = ini.get("irc", "channel")
1101 irc_nick = ini.get("irc", "nickname")
1102 irc_pass = None
1103
1104 if ini.has_option("irc", "port"):
1105 irc_port = ini.getint("irc", "port")
1106
1107 if ini.has_option("irc", "password"):
1108 irc_pass = ini.get("irc", "password")
1109
1110 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1111 channels = [{ "channel": irc_chan }],
1112 notify_events = {
1113 'exception': 1,
1114 'successToFailure': 1,
1115 'failureToSuccess': 1
1116 }
1117 )
1118
1119 c['status'].append(irc)
1120
1121 ####### DB URL
1122
1123 c['db'] = {
1124 # This specifies what database buildbot uses to store its state. You can leave
1125 # this at its default for all but the largest installations.
1126 'db_url' : "sqlite:///state.sqlite",
1127 }