# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

# This is the buildmaster config file for phase1. It must be installed as
# 'master.cfg' in the buildmaster's base directory.

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")
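# NOTE: a minimal sketch of the config.ini layout this file expects, inferred
# from the ini.get()/has_option() calls below; section and option names match
# the code, the values are purely illustrative:
#
#   [general]
#   title = Example build farm
#   title_url = http://buildbot.example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = .
#   expire = 1209600
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = user@example.org::bin-upload
#   binary_password = secret
#
#   [gpg]
#   keyid = 0xDEADBEEF
#   comment = Unattended build signature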

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("general", "buildbot_url")

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9989

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

c['slaves'] = []
NetLocks = dict()

for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))

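# NOTE: a hypothetical slave section showing the per-slave options parsed
# above (names and values are examples only):
#
#   [slave 1]
#   name = slave-01
#   password = secret
#   builds = 3
#   cleanup = false
#   dl_lock = dl-pool-1
#   ul_lock = ul-pool-1
#   shared_wd = false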
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))
tree_expire = 0
other_builds = 0
cc_version = None

cc_command = "gcc"
cxx_command = "g++"

if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "cc_version"):
    cc_version = ini.get("general", "cc_version").split()
    if len(cc_version) == 1:
        cc_version = ["eq", cc_version[0]]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")

enable_kmod_archive = True


# find targets
targets = [ ]

if not os.path.isdir(home_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')

findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    ta = line.strip().split(' ')
    targets.append(ta[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the Git repository configured in config.ini.

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=home_dir+'/work.git', branch=repo_branch,
    pollinterval=300))

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off a build on every target builder whenever the watched branch changes.

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(branch=repo_branch),
    treeStableTimer=60,
    builderNames=targets))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=targets))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source.git import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import StringDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import Interpolate
from buildbot.process import properties


CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]

def IsMakeCleanRequested(pattern):
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        if val and re.match(pattern, val):
            return True
        else:
            return False

    return CheckCleanProperty

def IsCleanupRequested(step):
    shared_wd = step.getProperty("shared_wd")
    if shared_wd:
        return False
    do_cleanup = step.getProperty("do_cleanup")
    if do_cleanup:
        return True
    else:
        return False

def IsExpireRequested(step):
    shared_wd = step.getProperty("shared_wd")
    if shared_wd:
        return False
    else:
        return not IsCleanupRequested(step)

def IsGitFreshRequested(step):
    do_cleanup = step.getProperty("do_cleanup")
    if do_cleanup:
        return True
    else:
        return False

def IsGitCleanRequested(step):
    return not IsGitFreshRequested(step)

def IsTaggingRequested(step):
    val = step.getProperty("tag")
    if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
        return True
    else:
        return False

def IsNoTaggingRequested(step):
    return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
    return repo_branch != "master"

def GetBaseVersion():
    if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
    else:
        return "master"

@properties.renderer
def GetVersionPrefix(props):
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
    else:
        return ""
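# NOTE: illustrative renderings of GetVersionPrefix (values are examples only):
#   tag property "17.01.4"                   -> "17.01.4/"
#   no tag, repo_branch "somebranch-17.01"   -> "17.01-SNAPSHOT/"
#   no tag, repo_branch "master"             -> ""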

@properties.renderer
def GetNumJobs(props):
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
    else:
        return "1"
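# NOTE: worked example with illustrative numbers: nproc=8, max_builds=2 and
# other_builds=1 gives 8 / (2 + 1) = 2, so build steps run with "make -j2".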

@properties.renderer
def GetCC(props):
    if props.hasProperty("cc_command"):
        return props["cc_command"]
    else:
        return "gcc"

@properties.renderer
def GetCXX(props):
    if props.hasProperty("cxx_command"):
        return props["cxx_command"]
    else:
        return "g++"

@properties.renderer
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

@properties.renderer
def GetCCache(props):
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    else:
        return ""

def GetNextBuild(builder, requests):
    for r in requests:
        if r.properties and r.properties.hasProperty("tag"):
            return r
    return requests[0]

def MakeEnv(overrides=None, tryccache=False):
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']
        env['CCACHE'] = ''
    if overrides is not None:
        env.update(overrides)
    return env
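# NOTE: a sketch of what MakeEnv() produces, assuming the compiler and ccache
# detection steps below have populated the corresponding properties (paths are
# illustrative): with tryccache=True, CC/CXX point at the ccache_cc.sh and
# ccache_cxx.sh wrappers in the build directory and CCACHE holds the detected
# ccache path; with tryccache=False, CC/CXX are the plain compilers (CCC/CCXX)
# and CCACHE is empty, so steps such as "toolchain" build without ccache.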

@properties.renderer
def NetLockDl(props):
    lock = None
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []

@properties.renderer
def NetLockUl(props):
    lock = None
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []
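# NOTE: NetLockDl and NetLockUl render the per-slave dl_lock/ul_lock names from
# config.ini into a list of exclusive master-lock accesses. They look intended
# for a step's locks= argument, but no step in this file references them, so
# treat that as an assumption about intended usage rather than current behaviour.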

c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

checkBuiltin = re.sub('[\t\n ]+', ' ', """
    checkBuiltin() {
        local symbol op path file;
        for file in $CHANGED_FILES; do
            case "$file" in
                package/*/*) : ;;
                *) return 0 ;;
            esac;
        done;
        while read symbol op path; do
            case "$symbol" in package-*)
                symbol="${symbol##*(}";
                symbol="${symbol%)}";
                for file in $CHANGED_FILES; do
                    case "$file" in "package/$path/"*)
                        grep -qsx "$symbol=y" .config && return 0
                    ;; esac;
                done;
            esac;
        done < tmp/.packagedeps;
        return 1;
    }
""").strip()


class IfBuiltinShellCommand(ShellCommand):
    def _quote(self, str):
        if re.search("[^a-zA-Z0-9/_.-]", str):
            return "'%s'" %(re.sub("'", "'\"'\"'", str))
        return str

    def setCommand(self, command):
        if not isinstance(command, (str, unicode)):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        slaveEnv = self.slaveEnvironment
        if slaveEnv is None:
            slaveEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullSlaveEnv = slaveEnv.copy()
        fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullSlaveEnv

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for target in targets:
    ts = target.split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # find gcc and g++ compilers
    if cc_version is not None:
        factory.addStep(FileDownload(
            name = "dlfindbinpl",
            mastersrc = "findbin.pl",
            slavedest = "../findbin.pl",
            mode = 0755))

        factory.addStep(SetPropertyFromCommand(
            name = "gcc",
            property = "cc_command",
            description = "Finding gcc command",
            command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
            haltOnFailure = True))

        factory.addStep(SetPropertyFromCommand(
            name = "g++",
            property = "cxx_command",
            description = "Finding g++ command",
            command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
            haltOnFailure = True))

    # see if ccache is available
    factory.addStep(SetPropertyFromCommand(
        property = "ccache_command",
        command = ["which", "ccache"],
        description = "Testing for ccache command",
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    # expire tree if needed
    if tree_expire > 0:
        factory.addStep(FileDownload(
            name = "dlexpiresh",
            doStepIf = IsExpireRequested,
            mastersrc = "expire.sh",
            slavedest = "../expire.sh",
            mode = 0755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            doStepIf = IsExpireRequested,
            timeout = 2400))

    # cleanup.sh if needed
    factory.addStep(FileDownload(
        name = "dlcleanupsh",
        mastersrc = "cleanup.sh",
        slavedest = "../cleanup.sh",
        mode = 0755,
        doStepIf = IsCleanupRequested))

    factory.addStep(ShellCommand(
        name = "cleanold",
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,
        timeout = 2400))

    factory.addStep(ShellCommand(
        name = "cleanup",
        description = "Cleaning work area",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,
        timeout = 2400))

    # user-requested clean targets
    for tuple in CleanTargetMap:
        factory.addStep(ShellCommand(
            name = tuple[1],
            description = 'User-requested "make %s"' % tuple[1],
            command = ["make", tuple[1], "V=s"],
            env = MakeEnv(),
            doStepIf = IsMakeCleanRequested(tuple[0])
        ))

    # check out the source
    # Git() runs:
    # if repo doesn't exist: 'git clone repourl'
    # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
    # 'git fetch -t repourl branch; git reset --hard revision'
    # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
    # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
    factory.addStep(Git(
        name = "gitclean",
        repourl = repo_url,
        branch = repo_branch,
        mode = 'full',
        method = 'clean',
        haltOnFailure = True,
        doStepIf = IsGitCleanRequested,
    ))

    factory.addStep(Git(
        name = "gitfresh",
        repourl = repo_url,
        branch = repo_branch,
        mode = 'full',
        method = 'fresh',
        haltOnFailure = True,
        doStepIf = IsGitFreshRequested,
    ))

    # update remote refs
    factory.addStep(ShellCommand(
        name = "fetchrefs",
        description = "Fetching Git remote refs",
        command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
        haltOnFailure = True
    ))

    # switch to tag
    factory.addStep(ShellCommand(
        name = "switchtag",
        description = "Checking out Git tag",
        command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
        haltOnFailure = True,
        doStepIf = IsTaggingRequested
    ))

    factory.addStep(ShellCommand(
        name = "rmtmp",
        description = "Remove tmp folder",
        command=["rm", "-rf", "tmp/"]))

    # feed
    # factory.addStep(ShellCommand(
    #     name = "feedsconf",
    #     description = "Copy the feeds.conf",
    #     command='''cp ~/feeds.conf ./feeds.conf''' ))

    # feed
    factory.addStep(ShellCommand(
        name = "rmfeedlinks",
        description = "Remove feed symlinks",
        command=["rm", "-rf", "package/feeds/"]))

    factory.addStep(StringDownload(
        name = "ccachecc",
        s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
        slavedest = "../ccache_cc.sh",
        mode = 0755,
    ))

    factory.addStep(StringDownload(
        name = "ccachecxx",
        s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
        slavedest = "../ccache_cxx.sh",
        mode = 0755,
    ))
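    # NOTE: with MakeEnv(tryccache=True), CC/CXX point at the two wrapper
    # scripts above, so a compile roughly expands to "<ccache_command> g++ ..."
    # when ccache was detected, or to the bare compiler when ${CCACHE} is
    # empty (command names are illustrative).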

    # feed
    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        command=["./scripts/feeds", "update"],
        env = MakeEnv(tryccache=True),
    ))

    # feed
    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        command=["./scripts/feeds", "install", "-a"],
        env = MakeEnv(tryccache=True)))

    # seed config
    factory.addStep(FileDownload(
        name = "dlconfigseed",
        mastersrc = "config.seed",
        slavedest = ".config",
        mode = 0644
    ))

    # configure
    factory.addStep(ShellCommand(
        name = "newconfig",
        description = "Seeding .config",
        command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
    ))
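    # NOTE: for an illustrative target of "ar71xx/generic" the step above
    # appends these two lines to .config:
    #   CONFIG_TARGET_ar71xx=y
    #   CONFIG_TARGET_ar71xx_generic=y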

    factory.addStep(ShellCommand(
        name = "delbin",
        description = "Removing output directory",
        command = ["rm", "-rf", "bin/"]
    ))

    factory.addStep(ShellCommand(
        name = "defconfig",
        description = "Populating .config",
        command = ["make", "defconfig"],
        env = MakeEnv()
    ))

    # check arch
    factory.addStep(ShellCommand(
        name = "checkarch",
        description = "Checking architecture",
        command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
        logEnviron = False,
        want_stdout = False,
        want_stderr = False,
        haltOnFailure = True
    ))

    # find libc suffix
    factory.addStep(SetPropertyFromCommand(
        name = "libc",
        property = "libc",
        description = "Finding libc suffix",
        command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

    # install build key
    factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
    factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))

    # prepare dl
    factory.addStep(ShellCommand(
        name = "dldir",
        description = "Preparing dl/",
        command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
        logEnviron = False,
        want_stdout = False
    ))

    # prepare tar
    factory.addStep(ShellCommand(
        name = "dltar",
        description = "Building and installing GNU tar",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True
    ))

    # populate dl
    factory.addStep(ShellCommand(
        name = "dlrun",
        description = "Populating dl/",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
        env = MakeEnv(),
        logEnviron = False,
        locks = [dlLock.access('exclusive')],
    ))

    factory.addStep(ShellCommand(
        name = "cleanbase",
        description = "Cleaning base-files",
        command=["make", "package/base-files/clean", "V=s"]
    ))

    # build
    factory.addStep(ShellCommand(
        name = "tools",
        description = "Building and installing tools",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "toolchain",
        description = "Building and installing toolchain",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "kmods",
        description = "Building kmods",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        env = MakeEnv(),
        #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
        haltOnFailure = True
    ))

    # find kernel version
    factory.addStep(SetPropertyFromCommand(
        name = "kernelversion",
        property = "kernelversion",
        description = "Finding the effective Kernel version",
        command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
        env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
    ))
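    # NOTE: the kernelversion property combines the three printed values as
    # "<LINUX_VERSION>-<LINUX_RELEASE>-<LINUX_VERMAGIC>", e.g. something like
    # "4.9.120-1-5a6dc42f2eb58bbd82eba15600e74e84" (illustrative).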

    factory.addStep(ShellCommand(
        name = "pkgclean",
        description = "Cleaning up package build",
        command=["make", "package/cleanup", "V=s"]
    ))

    factory.addStep(ShellCommand(
        name = "pkgbuild",
        description = "Building packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        env = MakeEnv(),
        #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
        haltOnFailure = True
    ))

    # factory.addStep(IfBuiltinShellCommand(
    factory.addStep(ShellCommand(
        name = "pkginstall",
        description = "Installing packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "pkgindex",
        description = "Indexing packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    if enable_kmod_archive:
        # embed kmod repository. Must happen before 'images'

        # find rootfs staging directory
        factory.addStep(SetPropertyFromCommand(
            name = "stageroot",
            property = "stageroot",
            description = "Finding the rootfs staging directory",
            command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
            env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
        ))

        factory.addStep(ShellCommand(
            name = "filesdir",
            description = "Creating file overlay directory",
            command=["mkdir", "-p", "files/etc/opkg"],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "kmodconfig",
            description = "Embedding kmod repository configuration",
            command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
                "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
            haltOnFailure = True
        ))
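        # NOTE: given an illustrative distfeeds.conf line
        #   src/gz reponame_core http://example.org/targets/x/y/packages
        # the sed expression above keeps it and adds a second entry
        #   src/gz reponame_kmods http://example.org/targets/x/y/kmods/<kernelversion>
        # pointing at the kmod archive for the running kernel.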

    #factory.addStep(IfBuiltinShellCommand(
    factory.addStep(ShellCommand(
        name = "images",
        description = "Building and installing images",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "diffconfig",
        description = "Generating config.seed",
        command=["make", "-j1", "diffconfig", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "checksums",
        description = "Calculating checksums",
        command=["make", "-j1", "checksum", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmoddir",
            description = "Creating kmod directory",
            command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "kmodprepare",
            description = "Preparing kmod archive",
            command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "kmodindex",
            description = "Indexing kmod archive",
            command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
                Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
            env = MakeEnv(),
            haltOnFailure = True
        ))

    # sign
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sign.tar.gz",
            masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            name = "dlsigntargz",
            mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
            slavedest = "sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # upload
    factory.addStep(ShellCommand(
        name = "dirprepare",
        description = "Preparing upload directory structure",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "linkprepare",
        description = "Preparing repository symlink",
        command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
        doStepIf = IsNoMasterBuild,
        haltOnFailure = True
    ))

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmoddirprepare",
            description = "Preparing kmod archive upload directory",
            command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            haltOnFailure = True
        ))

    factory.addStep(ShellCommand(
        name = "dirupload",
        description = "Uploading directory structure",
        command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
    ))

    # download remote sha256sums to 'target-sha256sums'
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for target",
        command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/targets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1]), "target-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        logEnviron = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    # build list of files to upload
    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = "sha2rsync.pl",
        slavedest = "../sha2rsync.pl",
        mode = 0755,
    ))

    factory.addStep(ShellCommand(
        name = "buildlist",
        description = "Building list of files to upload",
        command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
        haltOnFailure = True,
    ))

    factory.addStep(FileDownload(
        name = "dlrsync.sh",
        mastersrc = "rsync.sh",
        slavedest = "../rsync.sh",
        mode = 0755
    ))

    # upload new files and update existing ones
    factory.addStep(ShellCommand(
        name = "targetupload",
        description = "Uploading target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
    ))

    # delete files which don't exist locally
    factory.addStep(ShellCommand(
        name = "targetprune",
        description = "Pruning target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
    ))

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmodupload",
            description = "Uploading kmod archive",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
                Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = True,
            logEnviron = False,
        ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            command=["../rsync.sh", "--size-only", "--delay-updates"] + rsync_defopts +
                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = True,
            logEnviron = False,
        ))

    if False:
        factory.addStep(ShellCommand(
            name = "packageupload",
            description = "Uploading package files",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = False,
            logEnviron = False,
        ))

    # logs
    if False:
        factory.addStep(ShellCommand(
            name = "upload",
            description = "Uploading logs",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = False,
            alwaysRun = True,
            logEnviron = False,
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))


from buildbot.status import words

if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
    irc_host = ini.get("irc", "host")
    irc_port = 6667
    irc_chan = ini.get("irc", "channel")
    irc_nick = ini.get("irc", "nickname")
    irc_pass = None

    if ini.has_option("irc", "port"):
        irc_port = ini.getint("irc", "port")

    if ini.has_option("irc", "password"):
        irc_pass = ini.get("irc", "password")

    irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
        channels = [{ "channel": irc_chan }],
        notify_events = {
            'exception': 1,
            'successToFailure': 1,
            'failureToSuccess': 1
        }
    )

    c['status'].append(irc)

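# NOTE: hypothetical [status] and [irc] sections matching the options read
# above (all values illustrative):
#
#   [status]
#   bind = 8010
#   user = admin
#   password = secret
#
#   [irc]
#   host = irc.example.org
#   port = 6667
#   nickname = builderbot
#   channel = #builds
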
####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}