# -*- python -*-
# ex: set syntax=python:

import os
import re
import base64
import subprocess
import ConfigParser

from buildbot import locks

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

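# For reference, a config.ini skeleton matching the options read below might
# look like this (a sketch; all values are placeholders, only the option names
# used in this file matter):
#
#   [general]
#   title = Example Buildbot
#   title_url = http://buildbot.example.org/
#   buildbot_url = http://buildbot.example.org:8010/
#   workdir = /data/buildbot
#   port = 9989
#   expire = 1209600
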
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("general", "buildbot_url")

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9989

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

c['slaves'] = []
NetLocks = dict()

for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))

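# A hypothetical slave entry in config.ini, matching the options parsed above
# (only name and password are required, everything else is optional):
#
#   [slave 1]
#   name = builder-01
#   password = secret
#   builds = 3
#   cleanup = false
#   dl_lock = wan
#   ul_lock = wan
#   shared_wd = false
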
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
tree_expire = 0
other_builds = 0
cc_version = None

cc_command = "gcc"
cxx_command = "g++"

git_ssh = False
git_ssh_key = None

if ini.has_option("general", "expire"):
	tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "cc_version"):
	cc_version = ini.get("general", "cc_version").split()
	if len(cc_version) == 1:
		cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	git_ssh = False

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

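# The [repo] and [rsync] sections consumed above could look roughly like this
# (placeholder values):
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = user@upload.example.org::bin-upload
#   binary_password = secret
#   source_url = user@upload.example.org::src-upload
#   source_password = secret
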
rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]

gpg_key = None
gpg_passphrase = None
gpg_comment = repo_branch.replace("-", " ").title() + " key"

if ini.has_option("gpg", "key"):
	gpg_key = ini.get("gpg", "key")

if ini.has_option("gpg", "passphrase"):
	gpg_passphrase = ini.get("gpg", "passphrase")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")

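# Optional signing configuration; both sections may be omitted entirely
# (a sketch with placeholder values):
#
#   [gpg]
#   key = <key material as expected by signall.sh>
#   passphrase = secret
#   comment = Unattended build signature
#
#   [usign]
#   key = <base64-encoded usign/signify secret key>
#   comment = untrusted comment: example key
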
enable_kmod_archive = True


# find targets
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
	line = findtargets.stdout.readline()
	if not line:
		break
	ta = line.strip().split(' ')
	targets.append(ta[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the Git repository configured in config.ini.

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
c['change_source'].append(GitPoller(
	repo_url,
	workdir=work_dir+'/work.git', branch=repo_branch,
	pollinterval=300))

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off a build for every discovered target whenever the watched branch changes.

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(branch=repo_branch),
	treeStableTimer=60,
	builderNames=targets))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=targets))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source.git import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import StringDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import Interpolate
from buildbot.process import properties


CleanTargetMap = [
	[ "tools", "tools/clean" ],
	[ "chain", "toolchain/clean" ],
	[ "linux", "target/linux/clean" ],
	[ "dir", "dirclean" ],
	[ "dist", "distclean" ]
]

def IsMakeCleanRequested(pattern):
	def CheckCleanProperty(step):
		val = step.getProperty("clean")
		if val and re.match(pattern, val):
			return True
		else:
			return False

	return CheckCleanProperty

def IsCleanupRequested(step):
	shared_wd = step.getProperty("shared_wd")
	if shared_wd:
		return False
	do_cleanup = step.getProperty("do_cleanup")
	if do_cleanup:
		return True
	else:
		return False

def IsExpireRequested(step):
	shared_wd = step.getProperty("shared_wd")
	if shared_wd:
		return False
	else:
		return not IsCleanupRequested(step)

def IsGitFreshRequested(step):
	do_cleanup = step.getProperty("do_cleanup")
	if do_cleanup:
		return True
	else:
		return False

def IsGitCleanRequested(step):
	return not IsGitFreshRequested(step)

def IsTaggingRequested(step):
	val = step.getProperty("tag")
	if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
		return True
	else:
		return False

def IsNoTaggingRequested(step):
	return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
	return repo_branch != "master"

def GetBaseVersion():
	if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]
	else:
		return "master"

@properties.renderer
def GetVersionPrefix(props):
	basever = GetBaseVersion()
	if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	elif basever != "master":
		return "%s-SNAPSHOT/" % basever
	else:
		return ""

@properties.renderer
def GetNumJobs(props):
	if props.hasProperty("max_builds") and props.hasProperty("nproc"):
		return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
	else:
		return "1"

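# For example, with nproc=8, max_builds=2 and other_builds=0 this renders to
# "4", i.e. each of the two concurrent builds gets -j4.
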
@properties.renderer
def GetCC(props):
	if props.hasProperty("cc_command"):
		return props["cc_command"]
	else:
		return "gcc"

@properties.renderer
def GetCXX(props):
	if props.hasProperty("cxx_command"):
		return props["cxx_command"]
	else:
		return "g++"

@properties.renderer
def GetCwd(props):
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]
	else:
		return "/"

@properties.renderer
def GetCCache(props):
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"]
	else:
		return ""

def GetNextBuild(builder, requests):
	for r in requests:
		if r.properties and r.properties.hasProperty("tag"):
			return r
	return requests[0]

def MakeEnv(overrides=None, tryccache=False):
	env = {
		'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
		'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
	}
	if tryccache:
		env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
		env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
		env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
	else:
		env['CC'] = env['CCC']
		env['CXX'] = env['CCXX']
		env['CCACHE'] = ''
	if overrides is not None:
		env.update(overrides)
	return env

@properties.renderer
def NetLockDl(props):
	lock = None
	if props.hasProperty("dl_lock"):
		lock = NetLocks[props["dl_lock"]]
	if lock is not None:
		return [lock.access('exclusive')]
	else:
		return []

@properties.renderer
def NetLockUl(props):
	lock = None
	if props.hasProperty("ul_lock"):
		lock = NetLocks[props["ul_lock"]]
	if lock is not None:
		return [lock.access('exclusive')]
	else:
		return []

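# Helper to derive the usign/signify public key from the base64-encoded secret
# key: assuming the standard signify layout, bytes 0-1 carry the algorithm tag,
# bytes 32-39 the key number, and the last 32 bytes of the Ed25519 secret key
# hold the public key, which is what the slices below pick out.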
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	try:
		seckey = base64.b64decode(seckey)
	except:
		return None

	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

checkBuiltin = re.sub('[\t\n ]+', ' ', """
	checkBuiltin() {
		local symbol op path file;
		for file in $CHANGED_FILES; do
			case "$file" in
				package/*/*) : ;;
				*) return 0 ;;
			esac;
		done;
		while read symbol op path; do
			case "$symbol" in package-*)
				symbol="${symbol##*(}";
				symbol="${symbol%)}";
				for file in $CHANGED_FILES; do
					case "$file" in "package/$path/"*)
						grep -qsx "$symbol=y" .config && return 0
					;; esac;
				done;
			esac;
		done < tmp/.packagedeps;
		return 1;
	}
""").strip()


class IfBuiltinShellCommand(ShellCommand):
	def _quote(self, str):
		if re.search("[^a-zA-Z0-9/_.-]", str):
			return "'%s'" %(re.sub("'", "'\"'\"'", str))
		return str

	def setCommand(self, command):
		if not isinstance(command, (str, unicode)):
			command = ' '.join(map(self._quote, command))
		self.command = [
			'/bin/sh', '-c',
			'%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
		]

	def setupEnvironment(self, cmd):
		slaveEnv = self.slaveEnvironment
		if slaveEnv is None:
			slaveEnv = { }
		changedFiles = { }
		for request in self.build.requests:
			for source in request.sources:
				for change in source.changes:
					for file in change.files:
						changedFiles[file] = True
		fullSlaveEnv = slaveEnv.copy()
		fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
		cmd.args['env'] = fullSlaveEnv

slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)

for target in targets:
	ts = target.split('/')

	factory = BuildFactory()

	# find number of cores
	factory.addStep(SetPropertyFromCommand(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# find gcc and g++ compilers
	if cc_version is not None:
		factory.addStep(FileDownload(
			name = "dlfindbinpl",
			mastersrc = scripts_dir + '/findbin.pl',
			slavedest = "../findbin.pl",
			mode = 0755))

		factory.addStep(SetPropertyFromCommand(
			name = "gcc",
			property = "cc_command",
			description = "Finding gcc command",
			command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
			haltOnFailure = True))

		factory.addStep(SetPropertyFromCommand(
			name = "g++",
			property = "cxx_command",
			description = "Finding g++ command",
			command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
			haltOnFailure = True))

	# see if ccache is available
	factory.addStep(SetPropertyFromCommand(
		property = "ccache_command",
		command = ["which", "ccache"],
		description = "Testing for ccache command",
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# expire tree if needed
	if tree_expire > 0:
		factory.addStep(FileDownload(
			name = "dlexpiresh",
			doStepIf = IsExpireRequested,
			mastersrc = scripts_dir + '/expire.sh',
			slavedest = "../expire.sh",
			mode = 0755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			doStepIf = IsExpireRequested,
			timeout = 2400))

	# cleanup.sh if needed
	factory.addStep(FileDownload(
		name = "dlcleanupsh",
		mastersrc = "cleanup.sh",
		slavedest = "../cleanup.sh",
		mode = 0755,
		doStepIf = IsCleanupRequested))

	factory.addStep(ShellCommand(
		name = "cleanold",
		description = "Cleaning previous builds",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "cleanup",
		description = "Cleaning work area",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,
		timeout = 2400))

	# user-requested clean targets
	for tuple in CleanTargetMap:
		factory.addStep(ShellCommand(
			name = tuple[1],
			description = 'User-requested "make %s"' % tuple[1],
			command = ["make", tuple[1], "V=s"],
			env = MakeEnv(),
			doStepIf = IsMakeCleanRequested(tuple[0])
		))

	# Workaround bug when switching from a checked out tag back to a branch
	# Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
	factory.addStep(ShellCommand(
		name = "gitcheckout",
		description = "Ensure that Git HEAD is sane",
		command = "if [ -d .git ]; then git checkout master; else exit 0; fi",
		haltOnFailure = True))

	# check out the source
	# Git() runs:
	# if repo doesn't exist: 'git clone repourl'
	# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
	# 'git fetch -t repourl branch; git reset --hard revision'
	# Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
	# if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
	factory.addStep(Git(
		name = "gitclean",
		repourl = repo_url,
		branch = repo_branch,
		mode = 'full',
		method = 'clean',
		haltOnFailure = True,
		doStepIf = IsGitCleanRequested,
	))

	factory.addStep(Git(
		name = "gitfresh",
		repourl = repo_url,
		branch = repo_branch,
		mode = 'full',
		method = 'fresh',
		haltOnFailure = True,
		doStepIf = IsGitFreshRequested,
	))

	# update remote refs
	factory.addStep(ShellCommand(
		name = "fetchrefs",
		description = "Fetching Git remote refs",
		command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
		haltOnFailure = True
	))

	# switch to tag
	factory.addStep(ShellCommand(
		name = "switchtag",
		description = "Checking out Git tag",
		command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
		haltOnFailure = True,
		doStepIf = IsTaggingRequested
	))

	# Verify that Git HEAD points to a tag or branch
	# Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
	factory.addStep(ShellCommand(
		name = "gitverify",
		description = "Ensure that Git HEAD is pointing to a branch or tag",
		command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "rmtmp",
		description = "Remove tmp folder",
		command=["rm", "-rf", "tmp/"]))

	# feed
	# factory.addStep(ShellCommand(
	#	name = "feedsconf",
	#	description = "Copy the feeds.conf",
	#	command='''cp ~/feeds.conf ./feeds.conf''' ))

	# feed
	factory.addStep(ShellCommand(
		name = "rmfeedlinks",
		description = "Remove feed symlinks",
		command=["rm", "-rf", "package/feeds/"]))

	factory.addStep(StringDownload(
		name = "ccachecc",
		s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
		slavedest = "../ccache_cc.sh",
		mode = 0755,
	))

	factory.addStep(StringDownload(
		name = "ccachecxx",
		s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
		slavedest = "../ccache_cxx.sh",
		mode = 0755,
	))

	# Git SSH
	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			slavedest = "../git-clone.key",
			mode = 0600,
		))

		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf",
			command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
			haltOnFailure = True
		))

	# feed
	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		command=["./scripts/feeds", "update"],
		env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
		haltOnFailure = True
	))

	# Git SSH
	if git_ssh:
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			command=["rm", "feeds.conf"],
			haltOnFailure = True
		))

	# feed
	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		command=["./scripts/feeds", "install", "-a"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	# seed config
	factory.addStep(FileDownload(
		name = "dlconfigseed",
		mastersrc = "config.seed",
		slavedest = ".config",
		mode = 0644
	))

	# configure
	factory.addStep(ShellCommand(
		name = "newconfig",
		description = "Seeding .config",
		command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
	))

	factory.addStep(ShellCommand(
		name = "delbin",
		description = "Removing output directory",
		command = ["rm", "-rf", "bin/"]
	))

	factory.addStep(ShellCommand(
		name = "defconfig",
		description = "Populating .config",
		command = ["make", "defconfig"],
		env = MakeEnv()
	))

	# check arch
	factory.addStep(ShellCommand(
		name = "checkarch",
		description = "Checking architecture",
		command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
		logEnviron = False,
		want_stdout = False,
		want_stderr = False,
		haltOnFailure = True
	))

	# find libc suffix
	factory.addStep(SetPropertyFromCommand(
		name = "libc",
		property = "libc",
		description = "Finding libc suffix",
		command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			slavedest = "key-build.pub",
			mode = 0600,
		))

		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			slavedest = "key-build",
			mode = 0600,
		))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			slavedest = "key-build.ucert",
			mode = 0600,
		))

	# prepare dl
	factory.addStep(ShellCommand(
		name = "dldir",
		description = "Preparing dl/",
		command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
		logEnviron = False,
		want_stdout = False
	))

	# prepare tar
	factory.addStep(ShellCommand(
		name = "dltar",
		description = "Building and installing GNU tar",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	# populate dl
	factory.addStep(ShellCommand(
		name = "dlrun",
		description = "Populating dl/",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
		env = MakeEnv(),
		logEnviron = False,
		locks = [dlLock.access('exclusive')],
	))

	factory.addStep(ShellCommand(
		name = "cleanbase",
		description = "Cleaning base-files",
		command=["make", "package/base-files/clean", "V=s"]
	))

	# build
	factory.addStep(ShellCommand(
		name = "tools",
		description = "Building and installing tools",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "toolchain",
		description = "Building and installing toolchain",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "kmods",
		description = "Building kmods",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# find kernel version
	factory.addStep(SetPropertyFromCommand(
		name = "kernelversion",
		property = "kernelversion",
		description = "Finding the effective Kernel version",
		command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
		env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
	))

	factory.addStep(ShellCommand(
		name = "pkgclean",
		description = "Cleaning up package build",
		command=["make", "package/cleanup", "V=s"]
	))

	factory.addStep(ShellCommand(
		name = "pkgbuild",
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "pkginstall",
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "pkgindex",
		description = "Indexing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
		env = MakeEnv(),
		haltOnFailure = True
	))

	if enable_kmod_archive:
		# embed kmod repository. Must happen before 'images'

		# find rootfs staging directory
		factory.addStep(SetPropertyFromCommand(
			name = "stageroot",
			property = "stageroot",
			description = "Finding the rootfs staging directory",
			command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
			env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
		))

		factory.addStep(ShellCommand(
			name = "filesdir",
			description = "Creating file overlay directory",
			command=["mkdir", "-p", "files/etc/opkg"],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodconfig",
			description = "Embedding kmod repository configuration",
			command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
				"%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
			haltOnFailure = True
		))

	#factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "diffconfig",
		description = "Generating config.seed",
		command=["make", "-j1", "diffconfig", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
				Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))

	# sign
	if gpg_key is not None or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = {
				'GPGKEY': gpg_key,
				'GPGPASS': gpg_passphrase,
				'GPGCOMMENT': gpg_comment,
				'USIGNKEY': usign_key,
				'USIGNCOMMENT': usign_comment
			},
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			slavedest = "sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	# upload
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# download remote sha256sums to 'target-sha256sums'
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		slavedest = "../sha2rsync.pl",
		mode = 0755,
	))

	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		slavedest = "../rsync.sh",
		mode = 0755
	))

	# upload new files and update existing ones
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
				["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
		))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
				[Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
		))

	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			logEnviron = False,
		))

	# logs
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			alwaysRun = True,
			logEnviron = False,
		))

	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))


from buildbot.status import words

if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	irc_host = ini.get("irc", "host")
	irc_port = 6667
	irc_chan = ini.get("irc", "channel")
	irc_nick = ini.get("irc", "nickname")
	irc_pass = None

	if ini.has_option("irc", "port"):
		irc_port = ini.getint("irc", "port")

	if ini.has_option("irc", "password"):
		irc_pass = ini.get("irc", "password")

	irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
		channels = [{ "channel": irc_chan }],
		notify_events = {
			'exception': 1,
			'successToFailure': 1,
			'failureToSuccess': 1
		}
	)

	c['status'].append(irc)

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}