# phase1: add JSON merge step
# [buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import ConfigParser
9
10 from buildbot import locks
11
12 # This is a sample buildmaster config file. It must be installed as
13 # 'master.cfg' in your buildmaster's base directory.
14
# Load the master configuration; BUILDMASTER_CONFIG may point at an
# alternative file, otherwise ./config.ini next to the master is used.
ini = ConfigParser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")
38
39 ####### BUILDSLAVES
40
41 # The 'slaves' list defines the set of recognized buildslaves. Each element is
42 # a BuildSlave object, specifying a unique slave name and password. The same
43 # slave name and password must be configured on the slave.
44 from buildbot.buildslave import BuildSlave
45
# TCP port the master listens on for slave connections; the [phase1] "port"
# option overrides the default.
slave_port = 9989

if ini.has_option("phase1", "port"):
    slave_port = ini.getint("phase1", "port")

# Filled by the slave-section loop below.
c['slaves'] = []
# Maps lock name -> MasterLock shared by all slaves declaring that name.
NetLocks = dict()
53
# Register one BuildSlave per "slave ..." section that belongs to phase 1
# (sections without an explicit "phase" option default to phase 1).
# Per-slave tuning is passed along as slave properties so build steps can
# use doStepIf predicates and renderers on them.
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    # A single-build slave can safely share its work directory.
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # Fixed copy/paste error: this previously read the "dl_lock"
                # option, so upload locks silently reused the download lock.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
85
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds: merge pending requests for the same builder into one build
c['mergeRequests'] = True

# Reduce amount of backlog data kept by the status tracker
c['buildHorizon'] = 30
c['logHorizon'] = 20
97
####### CHANGESOURCES

# Master-side state directory and the helper-scripts checkout.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
# Days after which a slave's build tree is expired (0 = never expire).
tree_expire = 0
# Extra concurrent builds to account for when computing make -j (see GetNumJobs).
other_builds = 0
# Optional compiler version constraint as ["<op>", "<version>"], or None.
cc_version = None

cc_command = "gcc"
cxx_command = "g++"

# Contents prepended to every builder's .config before defconfig.
config_seed = ""

# Optional SSH access for feed updates (key is installed on the slave).
git_ssh = False
git_ssh_key = None

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
    other_builds = ini.getint("phase1", "other_builds")

if ini.has_option("phase1", "cc_version"):
    cc_version = ini.get("phase1", "cc_version").split()
    if len(cc_version) == 1:
        # A bare version means "exactly this version".
        cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH access is useless without a key; force it off.
    git_ssh = False

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")
135
# Source repository to build; branch defaults to master.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# rsync target for built binaries; defopts are shared by all rsync calls.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# Only rsync-daemon style URLs ("host::module" or "rsync://") understand
# --contimeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

# Optional second rsync target for source tarballs.
rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    # NOTE(review): assumes source_password is always present whenever
    # source_url is set; a missing option raises NoOptionError here.
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]
159
# Optional usign package-signing key; the default comment is derived from
# the branch name (e.g. "openwrt-19.07" -> "Openwrt 19.07 key").
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

# Archive kernel module packages per kernel version alongside the images.
enable_kmod_archive = True
170
171
# find targets: keep a shallow master-side checkout of the source tree and
# ask dumpinfo.pl for the list of buildable targets (one builder each).
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# dumpinfo prints one space-separated record per target; the first field is
# the "target/subtarget" name.
for line in findtargets.stdout:
    targets.append(line.strip().split(' ')[0])
189
190
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the source repository configured
# above for new commits on the tracked branch.

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
200
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
# Rebuild every target once the tracked branch has been quiet for 60s.
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(branch=repo_branch),
    treeStableTimer=60,
    builderNames=targets))

# Allow any target build to be triggered manually from the web UI.
c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=targets))
219
220 ####### BUILDERS
221
222 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
223 # what steps, and which slaves can execute them. Note that any particular build will
224 # only take place on one slave.
225
226 from buildbot.process.factory import BuildFactory
227 from buildbot.steps.source.git import Git
228 from buildbot.steps.shell import ShellCommand
229 from buildbot.steps.shell import SetPropertyFromCommand
230 from buildbot.steps.transfer import FileUpload
231 from buildbot.steps.transfer import FileDownload
232 from buildbot.steps.transfer import StringDownload
233 from buildbot.steps.master import MasterShellCommand
234 from buildbot.process.properties import Interpolate
235 from buildbot.process import properties
236
237
# Maps user-requested "clean" property patterns to the make target each one
# runs; consumed by IsMakeCleanRequested and the per-builder clean steps.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
245
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern*."""
    def _check(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return _check
255
def IsSharedWorkdir(step):
    # doStepIf predicate: does this slave build in a shared work directory?
    shared = step.getProperty("shared_wd")
    return bool(shared)
258
def IsCleanupRequested(step):
    # Explicit cleanup only applies to dedicated (non-shared) workdirs.
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
267
def IsExpireRequested(step):
    # Tree expiry runs on dedicated workdirs that are not already being
    # cleaned up explicitly.
    return not (IsSharedWorkdir(step) or IsCleanupRequested(step))
273
def IsGitFreshRequested(step):
    # A "fresh" checkout (git clean including ignored files) is used when
    # the slave asked for cleanup.
    return bool(step.getProperty("do_cleanup"))
280
def IsGitCleanRequested(step):
    # Complement of IsGitFreshRequested: exactly one of the two git steps runs.
    if IsGitFreshRequested(step):
        return False
    return True
283
def IsTaggingRequested(step):
    """True when the build's "tag" property names a release version like
    "19.07.3" or "21.02.0-rc1"."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
290
def IsNoTaggingRequested(step):
    # Used by steps that must only run for ordinary (untagged) builds.
    if IsTaggingRequested(step):
        return False
    return True
293
def IsNoMasterBuild(step):
    # True for release-branch builds; master builds skip branch-only steps
    # (e.g. the packages-<basever> symlink preparation).
    return not (repo_branch == "master")
296
def GetBaseVersion():
    """Return the "NN.MM" part of a release branch named like "name-NN.MM",
    or "master" when the branch does not follow that pattern."""
    if not re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return "master"
    return repo_branch.split('-')[1]
302
@properties.renderer
def GetVersionPrefix(props):
    """Render the upload path prefix: "<tag>/" for tagged release builds,
    "<basever>-SNAPSHOT/" on release branches, "" on master."""
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    basever = GetBaseVersion()
    if basever == "master":
        return ""
    return "%s-SNAPSHOT/" % basever
312
@properties.renderer
def GetNumJobs(props):
    """Render the make -j value: the slave's CPU count divided among the
    builds that may run concurrently on it (its max_builds plus the
    configured other_builds), at least implicitly 1 via int division."""
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        # Use floor division: on Python 3 the old "/" yields a float, and
        # "-j4.0" is not a valid make argument.  Identical result on Python 2.
        return str(int(props["nproc"]) // (props["max_builds"] + other_builds))
    else:
        return "1"
319
@properties.renderer
def GetCC(props):
    # C compiler chosen by the findbin step, defaulting to plain "gcc".
    if not props.hasProperty("cc_command"):
        return "gcc"
    return props["cc_command"]
326
@properties.renderer
def GetCXX(props):
    # C++ compiler chosen by the findbin step, defaulting to plain "g++".
    if not props.hasProperty("cxx_command"):
        return "g++"
    return props["cxx_command"]
333
@properties.renderer
def GetCwd(props):
    # Slave-side base directory, preferring the per-build "builddir" over
    # the generic "workdir"; "/" as a last resort.
    for candidate in ("builddir", "workdir"):
        if props.hasProperty(candidate):
            return props[candidate]
    return "/"
342
@properties.renderer
def GetCCache(props):
    # The ccache binary found by the `which ccache` step, or "" when the
    # property is unset or does not actually mention ccache.
    cmd = props["ccache_command"] if props.hasProperty("ccache_command") else ""
    return cmd if "ccache" in cmd else ""
349
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer tagged (release) build requests over ordinary
    ones, falling back to the oldest pending request."""
    for candidate in requests:
        props = candidate.properties
        if props and props.hasProperty("tag"):
            return candidate
    return requests[0]
355
def MakeEnv(overrides=None, tryccache=False):
    """Assemble the environment for make/shell steps.

    CCC/CCXX always hold the real compilers; with tryccache=True, CC/CXX
    point at the ccache wrapper scripts in the build root and CCACHE at the
    detected ccache binary, otherwise they alias the real compilers.
    Entries in *overrides* win over everything.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
            'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
372
@properties.renderer
def NetLockDl(props):
    # Exclusive access to the shared download lock this slave declared, if any.
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
382
@properties.renderer
def NetLockUl(props):
    # Exclusive access to the shared upload lock this slave declared, if any.
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
392
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    The comment's trailing "secret key" is rewritten to "public key"; the
    public portion is the byte slices [0:2] + [32:40] + [72:] of the decoded
    secret key, re-encoded as base64.  Returns None when *seckey* is not
    valid base64 (e.g. an unset or garbled config value).
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # Was a bare "except:", which also swallowed KeyboardInterrupt etc.;
        # b64decode only raises TypeError (py2) / binascii.Error (ValueError).
        return None

    pubdata = seckey[0:2] + seckey[32:40] + seckey[72:]
    # Decode the base64 output so the result is a text string on Python 3
    # as well (b64encode returns bytes there, which would render as "b'...'").
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           base64.b64encode(pubdata).decode('ascii'))
401
402
c['builders'] = []

# Per-slave lock serialising dl/ population so concurrent builds on one
# slave do not fight over the shared download cache.
dlLock = locks.SlaveLock("slave_dl")

# Shell helper injected in front of wrapped commands (see
# IfBuiltinShellCommand below): exits 0 (i.e. "run the command") unless
# every file in $CHANGED_FILES belongs to a package that is not built into
# the current .config (per tmp/.packagedeps).  Collapsed onto one line.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
checkBuiltin() {
	local symbol op path file;
	for file in $CHANGED_FILES; do
		case "$file" in
			package/*/*) : ;;
			*) return 0 ;;
		esac;
	done;
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
				;; esac;
			done;
		esac;
	done < tmp/.packagedeps;
	return 1;
}
""").strip()
430
431
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only executes when the triggering changes may
    affect a package built into the current .config.

    The configured command is wrapped in the checkBuiltin shell helper
    (defined above), which inspects $CHANGED_FILES against
    tmp/.packagedeps on the slave.
    """

    def _quote(self, str):
        # Shell-quote one argument; plain-safe strings pass through as-is.
        if re.search("[^a-zA-Z0-9/_.-]", str):
            return "'%s'" %(re.sub("'", "'\"'\"'", str))
        return str

    def setCommand(self, command):
        # List commands are quoted into a single shell string, then wrapped
        # so they only run when checkBuiltin succeeds.
        # NOTE(review): `unicode` makes this Python-2 only.
        if not isinstance(command, (str, unicode)):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Export the union of files touched by all triggering changes as
        # $CHANGED_FILES so the wrapped checkBuiltin helper can see them.
        slaveEnv = self.slaveEnvironment
        if slaveEnv is None:
            slaveEnv = { }
        # Dict used as an ordered-ish set to deduplicate file names.
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullSlaveEnv = slaveEnv.copy()
        fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullSlaveEnv
459
# Names of all slaves registered above; every builder may run on any of them.
slaveNames = [slave.slavename for slave in c['slaves']]
464
465 for target in targets:
466 ts = target.split('/')
467
468 factory = BuildFactory()
469
470 # setup shared work directory if required
471 factory.addStep(ShellCommand(
472 name = "sharedwd",
473 description = "Setting up shared work directory",
474 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
475 workdir = ".",
476 haltOnFailure = True,
477 doStepIf = IsSharedWorkdir))
478
479 # find number of cores
480 factory.addStep(SetPropertyFromCommand(
481 name = "nproc",
482 property = "nproc",
483 description = "Finding number of CPUs",
484 command = ["nproc"]))
485
486 # find gcc and g++ compilers
487 factory.addStep(FileDownload(
488 name = "dlfindbinpl",
489 mastersrc = scripts_dir + '/findbin.pl',
490 slavedest = "../findbin.pl",
491 mode = 0755))
492
493 factory.addStep(SetPropertyFromCommand(
494 name = "gcc",
495 property = "cc_command",
496 description = "Finding gcc command",
497 command = [
498 "../findbin.pl", "gcc",
499 cc_version[0] if cc_version is not None else '',
500 cc_version[1] if cc_version is not None else ''
501 ],
502 haltOnFailure = True))
503
504 factory.addStep(SetPropertyFromCommand(
505 name = "g++",
506 property = "cxx_command",
507 description = "Finding g++ command",
508 command = [
509 "../findbin.pl", "g++",
510 cc_version[0] if cc_version is not None else '',
511 cc_version[1] if cc_version is not None else ''
512 ],
513 haltOnFailure = True))
514
515 # see if ccache is available
516 factory.addStep(SetPropertyFromCommand(
517 property = "ccache_command",
518 command = ["which", "ccache"],
519 description = "Testing for ccache command",
520 haltOnFailure = False,
521 flunkOnFailure = False,
522 warnOnFailure = False,
523 ))
524
525 # expire tree if needed
526 if tree_expire > 0:
527 factory.addStep(FileDownload(
528 name = "dlexpiresh",
529 doStepIf = IsExpireRequested,
530 mastersrc = scripts_dir + '/expire.sh',
531 slavedest = "../expire.sh",
532 mode = 0755))
533
534 factory.addStep(ShellCommand(
535 name = "expire",
536 description = "Checking for build tree expiry",
537 command = ["./expire.sh", str(tree_expire)],
538 workdir = ".",
539 haltOnFailure = True,
540 doStepIf = IsExpireRequested,
541 timeout = 2400))
542
543 # cleanup.sh if needed
544 factory.addStep(FileDownload(
545 name = "dlcleanupsh",
546 mastersrc = scripts_dir + '/cleanup.sh',
547 slavedest = "../cleanup.sh",
548 mode = 0755,
549 doStepIf = IsCleanupRequested))
550
551 factory.addStep(ShellCommand(
552 name = "cleanold",
553 description = "Cleaning previous builds",
554 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
555 workdir = ".",
556 haltOnFailure = True,
557 doStepIf = IsCleanupRequested,
558 timeout = 2400))
559
560 factory.addStep(ShellCommand(
561 name = "cleanup",
562 description = "Cleaning work area",
563 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
564 workdir = ".",
565 haltOnFailure = True,
566 doStepIf = IsCleanupRequested,
567 timeout = 2400))
568
569 # user-requested clean targets
570 for tuple in CleanTargetMap:
571 factory.addStep(ShellCommand(
572 name = tuple[1],
573 description = 'User-requested "make %s"' % tuple[1],
574 command = ["make", tuple[1], "V=s"],
575 env = MakeEnv(),
576 doStepIf = IsMakeCleanRequested(tuple[0])
577 ))
578
579 # Workaround bug when switching from a checked out tag back to a branch
580 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
581 factory.addStep(ShellCommand(
582 name = "gitcheckout",
583 description = "Ensure that Git HEAD is sane",
584 command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
585 haltOnFailure = True))
586
587 # check out the source
588 # Git() runs:
589 # if repo doesn't exist: 'git clone repourl'
590 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
591 # 'git fetch -t repourl branch; git reset --hard revision'
592 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
593 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
594 factory.addStep(Git(
595 name = "gitclean",
596 repourl = repo_url,
597 branch = repo_branch,
598 mode = 'full',
599 method = 'clean',
600 haltOnFailure = True,
601 doStepIf = IsGitCleanRequested,
602 ))
603
604 factory.addStep(Git(
605 name = "gitfresh",
606 repourl = repo_url,
607 branch = repo_branch,
608 mode = 'full',
609 method = 'fresh',
610 haltOnFailure = True,
611 doStepIf = IsGitFreshRequested,
612 ))
613
614 # update remote refs
615 factory.addStep(ShellCommand(
616 name = "fetchrefs",
617 description = "Fetching Git remote refs",
618 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
619 haltOnFailure = True
620 ))
621
622 # switch to tag
623 factory.addStep(ShellCommand(
624 name = "switchtag",
625 description = "Checking out Git tag",
626 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
627 haltOnFailure = True,
628 doStepIf = IsTaggingRequested
629 ))
630
631 # Verify that Git HEAD points to a tag or branch
632 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
633 factory.addStep(ShellCommand(
634 name = "gitverify",
635 description = "Ensure that Git HEAD is pointing to a branch or tag",
636 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
637 haltOnFailure = True))
638
639 factory.addStep(ShellCommand(
640 name = "rmtmp",
641 description = "Remove tmp folder",
642 command=["rm", "-rf", "tmp/"]))
643
644 # feed
645 # factory.addStep(ShellCommand(
646 # name = "feedsconf",
647 # description = "Copy the feeds.conf",
648 # command='''cp ~/feeds.conf ./feeds.conf''' ))
649
650 # feed
651 factory.addStep(ShellCommand(
652 name = "rmfeedlinks",
653 description = "Remove feed symlinks",
654 command=["rm", "-rf", "package/feeds/"]))
655
656 factory.addStep(StringDownload(
657 name = "ccachecc",
658 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
659 slavedest = "../ccache_cc.sh",
660 mode = 0755,
661 ))
662
663 factory.addStep(StringDownload(
664 name = "ccachecxx",
665 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
666 slavedest = "../ccache_cxx.sh",
667 mode = 0755,
668 ))
669
670 # Git SSH
671 if git_ssh:
672 factory.addStep(StringDownload(
673 name = "dlgitclonekey",
674 s = git_ssh_key,
675 slavedest = "../git-clone.key",
676 mode = 0600,
677 ))
678
679 factory.addStep(ShellCommand(
680 name = "patchfeedsconf",
681 description = "Patching feeds.conf",
682 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
683 haltOnFailure = True
684 ))
685
686 # feed
687 factory.addStep(ShellCommand(
688 name = "updatefeeds",
689 description = "Updating feeds",
690 command=["./scripts/feeds", "update"],
691 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
692 haltOnFailure = True
693 ))
694
695 # Git SSH
696 if git_ssh:
697 factory.addStep(ShellCommand(
698 name = "rmfeedsconf",
699 description = "Removing feeds.conf",
700 command=["rm", "feeds.conf"],
701 haltOnFailure = True
702 ))
703
704 # feed
705 factory.addStep(ShellCommand(
706 name = "installfeeds",
707 description = "Installing feeds",
708 command=["./scripts/feeds", "install", "-a"],
709 env = MakeEnv(tryccache=True),
710 haltOnFailure = True
711 ))
712
713 # seed config
714 if config_seed is not None:
715 factory.addStep(StringDownload(
716 name = "dlconfigseed",
717 s = config_seed + '\n',
718 slavedest = ".config",
719 mode = 0644
720 ))
721
722 # configure
723 factory.addStep(ShellCommand(
724 name = "newconfig",
725 description = "Seeding .config",
726 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
727 ))
728
729 factory.addStep(ShellCommand(
730 name = "delbin",
731 description = "Removing output directory",
732 command = ["rm", "-rf", "bin/"]
733 ))
734
735 factory.addStep(ShellCommand(
736 name = "defconfig",
737 description = "Populating .config",
738 command = ["make", "defconfig"],
739 env = MakeEnv()
740 ))
741
742 # check arch
743 factory.addStep(ShellCommand(
744 name = "checkarch",
745 description = "Checking architecture",
746 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
747 logEnviron = False,
748 want_stdout = False,
749 want_stderr = False,
750 haltOnFailure = True
751 ))
752
753 # find libc suffix
754 factory.addStep(SetPropertyFromCommand(
755 name = "libc",
756 property = "libc",
757 description = "Finding libc suffix",
758 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
759
760 # install build key
761 if usign_key is not None:
762 factory.addStep(StringDownload(
763 name = "dlkeybuildpub",
764 s = UsignSec2Pub(usign_key, usign_comment),
765 slavedest = "key-build.pub",
766 mode = 0600,
767 ))
768
769 factory.addStep(StringDownload(
770 name = "dlkeybuild",
771 s = "# fake private key",
772 slavedest = "key-build",
773 mode = 0600,
774 ))
775
776 factory.addStep(StringDownload(
777 name = "dlkeybuilducert",
778 s = "# fake certificate",
779 slavedest = "key-build.ucert",
780 mode = 0600,
781 ))
782
783 # prepare dl
784 factory.addStep(ShellCommand(
785 name = "dldir",
786 description = "Preparing dl/",
787 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
788 logEnviron = False,
789 want_stdout = False
790 ))
791
792 # prepare tar
793 factory.addStep(ShellCommand(
794 name = "dltar",
795 description = "Building and installing GNU tar",
796 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
797 env = MakeEnv(tryccache=True),
798 haltOnFailure = True
799 ))
800
801 # populate dl
802 factory.addStep(ShellCommand(
803 name = "dlrun",
804 description = "Populating dl/",
805 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
806 env = MakeEnv(),
807 logEnviron = False,
808 locks = [dlLock.access('exclusive')],
809 ))
810
811 factory.addStep(ShellCommand(
812 name = "cleanbase",
813 description = "Cleaning base-files",
814 command=["make", "package/base-files/clean", "V=s"]
815 ))
816
817 # build
818 factory.addStep(ShellCommand(
819 name = "tools",
820 description = "Building and installing tools",
821 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
822 env = MakeEnv(tryccache=True),
823 haltOnFailure = True
824 ))
825
826 factory.addStep(ShellCommand(
827 name = "toolchain",
828 description = "Building and installing toolchain",
829 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
830 env = MakeEnv(),
831 haltOnFailure = True
832 ))
833
834 factory.addStep(ShellCommand(
835 name = "kmods",
836 description = "Building kmods",
837 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
838 env = MakeEnv(),
839 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
840 haltOnFailure = True
841 ))
842
843 # find kernel version
844 factory.addStep(SetPropertyFromCommand(
845 name = "kernelversion",
846 property = "kernelversion",
847 description = "Finding the effective Kernel version",
848 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
849 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
850 ))
851
852 factory.addStep(ShellCommand(
853 name = "pkgclean",
854 description = "Cleaning up package build",
855 command=["make", "package/cleanup", "V=s"]
856 ))
857
858 factory.addStep(ShellCommand(
859 name = "pkgbuild",
860 description = "Building packages",
861 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
862 env = MakeEnv(),
863 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
864 haltOnFailure = True
865 ))
866
867 # factory.addStep(IfBuiltinShellCommand(
868 factory.addStep(ShellCommand(
869 name = "pkginstall",
870 description = "Installing packages",
871 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
872 env = MakeEnv(),
873 haltOnFailure = True
874 ))
875
876 factory.addStep(ShellCommand(
877 name = "pkgindex",
878 description = "Indexing packages",
879 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
880 env = MakeEnv(),
881 haltOnFailure = True
882 ))
883
884 if enable_kmod_archive:
885 # embed kmod repository. Must happen before 'images'
886
887 # find rootfs staging directory
888 factory.addStep(SetPropertyFromCommand(
889 name = "stageroot",
890 property = "stageroot",
891 description = "Finding the rootfs staging directory",
892 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
893 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
894 ))
895
896 factory.addStep(ShellCommand(
897 name = "filesdir",
898 description = "Creating file overlay directory",
899 command=["mkdir", "-p", "files/etc/opkg"],
900 haltOnFailure = True
901 ))
902
903 factory.addStep(ShellCommand(
904 name = "kmodconfig",
905 description = "Embedding kmod repository configuration",
906 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
907 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
908 haltOnFailure = True
909 ))
910
911 #factory.addStep(IfBuiltinShellCommand(
912 factory.addStep(ShellCommand(
913 name = "images",
914 description = "Building and installing images",
915 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
916 env = MakeEnv(),
917 haltOnFailure = True
918 ))
919
920 factory.addStep(ShellCommand(
921 name = "buildinfo",
922 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
923 command = "make -j1 buildinfo V=s || true",
924 env = MakeEnv(),
925 haltOnFailure = True
926 ))
927
928 factory.addStep(ShellCommand(
929 name = "checksums",
930 description = "Calculating checksums",
931 command=["make", "-j1", "checksum", "V=s"],
932 env = MakeEnv(),
933 haltOnFailure = True
934 ))
935
936 if enable_kmod_archive:
937 factory.addStep(ShellCommand(
938 name = "kmoddir",
939 description = "Creating kmod directory",
940 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
941 haltOnFailure = True
942 ))
943
944 factory.addStep(ShellCommand(
945 name = "kmodprepare",
946 description = "Preparing kmod archive",
947 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
948 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
949 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
950 haltOnFailure = True
951 ))
952
953 factory.addStep(ShellCommand(
954 name = "kmodindex",
955 description = "Indexing kmod archive",
956 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
957 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
958 env = MakeEnv(),
959 haltOnFailure = True
960 ))
961
962 # sign
963 if ini.has_option("gpg", "key") or usign_key is not None:
964 factory.addStep(MasterShellCommand(
965 name = "signprepare",
966 description = "Preparing temporary signing directory",
967 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
968 haltOnFailure = True
969 ))
970
971 factory.addStep(ShellCommand(
972 name = "signpack",
973 description = "Packing files to sign",
974 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
975 haltOnFailure = True
976 ))
977
978 factory.addStep(FileUpload(
979 slavesrc = "sign.tar.gz",
980 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
981 haltOnFailure = True
982 ))
983
984 factory.addStep(MasterShellCommand(
985 name = "signfiles",
986 description = "Signing files",
987 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
988 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
989 haltOnFailure = True
990 ))
991
992 factory.addStep(FileDownload(
993 name = "dlsigntargz",
994 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
995 slavedest = "sign.tar.gz",
996 haltOnFailure = True
997 ))
998
999 factory.addStep(ShellCommand(
1000 name = "signunpack",
1001 description = "Unpacking signed files",
1002 command = ["tar", "-xzf", "sign.tar.gz"],
1003 haltOnFailure = True
1004 ))
1005
# upload
# Build the remote directory skeleton locally under tmp/upload/, push it to
# the download server, then fetch the remote sha256sums so only changed
# files need to be transferred afterwards.
factory.addStep(ShellCommand(
    name = "dirprepare",
    description = "Preparing upload directory structure",
    command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    haltOnFailure = True
))

# Point <prefix>packages at the shared ../packages-<basever> tree; only
# created for non-master builds (see doStepIf).
factory.addStep(ShellCommand(
    name = "linkprepare",
    description = "Preparing repository symlink",
    command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
    doStepIf = IsNoMasterBuild,
    haltOnFailure = True
))

if enable_kmod_archive:
    # Per-kernel-version kmods/ directory for the kmod archive upload below.
    factory.addStep(ShellCommand(
        name = "kmoddirprepare",
        description = "Preparing kmod archive upload directory",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))

# Mirror the directory skeleton to the download server.
# logEnviron = False keeps RSYNC_PASSWORD out of the step logs.
factory.addStep(ShellCommand(
    name = "dirupload",
    description = "Uploading directory structure",
    command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))

# download remote sha256sums to 'target-sha256sums'
# Best effort (halt/flunk/warn all disabled): a missing remote file simply
# means every local file is treated as new.
factory.addStep(ShellCommand(
    name = "target-sha256sums",
    description = "Fetching remote sha256sums for target",
    command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    logEnviron = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
))
1050
# build list of files to upload
# sha2rsync.pl diffs the remote sha256sums fetched above against the local
# ones and writes the names of new/changed files to 'rsynclist'.
factory.addStep(FileDownload(
    name = "dlsha2rsyncpl",
    mastersrc = scripts_dir + '/sha2rsync.pl',
    slavedest = "../sha2rsync.pl",
    mode = 0755,  # Python 2 octal literal: make the helper executable
))

factory.addStep(ShellCommand(
    name = "buildlist",
    description = "Building list of files to upload",
    command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
    haltOnFailure = True,
))

# rsync.sh: wrapper used by all subsequent upload/prune steps.
factory.addStep(FileDownload(
    name = "dlrsync.sh",
    mastersrc = scripts_dir + '/rsync.sh',
    slavedest = "../rsync.sh",
    mode = 0755
))

# upload new files and update existing ones
# Only files named in rsynclist are transferred; /kmods/ is excluded here
# and handled by its own step. --delay-updates + --partial-dir make the
# remote update near-atomic and resumable per target/subtarget.
factory.addStep(ShellCommand(
    name = "targetupload",
    description = "Uploading target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
             Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))

# delete files which don't exist locally
# --delete with --existing + --ignore-existing makes this a deletion-only
# pass: nothing is created or updated, remote-only files are removed.
factory.addStep(ShellCommand(
    name = "targetprune",
    description = "Pruning target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
             Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))
1096
if enable_kmod_archive:
    # Mirror the per-kernel-version kmod archive, deleting stale files.
    factory.addStep(ShellCommand(
        name = "kmodupload",
        description = "Uploading kmod archive",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
                 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
    ))

if rsync_src_url is not None:
    # List non-empty, non-hidden files in dl/ newer than .config, i.e. the
    # source archives this build actually downloaded.
    factory.addStep(ShellCommand(
        name = "sourcelist",
        description = "Finding source archives to upload",
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
        haltOnFailure = True
    ))

    # --size-only: skip files whose remote copy already has the same size.
    # The partial-dir name includes the slave name, since multiple slaves
    # may upload sources concurrently.
    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = True,
        logEnviron = False,
    ))

if False:
    # Disabled: package tree upload, kept for reference.
    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False,
    ))
1136
# logs
if False:
    # Disabled: build-log upload, kept for reference.
    factory.addStep(ShellCommand(
        name = "upload",
        description = "Uploading logs",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        alwaysRun = True,
        logEnviron = False,
    ))

# Diagnostics: report free disk space, even when earlier steps failed
# (alwaysRun); LC_ALL=C keeps the output format stable.
factory.addStep(ShellCommand(
    name = "df",
    description = "Reporting disk usage",
    command=["df", "-h", "."],
    env={'LC_ALL': 'C'},
    haltOnFailure = False,
    alwaysRun = True
))

# Diagnostics: ccache hit/miss statistics; any failure here is ignored
# entirely (halt/flunk/warn all disabled). PATH is extended so the
# in-tree staging_dir ccache is found if no system one exists.
factory.addStep(ShellCommand(
    name = "ccachestat",
    description = "Reporting ccache stats",
    command=["ccache", "-s"],
    env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
    want_stderr = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True,
))

# Re-imported once per loop iteration; harmless since Python caches imports.
from buildbot.config import BuilderConfig

# Register the finished factory as one builder per target string.
c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1173
1174
1175 ####### STATUS TARGETS
1176
1177 # 'status' is a list of Status Targets. The results of each build will be
1178 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1179 # including web pages, email senders, and IRC bots.
1180
c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

# The web status is only served when a bind address/port is configured.
if ini.has_option("phase1", "status_bind"):
    web_kwargs = { "http_port": ini.get("phase1", "status_bind") }

    # With credentials configured, gate the dangerous actions behind HTTP
    # basic auth; without them, WebStatus runs with its defaults.
    if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
        web_kwargs["authz"] = authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )

    c['status'].append(html.WebStatus(**web_kwargs))
1203
1204
from buildbot.status import words

# An IRC bot is attached only when host, nickname and channel are all set.
if all(ini.has_option("irc", opt) for opt in ("host", "nickname", "channel")):
    # Optional settings with their defaults.
    irc_port = ini.getint("irc", "port") if ini.has_option("irc", "port") else 6667
    irc_pass = ini.get("irc", "password") if ini.has_option("irc", "password") else None

    bot = words.IRC(
        ini.get("irc", "host"),
        ini.get("irc", "nickname"),
        port = irc_port,
        password = irc_pass,
        channels = [{ "channel": ini.get("irc", "channel") }],
        # Keep the channel quiet: only exceptions and pass/fail transitions.
        notify_events = {
            'exception': 1,
            'successToFailure': 1,
            'failureToSuccess': 1
        }
    )

    c['status'].append(bot)
1230
####### DB URL

# State database. The default on-disk SQLite file is sufficient for all
# but the largest installations.
c['db'] = dict(db_url = "sqlite:///state.sqlite")