phase1: add separate option for kmod repo embedding
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from buildbot import locks
11 from buildbot.changes import filter
12 from buildbot.changes.gitpoller import GitPoller
13 from buildbot.config import BuilderConfig
14 from buildbot.plugins import reporters
15 from buildbot.plugins import schedulers
16 from buildbot.plugins import steps
17 from buildbot.plugins import util
18 from buildbot.process import properties
19 from buildbot.process.factory import BuildFactory
20 from buildbot.process.properties import Interpolate
21 from buildbot.process.properties import Property
22 from buildbot.schedulers.basic import SingleBranchScheduler
23 from buildbot.schedulers.forcesched import BaseParameter
24 from buildbot.schedulers.forcesched import ForceScheduler
25 from buildbot.schedulers.forcesched import ValidationError
26 from buildbot.steps.master import MasterShellCommand
27 from buildbot.steps.shell import SetPropertyFromCommand
28 from buildbot.steps.shell import ShellCommand
29 from buildbot.steps.source.git import Git
30 from buildbot.steps.transfer import FileDownload
31 from buildbot.steps.transfer import FileUpload
32 from buildbot.steps.transfer import StringDownload
33 from buildbot.worker import Worker
34
35
36 # This is a sample buildmaster config file. It must be installed as
37 # 'master.cfg' in your buildmaster's base directory.
38
39 ini = configparser.ConfigParser()
40 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
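# A minimal, illustrative sketch of the config.ini this file expects. Section and
# option names mirror the ini.get()/ini.has_option() calls below; every value is
# just a placeholder:
#
#   [general]
#   title = Example buildbot
#   title_url = https://example.org/
#   workdir = /home/buildbot/phase1
#
#   [phase1]
#   buildbot_url = https://example.org/buildbot/
#   status_bind = tcp:8010
#
#   [repo]
#   url = https://git.example.org/openwrt.git
#   branch = master
#
#   [rsync]
#   binary_url = rsync://uploader@example.org/bin
#   binary_password = secret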
41
42 # This is the dictionary that the buildmaster pays attention to. We also use
43 # a shorter alias to save typing.
44 c = BuildmasterConfig = {}
45
46 ####### PROJECT IDENTITY
47
48 # the 'title' string will appear at the top of this buildbot
49 # installation's html.WebStatus home page (linked to the
50 # 'titleURL') and is embedded in the title of the waterfall HTML page.
51
52 c['title'] = ini.get("general", "title")
53 c['titleURL'] = ini.get("general", "title_url")
54
55 # the 'buildbotURL' string should point to the location where the buildbot's
56 # internal web server (usually the html.WebStatus page) is visible. This
57 # typically uses the port number set in the Waterfall 'status' entry, but
58 # with an externally-visible host name which the buildbot cannot figure out
59 # without some help.
60
61 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
62
63 ####### BUILDSLAVES
64
65 # The 'workers' list defines the set of recognized buildslaves. Each element is
66 # a Worker object, specifying a unique slave name and password. The same
67 # slave name and password must be configured on the slave.
68
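# A hypothetical worker entry (all values are placeholders); the options listed
# are exactly the ones parsed from the "slave " sections in the loop below:
#
#   [slave builder-01]
#   name = builder-01
#   password = secret
#   phase = 1
#   builds = 2
#   cleanup = no
#   shared_wd = no
#   dl_lock = dlnet
#   ul_lock = ulnet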
69 slave_port = 9989
70
71 if ini.has_option("phase1", "port"):
72 slave_port = ini.get("phase1", "port")
73
74 c['workers'] = []
75 NetLocks = dict()
76
77 for section in ini.sections():
78 if section.startswith("slave "):
79 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
80 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
81 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
82 name = ini.get(section, "name")
83 password = ini.get(section, "password")
84 max_builds = 1
85 if ini.has_option(section, "builds"):
86 max_builds = ini.getint(section, "builds")
87 sl_props['max_builds'] = max_builds
88 if max_builds == 1:
89 sl_props['shared_wd'] = True
90 if ini.has_option(section, "cleanup"):
91 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
92 if ini.has_option(section, "dl_lock"):
93 lockname = ini.get(section, "dl_lock")
94 sl_props['dl_lock'] = lockname
95 if lockname not in NetLocks:
96 NetLocks[lockname] = locks.MasterLock(lockname)
97 if ini.has_option(section, "ul_lock"):
98 lockname = ini.get(section, "ul_lock")
99 sl_props['ul_lock'] = lockname
100 if lockname not in NetLocks:
101 NetLocks[lockname] = locks.MasterLock(lockname)
102 if ini.has_option(section, "shared_wd"):
103 shared_wd = ini.getboolean(section, "shared_wd")
104 sl_props['shared_wd'] = shared_wd
105 if shared_wd and (max_builds != 1):
106 raise ValueError('max_builds must be 1 with shared workdir!')
107 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
108
109 # 'protocols' defines the TCP port to listen on for connections from workers.
110 # This must match the value configured into the workers (with their
111 # --master option)
112 c['protocols'] = {'pb': {'port': slave_port}}
113
114 # coalesce builds
115 c['collapseRequests'] = True
116
117 # Reduce amount of backlog data
118 c['buildHorizon'] = 30
119 c['logHorizon'] = 20
120
121 ####### CHANGESOURCES
122
123 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
124 scripts_dir = os.path.abspath("../scripts")
125 tree_expire = 0
126 other_builds = 0
127 cc_version = None
128
129 cc_command = "gcc"
130 cxx_command = "g++"
131
132 config_seed = ""
133
134 git_ssh = False
135 git_ssh_key = None
136
137 if ini.has_option("phase1", "expire"):
138 tree_expire = ini.getint("phase1", "expire")
139
140 if ini.has_option("phase1", "other_builds"):
141 other_builds = ini.getint("phase1", "other_builds")
142
143 if ini.has_option("phase1", "cc_version"):
144 cc_version = ini.get("phase1", "cc_version").split()
145 if len(cc_version) == 1:
146 cc_version = ["eq", cc_version[0]]
147
148 if ini.has_option("general", "git_ssh"):
149 git_ssh = ini.getboolean("general", "git_ssh")
150
151 if ini.has_option("general", "git_ssh_key"):
152 git_ssh_key = ini.get("general", "git_ssh_key")
153 else:
154 git_ssh = False
155
156 if ini.has_option("phase1", "config_seed"):
157 config_seed = ini.get("phase1", "config_seed")
158
159 repo_url = ini.get("repo", "url")
160 repo_branch = "master"
161
162 if ini.has_option("repo", "branch"):
163 repo_branch = ini.get("repo", "branch")
164
165 rsync_bin_url = ini.get("rsync", "binary_url")
166 rsync_bin_key = ini.get("rsync", "binary_password")
167 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
168
169 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
170 rsync_bin_defopts += ["--contimeout=20"]
171
172 rsync_src_url = None
173 rsync_src_key = None
174 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
175
176 if ini.has_option("rsync", "source_url"):
177 rsync_src_url = ini.get("rsync", "source_url")
178 rsync_src_key = ini.get("rsync", "source_password")
179
180 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
181 rsync_src_defopts += ["--contimeout=20"]
182
183 usign_key = None
184 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
185
186 if ini.has_option("usign", "key"):
187 usign_key = ini.get("usign", "key")
188
189 if ini.has_option("usign", "comment"):
190 usign_comment = ini.get("usign", "comment")
191
192 enable_kmod_archive = False
193 embed_kmod_repository = False
194
195 if ini.has_option("phase1", "kmod_archive"):
196 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
197
198 if ini.has_option("phase1", "kmod_repository"):
199 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
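# The two options are independent: kmod_archive controls whether a per-kernel
# kmods/ archive is built and uploaded, while kmod_repository additionally embeds
# a matching opkg feed into the images (it only takes effect together with
# kmod_archive). An illustrative config.ini fragment, values are examples only:
#
#   [phase1]
#   kmod_archive = yes
#   kmod_repository = yes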
200
201
202 # find targets
203 targets = [ ]
204
205 if not os.path.isdir(work_dir+'/source.git'):
206 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
207 else:
208 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
209
210 findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
211 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
212
213 while True:
214 line = findtargets.stdout.readline()
215 if not line:
216 break
217 ta = line.decode().strip().split(' ')
218 targets.append(ta[0])
219
220
221 # the 'change_source' setting tells the buildmaster how it should find out
222 # about source code changes. Here we poll the configured source repository.
223
224 c['change_source'] = []
225 c['change_source'].append(GitPoller(
226 repo_url,
227 workdir=work_dir+'/work.git', branch=repo_branch,
228 pollinterval=300))
229
230 ####### SCHEDULERS
231
232 # Configure the Schedulers, which decide how to react to incoming changes. In this
233 # case, kick off a build on every target builder whenever the branch changes.
234
235 class TagChoiceParameter(BaseParameter):
236 spec_attributes = ["strict", "choices"]
237 type = "list"
238 strict = True
239
240 def __init__(self, name, label=None, **kw):
241 super().__init__(name, label, **kw)
242 self._choice_list = []
243
244 @property
245 def choices(self):
246 taglist = []
247 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
248
249 if basever:
250 findtags = subprocess.Popen(
251 ['git', 'ls-remote', '--tags', repo_url],
252 stdout = subprocess.PIPE)
253
254 while True:
255 line = findtags.stdout.readline()
256
257 if not line:
258 break
259
260 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
261
262 if tagver and tagver[1].find(basever[1]) == 0:
263 taglist.append(tagver[1])
264
265 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
266 taglist.insert(0, '')
267
268 self._choice_list = taglist
269
270 return self._choice_list
271
272 def parse_from_arg(self, s):
273 if self.strict and s not in self._choice_list:
274 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
275 return s
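# Example of the tag discovery above (branch name assumed): for a repo_branch of
# "openwrt-21.02", basever is "21.02" and tags such as v21.02.3 or v21.02.0-rc1
# reported by 'git ls-remote --tags' are offered to the force scheduler, newest
# first, with the leading empty entry meaning "build the branch head".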
276
277 c['schedulers'] = []
278 c['schedulers'].append(SingleBranchScheduler(
279 name = "all",
280 change_filter = filter.ChangeFilter(branch=repo_branch),
281 treeStableTimer = 60,
282 builderNames = targets))
283
284 c['schedulers'].append(ForceScheduler(
285 name = "force",
286 buttonName = "Force builds",
287 label = "Force build details",
288 builderNames = [ "00_force_build" ],
289
290 codebases = [
291 util.CodebaseParameter(
292 "",
293 label = "Repository",
294 branch = util.FixedParameter(name = "branch", default = ""),
295 revision = util.FixedParameter(name = "revision", default = ""),
296 repository = util.FixedParameter(name = "repository", default = ""),
297 project = util.FixedParameter(name = "project", default = "")
298 )
299 ],
300
301 reason = util.StringParameter(
302 name = "reason",
303 label = "Reason",
304 default = "Trigger build",
305 required = True,
306 size = 80
307 ),
308
309 properties = [
310 util.NestedParameter(
311 name="options",
312 label="Build Options",
313 layout="vertical",
314 fields=[
315 util.ChoiceStringParameter(
316 name = "target",
317 label = "Build target",
318 default = "all",
319 choices = [ "all" ] + targets
320 ),
321 TagChoiceParameter(
322 name = "tag",
323 label = "Build tag",
324 default = ""
325 )
326 ]
327 )
328 ]
329 ))
330
331 ####### BUILDERS
332
333 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
334 # what steps, and which workers can execute them. Note that any particular build will
335 # only take place on one worker.
336
337 CleanTargetMap = [
338 [ "tools", "tools/clean" ],
339 [ "chain", "toolchain/clean" ],
340 [ "linux", "target/linux/clean" ],
341 [ "dir", "dirclean" ],
342 [ "dist", "distclean" ]
343 ]
344
345 def IsMakeCleanRequested(pattern):
346 def CheckCleanProperty(step):
347 val = step.getProperty("clean")
348 if val and re.match(pattern, val):
349 return True
350 else:
351 return False
352
353 return CheckCleanProperty
354
355 def IsSharedWorkdir(step):
356 return bool(step.getProperty("shared_wd"))
357
358 def IsCleanupRequested(step):
359 if IsSharedWorkdir(step):
360 return False
361 do_cleanup = step.getProperty("do_cleanup")
362 if do_cleanup:
363 return True
364 else:
365 return False
366
367 def IsExpireRequested(step):
368 if IsSharedWorkdir(step):
369 return False
370 else:
371 return not IsCleanupRequested(step)
372
373 def IsGitFreshRequested(step):
374 do_cleanup = step.getProperty("do_cleanup")
375 if do_cleanup:
376 return True
377 else:
378 return False
379
380 def IsGitCleanRequested(step):
381 return not IsGitFreshRequested(step)
382
383 def IsTaggingRequested(step):
384 val = step.getProperty("tag")
385 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
386 return True
387 else:
388 return False
389
390 def IsNoTaggingRequested(step):
391 return not IsTaggingRequested(step)
392
393 def IsNoMasterBuild(step):
394 return repo_branch != "master"
395
396 def GetBaseVersion():
397 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
398 return repo_branch.split('-')[1]
399 else:
400 return "master"
401
402 @properties.renderer
403 def GetVersionPrefix(props):
404 basever = GetBaseVersion()
405 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
406 return "%s/" % props["tag"]
407 elif basever != "master":
408 return "%s-SNAPSHOT/" % basever
409 else:
410 return ""
411
412 @properties.renderer
413 def GetNumJobs(props):
414 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
415 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
416 else:
417 return "1"
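# For example (numbers assumed): with nproc=16, max_builds=2 and other_builds=0,
# each build gets -j8; the worker's CPUs are split across the builds it may run
# concurrently plus any 'other_builds' reserved via the config.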
418
419 @properties.renderer
420 def GetCC(props):
421 if props.hasProperty("cc_command"):
422 return props["cc_command"]
423 else:
424 return "gcc"
425
426 @properties.renderer
427 def GetCXX(props):
428 if props.hasProperty("cxx_command"):
429 return props["cxx_command"]
430 else:
431 return "g++"
432
433 @properties.renderer
434 def GetCwd(props):
435 if props.hasProperty("builddir"):
436 return props["builddir"]
437 elif props.hasProperty("workdir"):
438 return props["workdir"]
439 else:
440 return "/"
441
442 @properties.renderer
443 def GetCCache(props):
444 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
445 return props["ccache_command"]
446 else:
447 return ""
448
449 def GetNextBuild(builder, requests):
450 for r in requests:
451 if r.properties and r.properties.hasProperty("tag"):
452 return r
453 return requests[0]
454
455 def MakeEnv(overrides=None, tryccache=False):
456 env = {
457 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
458 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
459 }
460 if tryccache:
461 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
462 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
463 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
464 else:
465 env['CC'] = env['CCC']
466 env['CXX'] = env['CCXX']
467 env['CCACHE'] = ''
468 if overrides is not None:
469 env.update(overrides)
470 return env
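# MakeEnv() always exports the detected compilers as CCC/CCXX; with tryccache=True
# it points CC/CXX at the ccache_cc.sh / ccache_cxx.sh wrapper scripts that later
# StringDownload steps place in the builder directory, otherwise CC/CXX are the
# plain compilers and CCACHE is left empty.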
471
472 @properties.renderer
473 def NetLockDl(props):
474 lock = None
475 if props.hasProperty("dl_lock"):
476 lock = NetLocks[props["dl_lock"]]
477 if lock is not None:
478 return [lock.access('exclusive')]
479 else:
480 return []
481
482 @properties.renderer
483 def NetLockUl(props):
484 lock = None
485 if props.hasProperty("ul_lock"):
486 lock = NetLocks[props["ul_lock"]]
487 if lock is not None:
488 return [lock.access('exclusive')]
489 else:
490 return []
491
492 @util.renderer
493 def TagPropertyValue(props):
494 if props.hasProperty("options"):
495 options = props.getProperty("options")
496 if type(options) is dict:
497 return options.get("tag")
498 return None
499
500 def IsTargetSelected(target):
501 def CheckTargetProperty(step):
502 try:
503 options = step.getProperty("options")
504 if type(options) is dict:
505 selected_target = options.get("target", "all")
506 if selected_target != "all" and selected_target != target:
507 return False
508 except KeyError:
509 pass
510
511 return True
512
513 return CheckTargetProperty
514
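# Derive a usign/signify public key from the base64-encoded secret key. This relies
# on the (assumed) signify secret-key layout: bytes 0-1 hold the algorithm id,
# bytes 32-39 the key number, and bytes 72 onward the raw public key, which is
# exactly what the public-key file concatenates.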
515 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
516 try:
517 seckey = base64.b64decode(seckey)
518 except:
519 return None
520
521 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
522 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
523
524
525 c['builders'] = []
526
527 dlLock = locks.WorkerLock("slave_dl")
528
529 checkBuiltin = re.sub('[\t\n ]+', ' ', """
530 checkBuiltin() {
531 local symbol op path file;
532 for file in $CHANGED_FILES; do
533 case "$file" in
534 package/*/*) : ;;
535 *) return 0 ;;
536 esac;
537 done;
538 while read symbol op path; do
539 case "$symbol" in package-*)
540 symbol="${symbol##*(}";
541 symbol="${symbol%)}";
542 for file in $CHANGED_FILES; do
543 case "$file" in "package/$path/"*)
544 grep -qsx "$symbol=y" .config && return 0
545 ;; esac;
546 done;
547 esac;
548 done < tmp/.packagedeps;
549 return 1;
550 }
551 """).strip()
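# The shell helper above succeeds (letting the wrapped command run) when a changed
# file lies outside package/*/*, or when tmp/.packagedeps shows that one of the
# touched packages is selected as built-in (=y) in the current .config; otherwise
# the step exits 0 and the package build is skipped. It is used by
# IfBuiltinShellCommand, which is currently disabled in the factory below.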
552
553
554 class IfBuiltinShellCommand(ShellCommand):
555 def _quote(self, str):
556 if re.search("[^a-zA-Z0-9/_.-]", str):
557 return "'%s'" %(re.sub("'", "'\"'\"'", str))
558 return str
559
560 def setCommand(self, command):
561 if not isinstance(command, str):
562 command = ' '.join(map(self._quote, command))
563 self.command = [
564 '/bin/sh', '-c',
565 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
566 ]
567
568 def setupEnvironment(self, cmd):
569 slaveEnv = self.slaveEnvironment
570 if slaveEnv is None:
571 slaveEnv = { }
572 changedFiles = { }
573 for request in self.build.requests:
574 for source in request.sources:
575 for change in source.changes:
576 for file in change.files:
577 changedFiles[file] = True
578 fullSlaveEnv = slaveEnv.copy()
579 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
580 cmd.args['env'] = fullSlaveEnv
581
582 slaveNames = [ ]
583
584 for slave in c['workers']:
585 slaveNames.append(slave.workername)
586
587 force_factory = BuildFactory()
588
589 c['builders'].append(BuilderConfig(
590 name = "00_force_build",
591 workernames = slaveNames,
592 factory = force_factory))
593
594 for target in targets:
595 ts = target.split('/')
596
597 factory = BuildFactory()
598
599 # setup shared work directory if required
600 factory.addStep(ShellCommand(
601 name = "sharedwd",
602 description = "Setting up shared work directory",
603 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
604 workdir = ".",
605 haltOnFailure = True,
606 doStepIf = IsSharedWorkdir))
607
608 # find number of cores
609 factory.addStep(SetPropertyFromCommand(
610 name = "nproc",
611 property = "nproc",
612 description = "Finding number of CPUs",
613 command = ["nproc"]))
614
615 # find gcc and g++ compilers
616 factory.addStep(FileDownload(
617 name = "dlfindbinpl",
618 mastersrc = scripts_dir + '/findbin.pl',
619 workerdest = "../findbin.pl",
620 mode = 0o755))
621
622 factory.addStep(SetPropertyFromCommand(
623 name = "gcc",
624 property = "cc_command",
625 description = "Finding gcc command",
626 command = [
627 "../findbin.pl", "gcc",
628 cc_version[0] if cc_version is not None else '',
629 cc_version[1] if cc_version is not None else ''
630 ],
631 haltOnFailure = True))
632
633 factory.addStep(SetPropertyFromCommand(
634 name = "g++",
635 property = "cxx_command",
636 description = "Finding g++ command",
637 command = [
638 "../findbin.pl", "g++",
639 cc_version[0] if cc_version is not None else '',
640 cc_version[1] if cc_version is not None else ''
641 ],
642 haltOnFailure = True))
643
644 # see if ccache is available
645 factory.addStep(SetPropertyFromCommand(
646 property = "ccache_command",
647 command = ["which", "ccache"],
648 description = "Testing for ccache command",
649 haltOnFailure = False,
650 flunkOnFailure = False,
651 warnOnFailure = False,
652 ))
653
654 # expire tree if needed
655 if tree_expire > 0:
656 factory.addStep(FileDownload(
657 name = "dlexpiresh",
658 doStepIf = IsExpireRequested,
659 mastersrc = scripts_dir + '/expire.sh',
660 workerdest = "../expire.sh",
661 mode = 0o755))
662
663 factory.addStep(ShellCommand(
664 name = "expire",
665 description = "Checking for build tree expiry",
666 command = ["./expire.sh", str(tree_expire)],
667 workdir = ".",
668 haltOnFailure = True,
669 doStepIf = IsExpireRequested,
670 timeout = 2400))
671
672 # cleanup.sh if needed
673 factory.addStep(FileDownload(
674 name = "dlcleanupsh",
675 mastersrc = scripts_dir + '/cleanup.sh',
676 workerdest = "../cleanup.sh",
677 mode = 0o755,
678 doStepIf = IsCleanupRequested))
679
680 factory.addStep(ShellCommand(
681 name = "cleanold",
682 description = "Cleaning previous builds",
683 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
684 workdir = ".",
685 haltOnFailure = True,
686 doStepIf = IsCleanupRequested,
687 timeout = 2400))
688
689 factory.addStep(ShellCommand(
690 name = "cleanup",
691 description = "Cleaning work area",
692 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
693 workdir = ".",
694 haltOnFailure = True,
695 doStepIf = IsCleanupRequested,
696 timeout = 2400))
697
698 # user-requested clean targets
699 for tuple in CleanTargetMap:
700 factory.addStep(ShellCommand(
701 name = tuple[1],
702 description = 'User-requested "make %s"' % tuple[1],
703 command = ["make", tuple[1], "V=s"],
704 env = MakeEnv(),
705 doStepIf = IsMakeCleanRequested(tuple[0])
706 ))
707
708 # Workaround bug when switching from a checked out tag back to a branch
709 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
710 factory.addStep(ShellCommand(
711 name = "gitcheckout",
712 description = "Ensure that Git HEAD is sane",
713 command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
714 haltOnFailure = True))
715
716 # check out the source
717 # Git() runs:
718 # if repo doesn't exist: 'git clone repourl'
719 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
720 # 'git fetch -t repourl branch; git reset --hard revision'
721 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
722 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
723 factory.addStep(Git(
724 name = "gitclean",
725 repourl = repo_url,
726 branch = repo_branch,
727 mode = 'full',
728 method = 'clean',
729 haltOnFailure = True,
730 doStepIf = IsGitCleanRequested,
731 ))
732
733 factory.addStep(Git(
734 name = "gitfresh",
735 repourl = repo_url,
736 branch = repo_branch,
737 mode = 'full',
738 method = 'fresh',
739 haltOnFailure = True,
740 doStepIf = IsGitFreshRequested,
741 ))
742
743 # update remote refs
744 factory.addStep(ShellCommand(
745 name = "fetchrefs",
746 description = "Fetching Git remote refs",
747 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
748 haltOnFailure = True
749 ))
750
751 # switch to tag
752 factory.addStep(ShellCommand(
753 name = "switchtag",
754 description = "Checking out Git tag",
755 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
756 haltOnFailure = True,
757 doStepIf = IsTaggingRequested
758 ))
759
760 # Verify that Git HEAD points to a tag or branch
761 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
762 factory.addStep(ShellCommand(
763 name = "gitverify",
764 description = "Ensure that Git HEAD is pointing to a branch or tag",
765 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
766 haltOnFailure = True))
767
768 factory.addStep(ShellCommand(
769 name = "rmtmp",
770 description = "Remove tmp folder",
771 command=["rm", "-rf", "tmp/"]))
772
773 # feed
774 # factory.addStep(ShellCommand(
775 # name = "feedsconf",
776 # description = "Copy the feeds.conf",
777 # command='''cp ~/feeds.conf ./feeds.conf''' ))
778
779 # feed
780 factory.addStep(ShellCommand(
781 name = "rmfeedlinks",
782 description = "Remove feed symlinks",
783 command=["rm", "-rf", "package/feeds/"]))
784
785 factory.addStep(StringDownload(
786 name = "ccachecc",
787 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
788 workerdest = "../ccache_cc.sh",
789 mode = 0o755,
790 ))
791
792 factory.addStep(StringDownload(
793 name = "ccachecxx",
794 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
795 workerdest = "../ccache_cxx.sh",
796 mode = 0o755,
797 ))
798
799 # Git SSH
800 if git_ssh:
801 factory.addStep(StringDownload(
802 name = "dlgitclonekey",
803 s = git_ssh_key,
804 workerdest = "../git-clone.key",
805 mode = 0o600,
806 ))
807
808 factory.addStep(ShellCommand(
809 name = "patchfeedsconf",
810 description = "Patching feeds.conf",
811 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
812 haltOnFailure = True
813 ))
814
815 # feed
816 factory.addStep(ShellCommand(
817 name = "updatefeeds",
818 description = "Updating feeds",
819 command=["./scripts/feeds", "update"],
820 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
821 haltOnFailure = True
822 ))
823
824 # Git SSH
825 if git_ssh:
826 factory.addStep(ShellCommand(
827 name = "rmfeedsconf",
828 description = "Removing feeds.conf",
829 command=["rm", "feeds.conf"],
830 haltOnFailure = True
831 ))
832
833 # feed
834 factory.addStep(ShellCommand(
835 name = "installfeeds",
836 description = "Installing feeds",
837 command=["./scripts/feeds", "install", "-a"],
838 env = MakeEnv(tryccache=True),
839 haltOnFailure = True
840 ))
841
842 # seed config
843 if config_seed is not None:
844 factory.addStep(StringDownload(
845 name = "dlconfigseed",
846 s = config_seed + '\n',
847 workerdest = ".config",
848 mode = 0o644
849 ))
850
851 # configure
852 factory.addStep(ShellCommand(
853 name = "newconfig",
854 description = "Seeding .config",
855 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
856 ))
857
858 factory.addStep(ShellCommand(
859 name = "delbin",
860 description = "Removing output directory",
861 command = ["rm", "-rf", "bin/"]
862 ))
863
864 factory.addStep(ShellCommand(
865 name = "defconfig",
866 description = "Populating .config",
867 command = ["make", "defconfig"],
868 env = MakeEnv()
869 ))
870
871 # check arch
872 factory.addStep(ShellCommand(
873 name = "checkarch",
874 description = "Checking architecture",
875 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
876 logEnviron = False,
877 want_stdout = False,
878 want_stderr = False,
879 haltOnFailure = True
880 ))
881
882 # find libc suffix
883 factory.addStep(SetPropertyFromCommand(
884 name = "libc",
885 property = "libc",
886 description = "Finding libc suffix",
887 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
888
889 # install build key
890 if usign_key is not None:
891 factory.addStep(StringDownload(
892 name = "dlkeybuildpub",
893 s = UsignSec2Pub(usign_key, usign_comment),
894 workerdest = "key-build.pub",
895 mode = 0o600,
896 ))
897
898 factory.addStep(StringDownload(
899 name = "dlkeybuild",
900 s = "# fake private key",
901 workerdest = "key-build",
902 mode = 0o600,
903 ))
904
905 factory.addStep(StringDownload(
906 name = "dlkeybuilducert",
907 s = "# fake certificate",
908 workerdest = "key-build.ucert",
909 mode = 0o600,
910 ))
911
912 # prepare dl
913 factory.addStep(ShellCommand(
914 name = "dldir",
915 description = "Preparing dl/",
916 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
917 logEnviron = False,
918 want_stdout = False
919 ))
920
921 # prepare tar
922 factory.addStep(ShellCommand(
923 name = "dltar",
924 description = "Building and installing GNU tar",
925 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
926 env = MakeEnv(tryccache=True),
927 haltOnFailure = True
928 ))
929
930 # populate dl
931 factory.addStep(ShellCommand(
932 name = "dlrun",
933 description = "Populating dl/",
934 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
935 env = MakeEnv(),
936 logEnviron = False,
937 locks = [dlLock.access('exclusive')],
938 ))
939
940 factory.addStep(ShellCommand(
941 name = "cleanbase",
942 description = "Cleaning base-files",
943 command=["make", "package/base-files/clean", "V=s"]
944 ))
945
946 # build
947 factory.addStep(ShellCommand(
948 name = "tools",
949 description = "Building and installing tools",
950 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
951 env = MakeEnv(tryccache=True),
952 haltOnFailure = True
953 ))
954
955 factory.addStep(ShellCommand(
956 name = "toolchain",
957 description = "Building and installing toolchain",
958 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
959 env = MakeEnv(),
960 haltOnFailure = True
961 ))
962
963 factory.addStep(ShellCommand(
964 name = "kmods",
965 description = "Building kmods",
966 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
967 env = MakeEnv(),
968 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
969 haltOnFailure = True
970 ))
971
972 # find kernel version
973 factory.addStep(SetPropertyFromCommand(
974 name = "kernelversion",
975 property = "kernelversion",
976 description = "Finding the effective Kernel version",
977 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
978 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
979 ))
980
981 factory.addStep(ShellCommand(
982 name = "pkgclean",
983 description = "Cleaning up package build",
984 command=["make", "package/cleanup", "V=s"]
985 ))
986
987 factory.addStep(ShellCommand(
988 name = "pkgbuild",
989 description = "Building packages",
990 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
991 env = MakeEnv(),
992 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
993 haltOnFailure = True
994 ))
995
996 # factory.addStep(IfBuiltinShellCommand(
997 factory.addStep(ShellCommand(
998 name = "pkginstall",
999 description = "Installing packages",
1000 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1001 env = MakeEnv(),
1002 haltOnFailure = True
1003 ))
1004
1005 factory.addStep(ShellCommand(
1006 name = "pkgindex",
1007 description = "Indexing packages",
1008 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1009 env = MakeEnv(),
1010 haltOnFailure = True
1011 ))
1012
1013 if enable_kmod_archive and embed_kmod_repository:
1014 # embed kmod repository. Must happen before 'images'
1015
1016 # find rootfs staging directory
1017 factory.addStep(SetPropertyFromCommand(
1018 name = "stageroot",
1019 property = "stageroot",
1020 description = "Finding the rootfs staging directory",
1021 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1022 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1023 ))
1024
1025 factory.addStep(ShellCommand(
1026 name = "filesdir",
1027 description = "Creating file overlay directory",
1028 command=["mkdir", "-p", "files/etc/opkg"],
1029 haltOnFailure = True
1030 ))
1031
1032 factory.addStep(ShellCommand(
1033 name = "kmodconfig",
1034 description = "Embedding kmod repository configuration",
1035 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1036 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1037 haltOnFailure = True
1038 ))
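# A sketch of the rewrite performed above (paths are illustrative): a line like
#   src/gz openwrt_core http://downloads.example.org/targets/ath79/generic/packages
# in the staged distfeeds.conf gains a companion entry
#   src/gz openwrt_kmods http://downloads.example.org/targets/ath79/generic/kmods/<kernelversion>
# so images built with this file overlay can install kmods matching their exact kernel.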
1039
1040 #factory.addStep(IfBuiltinShellCommand(
1041 factory.addStep(ShellCommand(
1042 name = "images",
1043 description = "Building and installing images",
1044 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1045 env = MakeEnv(),
1046 haltOnFailure = True
1047 ))
1048
1049 factory.addStep(ShellCommand(
1050 name = "buildinfo",
1051 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1052 command = "make -j1 buildinfo V=s || true",
1053 env = MakeEnv(),
1054 haltOnFailure = True
1055 ))
1056
1057 factory.addStep(ShellCommand(
1058 name = "json_overview_image_info",
1059 description = "Generate profiles.json in target folder",
1060 command = "make -j1 json_overview_image_info V=s || true",
1061 env = MakeEnv(),
1062 haltOnFailure = True
1063 ))
1064
1065 factory.addStep(ShellCommand(
1066 name = "checksums",
1067 description = "Calculating checksums",
1068 command=["make", "-j1", "checksum", "V=s"],
1069 env = MakeEnv(),
1070 haltOnFailure = True
1071 ))
1072
1073 if enable_kmod_archive:
1074 factory.addStep(ShellCommand(
1075 name = "kmoddir",
1076 description = "Creating kmod directory",
1077 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1078 haltOnFailure = True
1079 ))
1080
1081 factory.addStep(ShellCommand(
1082 name = "kmodprepare",
1083 description = "Preparing kmod archive",
1084 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1085 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1086 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1087 haltOnFailure = True
1088 ))
1089
1090 factory.addStep(ShellCommand(
1091 name = "kmodindex",
1092 description = "Indexing kmod archive",
1093 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1094 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1095 env = MakeEnv(),
1096 haltOnFailure = True
1097 ))
1098
1099 # sign
1100 if ini.has_option("gpg", "key") or usign_key is not None:
1101 factory.addStep(MasterShellCommand(
1102 name = "signprepare",
1103 description = "Preparing temporary signing directory",
1104 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1105 haltOnFailure = True
1106 ))
1107
1108 factory.addStep(ShellCommand(
1109 name = "signpack",
1110 description = "Packing files to sign",
1111 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1112 haltOnFailure = True
1113 ))
1114
1115 factory.addStep(FileUpload(
1116 workersrc = "sign.tar.gz",
1117 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1118 haltOnFailure = True
1119 ))
1120
1121 factory.addStep(MasterShellCommand(
1122 name = "signfiles",
1123 description = "Signing files",
1124 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1125 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1126 haltOnFailure = True
1127 ))
1128
1129 factory.addStep(FileDownload(
1130 name = "dlsigntargz",
1131 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1132 workerdest = "sign.tar.gz",
1133 haltOnFailure = True
1134 ))
1135
1136 factory.addStep(ShellCommand(
1137 name = "signunpack",
1138 description = "Unpacking signed files",
1139 command = ["tar", "-xzf", "sign.tar.gz"],
1140 haltOnFailure = True
1141 ))
1142
1143 # upload
1144 factory.addStep(ShellCommand(
1145 name = "dirprepare",
1146 description = "Preparing upload directory structure",
1147 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1148 haltOnFailure = True
1149 ))
1150
1151 factory.addStep(ShellCommand(
1152 name = "linkprepare",
1153 description = "Preparing repository symlink",
1154 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1155 doStepIf = IsNoMasterBuild,
1156 haltOnFailure = True
1157 ))
1158
1159 if enable_kmod_archive:
1160 factory.addStep(ShellCommand(
1161 name = "kmoddirprepare",
1162 description = "Preparing kmod archive upload directory",
1163 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1164 haltOnFailure = True
1165 ))
1166
1167 factory.addStep(ShellCommand(
1168 name = "dirupload",
1169 description = "Uploading directory structure",
1170 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1171 env={'RSYNC_PASSWORD': rsync_bin_key},
1172 haltOnFailure = True,
1173 logEnviron = False,
1174 ))
1175
1176 # download remote sha256sums to 'target-sha256sums'
1177 factory.addStep(ShellCommand(
1178 name = "target-sha256sums",
1179 description = "Fetching remote sha256sums for target",
1180 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1181 env={'RSYNC_PASSWORD': rsync_bin_key},
1182 logEnviron = False,
1183 haltOnFailure = False,
1184 flunkOnFailure = False,
1185 warnOnFailure = False,
1186 ))
1187
1188 # build list of files to upload
1189 factory.addStep(FileDownload(
1190 name = "dlsha2rsyncpl",
1191 mastersrc = scripts_dir + '/sha2rsync.pl',
1192 workerdest = "../sha2rsync.pl",
1193 mode = 0o755,
1194 ))
1195
1196 factory.addStep(ShellCommand(
1197 name = "buildlist",
1198 description = "Building list of files to upload",
1199 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1200 haltOnFailure = True,
1201 ))
1202
1203 factory.addStep(FileDownload(
1204 name = "dlrsync.sh",
1205 mastersrc = scripts_dir + '/rsync.sh',
1206 workerdest = "../rsync.sh",
1207 mode = 0o755
1208 ))
1209
1210 # upload new files and update existing ones
1211 factory.addStep(ShellCommand(
1212 name = "targetupload",
1213 description = "Uploading target files",
1214 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1215 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1216 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1217 env={'RSYNC_PASSWORD': rsync_bin_key},
1218 haltOnFailure = True,
1219 logEnviron = False,
1220 ))
1221
1222 # delete files which don't exist locally
1223 factory.addStep(ShellCommand(
1224 name = "targetprune",
1225 description = "Pruning target files",
1226 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1227 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1228 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1229 env={'RSYNC_PASSWORD': rsync_bin_key},
1230 haltOnFailure = True,
1231 logEnviron = False,
1232 ))
1233
1234 if enable_kmod_archive:
1235 factory.addStep(ShellCommand(
1236 name = "kmodupload",
1237 description = "Uploading kmod archive",
1238 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1239 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1240 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1241 env={'RSYNC_PASSWORD': rsync_bin_key},
1242 haltOnFailure = True,
1243 logEnviron = False,
1244 ))
1245
1246 if rsync_src_url is not None:
1247 factory.addStep(ShellCommand(
1248 name = "sourcelist",
1249 description = "Finding source archives to upload",
1250 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1251 haltOnFailure = True
1252 ))
1253
1254 factory.addStep(ShellCommand(
1255 name = "sourceupload",
1256 description = "Uploading source archives",
1257 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1258 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1259 env={'RSYNC_PASSWORD': rsync_src_key},
1260 haltOnFailure = True,
1261 logEnviron = False,
1262 ))
1263
1264 if False:
1265 factory.addStep(ShellCommand(
1266 name = "packageupload",
1267 description = "Uploading package files",
1268 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1269 env={'RSYNC_PASSWORD': rsync_bin_key},
1270 haltOnFailure = False,
1271 logEnviron = False,
1272 ))
1273
1274 # logs
1275 if False:
1276 factory.addStep(ShellCommand(
1277 name = "upload",
1278 description = "Uploading logs",
1279 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1280 env={'RSYNC_PASSWORD': rsync_bin_key},
1281 haltOnFailure = False,
1282 alwaysRun = True,
1283 logEnviron = False,
1284 ))
1285
1286 factory.addStep(ShellCommand(
1287 name = "df",
1288 description = "Reporting disk usage",
1289 command=["df", "-h", "."],
1290 env={'LC_ALL': 'C'},
1291 haltOnFailure = False,
1292 alwaysRun = True
1293 ))
1294
1295 factory.addStep(ShellCommand(
1296 name = "ccachestat",
1297 description = "Reporting ccache stats",
1298 command=["ccache", "-s"],
1299 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1300 want_stderr = False,
1301 haltOnFailure = False,
1302 flunkOnFailure = False,
1303 warnOnFailure = False,
1304 alwaysRun = True,
1305 ))
1306
1307 c['builders'].append(BuilderConfig(name=target, workernames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1308
1309 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1310 force_factory.addStep(steps.Trigger(
1311 name = "trigger_%s" % target,
1312 description = "Triggering %s build" % target,
1313 schedulerNames = [ "trigger_%s" % target ],
1314 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1315 doStepIf = IsTargetSelected(target)
1316 ))
1317
1318
1319 ####### STATUS TARGETS
1320
1321 # Status reporting: the results of each build are published through the 'www'
1322 # configuration and the reporter plugins set up below (a web UI and, optionally,
1323 # an IRC bot).
1324
1325 if ini.has_option("phase1", "status_bind"):
1326 c['www'] = {
1327 'port': ini.get("phase1", "status_bind"),
1328 'plugins': {
1329 'waterfall_view': True,
1330 'console_view': True,
1331 'grid_view': True
1332 }
1333 }
1334
1335 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1336 c['www']['auth'] = util.UserPasswordAuth([
1337 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1338 ])
1339 c['www']['authz'] = util.Authz(
1340 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1341 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1342 )
1343
1344 c['services'] = []
1345 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1346 irc_host = ini.get("irc", "host")
1347 irc_port = 6667
1348 irc_chan = ini.get("irc", "channel")
1349 irc_nick = ini.get("irc", "nickname")
1350 irc_pass = None
1351
1352 if ini.has_option("irc", "port"):
1353 irc_port = ini.getint("irc", "port")
1354
1355 if ini.has_option("irc", "password"):
1356 irc_pass = ini.get("irc", "password")
1357
1358 irc = reporters.IRC(irc_host, irc_nick,
1359 port = irc_port,
1360 password = irc_pass,
1361 channels = [ irc_chan ],
1362 notify_events = [ 'exception', 'problem', 'recovery' ]
1363 )
1364
1365 c['services'].append(irc)
1366
1367 ####### DB URL
1368
1369 c['db'] = {
1370 # This specifies what database buildbot uses to store its state. You can leave
1371 # this at its default for all but the largest installations.
1372 'db_url' : "sqlite:///state.sqlite",
1373 }
1374
1375 c['buildbotNetUsageData'] = None