phase1,phase2: shuffle order of builds
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import random
8 import subprocess
9 import configparser
10
11 from datetime import timedelta
12
13 from buildbot import locks
14 from buildbot.changes import filter
15 from buildbot.changes.gitpoller import GitPoller
16 from buildbot.config import BuilderConfig
17 from buildbot.plugins import reporters
18 from buildbot.plugins import schedulers
19 from buildbot.plugins import steps
20 from buildbot.plugins import util
21 from buildbot.process import properties
22 from buildbot.process.factory import BuildFactory
23 from buildbot.process.properties import Interpolate
24 from buildbot.process.properties import Property
25 from buildbot.schedulers.basic import SingleBranchScheduler
26 from buildbot.schedulers.forcesched import BaseParameter
27 from buildbot.schedulers.forcesched import ForceScheduler
28 from buildbot.schedulers.forcesched import ValidationError
29 from buildbot.steps.master import MasterShellCommand
30 from buildbot.steps.shell import SetPropertyFromCommand
31 from buildbot.steps.shell import ShellCommand
32 from buildbot.steps.source.git import Git
33 from buildbot.steps.transfer import FileDownload
34 from buildbot.steps.transfer import FileUpload
35 from buildbot.steps.transfer import StringDownload
36 from buildbot.worker import Worker
37
38
39 # This is the phase1 buildmaster config file. It must be installed as
40 # 'master.cfg' in the buildmaster's base directory.
41
42 ini = configparser.ConfigParser()
43 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
44
45 # This is the dictionary that the buildmaster pays attention to. We also use
46 # a shorter alias to save typing.
47 c = BuildmasterConfig = {}
48
49 ####### PROJECT IDENTITY
50
51 # the 'title' string will appear at the top of this buildbot
52 # installation's web UI home page (linked to the
53 # 'titleURL') and is embedded in the title of its pages.
54
55 c['title'] = ini.get("general", "title")
56 c['titleURL'] = ini.get("general", "title_url")
57
58 # the 'buildbotURL' string should point to the location where the buildbot's
59 # internal web server (the 'www' UI configured below) is visible. This
60 # typically uses the port number set in the 'status_bind' option, but
61 # with an externally-visible host name which the buildbot cannot figure out
62 # without some help.
63
64 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
65
66 ####### BUILDWORKERS
67
68 # The 'workers' list defines the set of recognized buildworkers. Each element is
69 # a Worker object, specifying a unique worker name and password. The same
70 # worker name and password must be configured on the worker.
71
72 worker_port = 9989
73
74 if ini.has_option("phase1", "port"):
75 worker_port = ini.get("phase1", "port")
76
77 c['workers'] = []
78 NetLocks = dict()
79
80 for section in ini.sections():
81 if section.startswith("worker "):
82 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
83 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
84 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
85 name = ini.get(section, "name")
86 password = ini.get(section, "password")
87 max_builds = 1
88 if ini.has_option(section, "builds"):
89 max_builds = ini.getint(section, "builds")
90 sl_props['max_builds'] = max_builds
91 if max_builds == 1:
92 sl_props['shared_wd'] = True
93 if ini.has_option(section, "cleanup"):
94 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
95 if ini.has_option(section, "dl_lock"):
96 lockname = ini.get(section, "dl_lock")
97 sl_props['dl_lock'] = lockname
98 if lockname not in NetLocks:
99 NetLocks[lockname] = locks.MasterLock(lockname)
100 if ini.has_option(section, "ul_lock"):
101 lockname = ini.get(section, "ul_lock")
102 sl_props['ul_lock'] = lockname
103 if lockname not in NetLocks:
104 NetLocks[lockname] = locks.MasterLock(lockname)
105 if ini.has_option(section, "shared_wd"):
106 shared_wd = ini.getboolean(section, "shared_wd")
107 sl_props['shared_wd'] = shared_wd
108 if shared_wd and (max_builds != 1):
109 raise ValueError('max_builds must be 1 with shared workdir!')
110 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
111
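# A minimal sketch of a worker section as parsed by the loop above; the
# section and option names come from the ini.has_option()/ini.get() calls,
# while the concrete values are illustrative assumptions only:
#
#   [worker 1]
#   name = builder-01
#   password = secret
#   phase = 1
#   builds = 2
#   cleanup = false
#   shared_wd = false
#   dl_lock = mirror-dl
#   ul_lock = mirror-ul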
112 # The 'protocols' setting defines the TCP port to listen on for connections from workers.
113 # This must match the value configured into the buildworkers (with their
114 # --master option)
115 c['protocols'] = {'pb': {'port': worker_port}}
116
117 # coalesce builds
118 c['collapseRequests'] = True
119
120 # Reduce amount of backlog data
121 c['configurators'] = [util.JanitorConfigurator(
122 logHorizon=timedelta(days=3),
123 hour=6,
124 )]
125
126 ####### CHANGESOURCES
127
128 work_dir = os.path.abspath(ini.get("general", "workdir", fallback=".") or ".")
129 scripts_dir = os.path.abspath("../scripts")
130 tree_expire = 0
131 other_builds = 0
132 cc_version = None
133
134 cc_command = "gcc"
135 cxx_command = "g++"
136
137 config_seed = ""
138
139 git_ssh = False
140 git_ssh_key = None
141
142 if ini.has_option("phase1", "expire"):
143 tree_expire = ini.getint("phase1", "expire")
144
145 if ini.has_option("phase1", "other_builds"):
146 other_builds = ini.getint("phase1", "other_builds")
147
148 if ini.has_option("phase1", "cc_version"):
149 cc_version = ini.get("phase1", "cc_version").split()
150 if len(cc_version) == 1:
151 cc_version = ["eq", cc_version[0]]
152
153 if ini.has_option("general", "git_ssh"):
154 git_ssh = ini.getboolean("general", "git_ssh")
155
156 if ini.has_option("general", "git_ssh_key"):
157 git_ssh_key = ini.get("general", "git_ssh_key")
158 else:
159 git_ssh = False
160
161 if ini.has_option("phase1", "config_seed"):
162 config_seed = ini.get("phase1", "config_seed")
163
164 repo_url = ini.get("repo", "url")
165 repo_branch = "master"
166
167 if ini.has_option("repo", "branch"):
168 repo_branch = ini.get("repo", "branch")
169
170 rsync_bin_url = ini.get("rsync", "binary_url")
171 rsync_bin_key = ini.get("rsync", "binary_password")
172 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
173
174 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
175 rsync_bin_defopts += ["--contimeout=20"]
176
177 rsync_src_url = None
178 rsync_src_key = None
179 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
180
181 if ini.has_option("rsync", "source_url"):
182 rsync_src_url = ini.get("rsync", "source_url")
183 rsync_src_key = ini.get("rsync", "source_password")
184
185 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
186 rsync_src_defopts += ["--contimeout=20"]
187
188 usign_key = None
189 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
190
191 if ini.has_option("usign", "key"):
192 usign_key = ini.get("usign", "key")
193
194 if ini.has_option("usign", "comment"):
195 usign_comment = ini.get("usign", "comment")
196
197 enable_kmod_archive = False
198 embed_kmod_repository = False
199
200 if ini.has_option("phase1", "kmod_archive"):
201 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
202
203 if ini.has_option("phase1", "kmod_repository"):
204 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
205
206
207 # find targets
208 targets = [ ]
209
210 if not os.path.isdir(work_dir+'/source.git'):
211 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
212 else:
213 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
214
215 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
216 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
217 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
218
219 while True:
220 line = findtargets.stdout.readline()
221 if not line:
222 break
223 ta = line.decode().strip().split(' ')
224 targets.append(ta[0])
225
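# Each line printed by dump-target-info.pl is split on spaces and only the
# first field is kept, so every "target/subtarget" pair (e.g. "ath79/generic",
# an illustrative value) becomes one builder name; any remaining fields on the
# line are ignored here.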
226
227 # the 'change_source' setting tells the buildmaster how it should find out
228 # about source code changes. Here we poll the OpenWrt source repository configured above.
229
230 c['change_source'] = []
231 c['change_source'].append(GitPoller(
232 repo_url,
233 workdir=work_dir+'/work.git', branch=repo_branch,
234 pollinterval=300))
235
236 ####### SCHEDULERS
237
238 # Configure the Schedulers, which decide how to react to incoming changes. In this
239 # case, kick off a build on every target builder whenever the watched branch changes.
240
241 class TagChoiceParameter(BaseParameter):
242 spec_attributes = ["strict", "choices"]
243 type = "list"
244 strict = True
245
246 def __init__(self, name, label=None, **kw):
247 super().__init__(name, label, **kw)
248 self._choice_list = []
249
250 @property
251 def choices(self):
252 taglist = []
253 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
254
255 if basever:
256 findtags = subprocess.Popen(
257 ['git', 'ls-remote', '--tags', repo_url],
258 stdout = subprocess.PIPE)
259
260 while True:
261 line = findtags.stdout.readline()
262
263 if not line:
264 break
265
266 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
267
268 if tagver and tagver[1].find(basever[1]) == 0:
269 taglist.append(tagver[1])
270
271 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
272 taglist.insert(0, '')
273
274 self._choice_list = taglist
275
276 return self._choice_list
277
278 def parse_from_arg(self, s):
279 if self.strict and s not in self._choice_list:
280 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
281 return s
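# Worked example (assumed branch and tags): on a release branch such as
# "openwrt-21.02" the choices property lists the matching release tags found
# by "git ls-remote --tags", e.g. 21.02.3, 21.02.3-rc1, 21.02.0, ... The sort
# key appends "-z" to final releases so that, with reverse=True, 21.02.3 is
# listed before 21.02.3-rc1, and the empty first entry means "build the
# branch head" (no tag checkout).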
282
283 c['schedulers'] = []
284 c['schedulers'].append(SingleBranchScheduler(
285 name = "all",
286 change_filter = filter.ChangeFilter(branch=repo_branch),
287 treeStableTimer = 60,
288 builderNames = targets))
289
290 c['schedulers'].append(ForceScheduler(
291 name = "force",
292 buttonName = "Force builds",
293 label = "Force build details",
294 builderNames = [ "00_force_build" ],
295
296 codebases = [
297 util.CodebaseParameter(
298 "",
299 label = "Repository",
300 branch = util.FixedParameter(name = "branch", default = ""),
301 revision = util.FixedParameter(name = "revision", default = ""),
302 repository = util.FixedParameter(name = "repository", default = ""),
303 project = util.FixedParameter(name = "project", default = "")
304 )
305 ],
306
307 reason = util.StringParameter(
308 name = "reason",
309 label = "Reason",
310 default = "Trigger build",
311 required = True,
312 size = 80
313 ),
314
315 properties = [
316 util.NestedParameter(
317 name="options",
318 label="Build Options",
319 layout="vertical",
320 fields=[
321 util.ChoiceStringParameter(
322 name = "target",
323 label = "Build target",
324 default = "all",
325 choices = [ "all" ] + targets
326 ),
327 TagChoiceParameter(
328 name = "tag",
329 label = "Build tag",
330 default = ""
331 )
332 ]
333 )
334 ]
335 ))
336
337 ####### BUILDERS
338
339 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
340 # what steps, and which workers can execute them. Note that any particular build will
341 # only take place on one worker.
342
343 CleanTargetMap = [
344 [ "tools", "tools/clean" ],
345 [ "chain", "toolchain/clean" ],
346 [ "linux", "target/linux/clean" ],
347 [ "dir", "dirclean" ],
348 [ "dist", "distclean" ]
349 ]
350
351 def IsMakeCleanRequested(pattern):
352 def CheckCleanProperty(step):
353 val = step.getProperty("clean")
354 if val and re.match(pattern, val):
355 return True
356 else:
357 return False
358
359 return CheckCleanProperty
360
361 def IsSharedWorkdir(step):
362 return bool(step.getProperty("shared_wd"))
363
364 def IsCleanupRequested(step):
365 if IsSharedWorkdir(step):
366 return False
367 do_cleanup = step.getProperty("do_cleanup")
368 if do_cleanup:
369 return True
370 else:
371 return False
372
373 def IsExpireRequested(step):
374 if IsSharedWorkdir(step):
375 return False
376 else:
377 return not IsCleanupRequested(step)
378
379 def IsGitFreshRequested(step):
380 do_cleanup = step.getProperty("do_cleanup")
381 if do_cleanup:
382 return True
383 else:
384 return False
385
386 def IsGitCleanRequested(step):
387 return not IsGitFreshRequested(step)
388
389 def IsTaggingRequested(step):
390 val = step.getProperty("tag")
391 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
392 return True
393 else:
394 return False
395
396 def IsNoTaggingRequested(step):
397 return not IsTaggingRequested(step)
398
399 def IsNoMasterBuild(step):
400 return repo_branch != "master"
401
402 def GetBaseVersion():
403 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
404 return repo_branch.split('-')[1]
405 else:
406 return "master"
407
408 @properties.renderer
409 def GetVersionPrefix(props):
410 basever = GetBaseVersion()
411 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
412 return "%s/" % props["tag"]
413 elif basever != "master":
414 return "%s-SNAPSHOT/" % basever
415 else:
416 return ""
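# Worked example for GetBaseVersion()/GetVersionPrefix above (assumed values):
# with repo_branch = "openwrt-21.02" the base version is "21.02"; a forced
# build with tag = "21.02.3" uploads under "21.02.3/", an untagged build under
# "21.02-SNAPSHOT/", and builds of the master branch use no prefix at all.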
417
418 @properties.renderer
419 def GetNumJobs(props):
420 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
421 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
422 else:
423 return "1"
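# Example of the job count arithmetic above (illustrative numbers): a worker
# reporting nproc = 16 with max_builds = 2 and other_builds = 1 gets
# int(16 / (2 + 1)) = 5, i.e. "make -j5"; without the nproc/max_builds
# properties the build falls back to -j1.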
424
425 @properties.renderer
426 def GetCC(props):
427 if props.hasProperty("cc_command"):
428 return props["cc_command"]
429 else:
430 return "gcc"
431
432 @properties.renderer
433 def GetCXX(props):
434 if props.hasProperty("cxx_command"):
435 return props["cxx_command"]
436 else:
437 return "g++"
438
439 @properties.renderer
440 def GetCwd(props):
441 if props.hasProperty("builddir"):
442 return props["builddir"]
443 elif props.hasProperty("workdir"):
444 return props["workdir"]
445 else:
446 return "/"
447
448 @properties.renderer
449 def GetCCache(props):
450 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
451 return props["ccache_command"]
452 else:
453 return ""
454
455 def GetNextBuild(builder, requests):
456 for r in requests:
457 if r.properties and r.properties.hasProperty("tag"):
458 return r
459 return requests[0]
460
461 def prioritizeBuilders(buildmaster, builders):
462 random.shuffle(builders)
463 return builders
464
465 c['prioritizeBuilders'] = prioritizeBuilders
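# The builder list is shuffled in place on every scheduling pass, so pending
# builds are picked up in a random target order rather than a fixed one.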
466
467 def MakeEnv(overrides=None, tryccache=False):
468 env = {
469 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
470 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
471 }
472 if tryccache:
473 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
474 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
475 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
476 else:
477 env['CC'] = env['CCC']
478 env['CXX'] = env['CCXX']
479 env['CCACHE'] = ''
480 if overrides is not None:
481 env.update(overrides)
482 return env
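# Sketch of what MakeEnv(tryccache=True) renders to, assuming a worker with
# builddir = /worker/ath79 and ccache installed at /usr/bin/ccache:
#   CCC=gcc CCXX=g++ CCACHE=/usr/bin/ccache
#   CC=/worker/ath79/ccache_cc.sh CXX=/worker/ath79/ccache_cxx.sh
# The ccache_cc.sh/ccache_cxx.sh wrappers installed further below simply
# "exec ${CCACHE} ${CCC} ..." so compiles go through ccache when available.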
483
484 @properties.renderer
485 def NetLockDl(props):
486 lock = None
487 if props.hasProperty("dl_lock"):
488 lock = NetLocks[props["dl_lock"]]
489 if lock is not None:
490 return [lock.access('exclusive')]
491 else:
492 return []
493
494 @properties.renderer
495 def NetLockUl(props):
496 lock = None
497 if props.hasProperty("ul_lock"):
498 lock = NetLocks[props["ul_lock"]]
499 if lock is not None:
500 return [lock.access('exclusive')]
501 else:
502 return []
503
504 @util.renderer
505 def TagPropertyValue(props):
506 if props.hasProperty("options"):
507 options = props.getProperty("options")
508 if type(options) is dict:
509 return options.get("tag")
510 return None
511
512 def IsTargetSelected(target):
513 def CheckTargetProperty(step):
514 try:
515 options = step.getProperty("options")
516 if type(options) is dict:
517 selected_target = options.get("target", "all")
518 if selected_target != "all" and selected_target != target:
519 return False
520 except KeyError:
521 pass
522
523 return True
524
525 return CheckTargetProperty
526
527 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
528 try:
529 seckey = base64.b64decode(seckey)
530 except:
531 return None
532
533 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
534 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
535
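# Usage sketch for UsignSec2Pub() above (offsets follow the signify/usign key
# layout, stated here as an assumption): the decoded secret key keeps its
# 2-byte algorithm tag, the 8-byte key id at offset 32 and the trailing
# 32-byte public half of the ed25519 key pair; re-encoded and prefixed with a
# matching "untrusted comment:" line, this yields the key-build.pub that the
# build steps below install on the worker.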
536
537 c['builders'] = []
538
539 dlLock = locks.WorkerLock("worker_dl")
540
541 checkBuiltin = re.sub('[\t\n ]+', ' ', """
542 checkBuiltin() {
543 local symbol op path file;
544 for file in $CHANGED_FILES; do
545 case "$file" in
546 package/*/*) : ;;
547 *) return 0 ;;
548 esac;
549 done;
550 while read symbol op path; do
551 case "$symbol" in package-*)
552 symbol="${symbol##*(}";
553 symbol="${symbol%)}";
554 for file in $CHANGED_FILES; do
555 case "$file" in "package/$path/"*)
556 grep -qsx "$symbol=y" .config && return 0
557 ;; esac;
558 done;
559 esac;
560 done < tmp/.packagedeps;
561 return 1;
562 }
563 """).strip()
564
565
566 class IfBuiltinShellCommand(ShellCommand):
567 def _quote(self, str):
568 if re.search("[^a-zA-Z0-9/_.-]", str):
569 return "'%s'" %(re.sub("'", "'\"'\"'", str))
570 return str
571
572 def setCommand(self, command):
573 if not isinstance(command, str):
574 command = ' '.join(map(self._quote, command))
575 self.command = [
576 '/bin/sh', '-c',
577 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
578 ]
579
580 def setupEnvironment(self, cmd):
581 workerEnv = self.workerEnvironment
582 if workerEnv is None:
583 workerEnv = { }
584 changedFiles = { }
585 for request in self.build.requests:
586 for source in request.sources:
587 for change in source.changes:
588 for file in change.files:
589 changedFiles[file] = True
590 fullWorkerEnv = workerEnv.copy()
591 fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
592 cmd.args['env'] = fullWorkerEnv
593
594 workerNames = [ ]
595
596 for worker in c['workers']:
597 workerNames.append(worker.workername)
598
599 force_factory = BuildFactory()
600
601 c['builders'].append(BuilderConfig(
602 name = "00_force_build",
603 workernames = workerNames,
604 factory = force_factory))
605
606 for target in targets:
607 ts = target.split('/')
608
609 factory = BuildFactory()
610
611 # setup shared work directory if required
612 factory.addStep(ShellCommand(
613 name = "sharedwd",
614 description = "Setting up shared work directory",
615 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
616 workdir = ".",
617 haltOnFailure = True,
618 doStepIf = IsSharedWorkdir))
619
620 # find number of cores
621 factory.addStep(SetPropertyFromCommand(
622 name = "nproc",
623 property = "nproc",
624 description = "Finding number of CPUs",
625 command = ["nproc"]))
626
627 # find gcc and g++ compilers
628 factory.addStep(FileDownload(
629 name = "dlfindbinpl",
630 mastersrc = scripts_dir + '/findbin.pl',
631 workerdest = "../findbin.pl",
632 mode = 0o755))
633
634 factory.addStep(SetPropertyFromCommand(
635 name = "gcc",
636 property = "cc_command",
637 description = "Finding gcc command",
638 command = [
639 "../findbin.pl", "gcc",
640 cc_version[0] if cc_version is not None else '',
641 cc_version[1] if cc_version is not None else ''
642 ],
643 haltOnFailure = True))
644
645 factory.addStep(SetPropertyFromCommand(
646 name = "g++",
647 property = "cxx_command",
648 description = "Finding g++ command",
649 command = [
650 "../findbin.pl", "g++",
651 cc_version[0] if cc_version is not None else '',
652 cc_version[1] if cc_version is not None else ''
653 ],
654 haltOnFailure = True))
655
656 # see if ccache is available
657 factory.addStep(SetPropertyFromCommand(
658 property = "ccache_command",
659 command = ["which", "ccache"],
660 description = "Testing for ccache command",
661 haltOnFailure = False,
662 flunkOnFailure = False,
663 warnOnFailure = False,
664 ))
665
666 # expire tree if needed
667 if tree_expire > 0:
668 factory.addStep(FileDownload(
669 name = "dlexpiresh",
670 doStepIf = IsExpireRequested,
671 mastersrc = scripts_dir + '/expire.sh',
672 workerdest = "../expire.sh",
673 mode = 0o755))
674
675 factory.addStep(ShellCommand(
676 name = "expire",
677 description = "Checking for build tree expiry",
678 command = ["./expire.sh", str(tree_expire)],
679 workdir = ".",
680 haltOnFailure = True,
681 doStepIf = IsExpireRequested,
682 timeout = 2400))
683
684 # cleanup.sh if needed
685 factory.addStep(FileDownload(
686 name = "dlcleanupsh",
687 mastersrc = scripts_dir + '/cleanup.sh',
688 workerdest = "../cleanup.sh",
689 mode = 0o755,
690 doStepIf = IsCleanupRequested))
691
692 factory.addStep(ShellCommand(
693 name = "cleanold",
694 description = "Cleaning previous builds",
695 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
696 workdir = ".",
697 haltOnFailure = True,
698 doStepIf = IsCleanupRequested,
699 timeout = 2400))
700
701 factory.addStep(ShellCommand(
702 name = "cleanup",
703 description = "Cleaning work area",
704 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
705 workdir = ".",
706 haltOnFailure = True,
707 doStepIf = IsCleanupRequested,
708 timeout = 2400))
709
710 # user-requested clean targets
711 for tuple in CleanTargetMap:
712 factory.addStep(ShellCommand(
713 name = tuple[1],
714 description = 'User-requested "make %s"' % tuple[1],
715 command = ["make", tuple[1], "V=s"],
716 env = MakeEnv(),
717 doStepIf = IsMakeCleanRequested(tuple[0])
718 ))
719
720 # Workaround bug when switching from a checked out tag back to a branch
721 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
722 factory.addStep(ShellCommand(
723 name = "gitcheckout",
724 description = "Ensure that Git HEAD is sane",
725 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
726 haltOnFailure = True))
727
728 # check out the source
729 # Git() runs:
730 # if repo doesn't exist: 'git clone repourl'
731 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
732 # 'git fetch -t repourl branch; git reset --hard revision'
733 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
734 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
735 factory.addStep(Git(
736 name = "gitclean",
737 repourl = repo_url,
738 branch = repo_branch,
739 mode = 'full',
740 method = 'clean',
741 haltOnFailure = True,
742 doStepIf = IsGitCleanRequested,
743 ))
744
745 factory.addStep(Git(
746 name = "gitfresh",
747 repourl = repo_url,
748 branch = repo_branch,
749 mode = 'full',
750 method = 'fresh',
751 haltOnFailure = True,
752 doStepIf = IsGitFreshRequested,
753 ))
754
755 # update remote refs
756 factory.addStep(ShellCommand(
757 name = "fetchrefs",
758 description = "Fetching Git remote refs",
759 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
760 haltOnFailure = True
761 ))
762
763 # switch to tag
764 factory.addStep(ShellCommand(
765 name = "switchtag",
766 description = "Checking out Git tag",
767 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
768 haltOnFailure = True,
769 doStepIf = IsTaggingRequested
770 ))
771
772 # Verify that Git HEAD points to a tag or branch
773 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
774 factory.addStep(ShellCommand(
775 name = "gitverify",
776 description = "Ensure that Git HEAD is pointing to a branch or tag",
777 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
778 haltOnFailure = True))
779
780 factory.addStep(ShellCommand(
781 name = "rmtmp",
782 description = "Remove tmp folder",
783 command=["rm", "-rf", "tmp/"]))
784
785 # feed
786 # factory.addStep(ShellCommand(
787 # name = "feedsconf",
788 # description = "Copy the feeds.conf",
789 # command='''cp ~/feeds.conf ./feeds.conf''' ))
790
791 # feed
792 factory.addStep(ShellCommand(
793 name = "rmfeedlinks",
794 description = "Remove feed symlinks",
795 command=["rm", "-rf", "package/feeds/"]))
796
797 factory.addStep(StringDownload(
798 name = "ccachecc",
799 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
800 workerdest = "../ccache_cc.sh",
801 mode = 0o755,
802 ))
803
804 factory.addStep(StringDownload(
805 name = "ccachecxx",
806 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
807 workerdest = "../ccache_cxx.sh",
808 mode = 0o755,
809 ))
810
811 # Git SSH
812 if git_ssh:
813 factory.addStep(StringDownload(
814 name = "dlgitclonekey",
815 s = git_ssh_key,
816 workerdest = "../git-clone.key",
817 mode = 0o600,
818 ))
819
820 factory.addStep(ShellCommand(
821 name = "patchfeedsconf",
822 description = "Patching feeds.conf",
823 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
824 haltOnFailure = True
825 ))
826
827 # feed
828 factory.addStep(ShellCommand(
829 name = "updatefeeds",
830 description = "Updating feeds",
831 command=["./scripts/feeds", "update"],
832 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
833 haltOnFailure = True
834 ))
835
836 # Git SSH
837 if git_ssh:
838 factory.addStep(ShellCommand(
839 name = "rmfeedsconf",
840 description = "Removing feeds.conf",
841 command=["rm", "feeds.conf"],
842 haltOnFailure = True
843 ))
844
845 # feed
846 factory.addStep(ShellCommand(
847 name = "installfeeds",
848 description = "Installing feeds",
849 command=["./scripts/feeds", "install", "-a"],
850 env = MakeEnv(tryccache=True),
851 haltOnFailure = True
852 ))
853
854 # seed config
855 if config_seed is not None:
856 factory.addStep(StringDownload(
857 name = "dlconfigseed",
858 s = config_seed + '\n',
859 workerdest = ".config",
860 mode = 0o644
861 ))
862
863 # configure
864 factory.addStep(ShellCommand(
865 name = "newconfig",
866 description = "Seeding .config",
867 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
868 ))
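# For an assumed target "ath79/generic" with a usign key configured, the step
# above appends the following seed lines to .config before "make defconfig":
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y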
869
870 factory.addStep(ShellCommand(
871 name = "delbin",
872 description = "Removing output directory",
873 command = ["rm", "-rf", "bin/"]
874 ))
875
876 factory.addStep(ShellCommand(
877 name = "defconfig",
878 description = "Populating .config",
879 command = ["make", "defconfig"],
880 env = MakeEnv()
881 ))
882
883 # check arch
884 factory.addStep(ShellCommand(
885 name = "checkarch",
886 description = "Checking architecture",
887 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
888 logEnviron = False,
889 want_stdout = False,
890 want_stderr = False,
891 haltOnFailure = True
892 ))
893
894 # find libc suffix
895 factory.addStep(SetPropertyFromCommand(
896 name = "libc",
897 property = "libc",
898 description = "Finding libc suffix",
899 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
900
901 # install build key
902 if usign_key is not None:
903 factory.addStep(StringDownload(
904 name = "dlkeybuildpub",
905 s = UsignSec2Pub(usign_key, usign_comment),
906 workerdest = "key-build.pub",
907 mode = 0o600,
908 ))
909
910 factory.addStep(StringDownload(
911 name = "dlkeybuild",
912 s = "# fake private key",
913 workerdest = "key-build",
914 mode = 0o600,
915 ))
916
917 factory.addStep(StringDownload(
918 name = "dlkeybuilducert",
919 s = "# fake certificate",
920 workerdest = "key-build.ucert",
921 mode = 0o600,
922 ))
923
924 # prepare dl
925 factory.addStep(ShellCommand(
926 name = "dldir",
927 description = "Preparing dl/",
928 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
929 logEnviron = False,
930 want_stdout = False
931 ))
932
933 # prepare tar
934 factory.addStep(ShellCommand(
935 name = "dltar",
936 description = "Building and installing GNU tar",
937 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
938 env = MakeEnv(tryccache=True),
939 haltOnFailure = True
940 ))
941
942 # populate dl
943 factory.addStep(ShellCommand(
944 name = "dlrun",
945 description = "Populating dl/",
946 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
947 env = MakeEnv(),
948 logEnviron = False,
949 locks = [dlLock.access('exclusive')],
950 ))
951
952 factory.addStep(ShellCommand(
953 name = "cleanbase",
954 description = "Cleaning base-files",
955 command=["make", "package/base-files/clean", "V=s"]
956 ))
957
958 # build
959 factory.addStep(ShellCommand(
960 name = "tools",
961 description = "Building and installing tools",
962 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
963 env = MakeEnv(tryccache=True),
964 haltOnFailure = True
965 ))
966
967 factory.addStep(ShellCommand(
968 name = "toolchain",
969 description = "Building and installing toolchain",
970 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
971 env = MakeEnv(),
972 haltOnFailure = True
973 ))
974
975 factory.addStep(ShellCommand(
976 name = "kmods",
977 description = "Building kmods",
978 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
979 env = MakeEnv(),
980 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
981 haltOnFailure = True
982 ))
983
984 # find kernel version
985 factory.addStep(SetPropertyFromCommand(
986 name = "kernelversion",
987 property = "kernelversion",
988 description = "Finding the effective Kernel version",
989 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
990 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
991 ))
992
993 factory.addStep(ShellCommand(
994 name = "pkgclean",
995 description = "Cleaning up package build",
996 command=["make", "package/cleanup", "V=s"]
997 ))
998
999 factory.addStep(ShellCommand(
1000 name = "pkgbuild",
1001 description = "Building packages",
1002 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1003 env = MakeEnv(),
1004 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1005 haltOnFailure = True
1006 ))
1007
1008 # factory.addStep(IfBuiltinShellCommand(
1009 factory.addStep(ShellCommand(
1010 name = "pkginstall",
1011 description = "Installing packages",
1012 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1013 env = MakeEnv(),
1014 haltOnFailure = True
1015 ))
1016
1017 factory.addStep(ShellCommand(
1018 name = "pkgindex",
1019 description = "Indexing packages",
1020 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1021 env = MakeEnv(),
1022 haltOnFailure = True
1023 ))
1024
1025 if enable_kmod_archive and embed_kmod_repository:
1026 # embed kmod repository. Must happen before 'images'
1027
1028 # find rootfs staging directory
1029 factory.addStep(SetPropertyFromCommand(
1030 name = "stageroot",
1031 property = "stageroot",
1032 description = "Finding the rootfs staging directory",
1033 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1034 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1035 want_stderr = False
1036 ))
1037
1038 factory.addStep(ShellCommand(
1039 name = "filesdir",
1040 description = "Creating file overlay directory",
1041 command=["mkdir", "-p", "files/etc/opkg"],
1042 haltOnFailure = True
1043 ))
1044
1045 factory.addStep(ShellCommand(
1046 name = "kmodconfig",
1047 description = "Embedding kmod repository configuration",
1048 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1049 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1050 haltOnFailure = True
1051 ))
1052
1053 #factory.addStep(IfBuiltinShellCommand(
1054 factory.addStep(ShellCommand(
1055 name = "images",
1056 description = "Building and installing images",
1057 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1058 env = MakeEnv(),
1059 haltOnFailure = True
1060 ))
1061
1062 factory.addStep(ShellCommand(
1063 name = "buildinfo",
1064 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1065 command = "make -j1 buildinfo V=s || true",
1066 env = MakeEnv(),
1067 haltOnFailure = True
1068 ))
1069
1070 factory.addStep(ShellCommand(
1071 name = "json_overview_image_info",
1072 description = "Generate profiles.json in target folder",
1073 command = "make -j1 json_overview_image_info V=s || true",
1074 env = MakeEnv(),
1075 haltOnFailure = True
1076 ))
1077
1078 factory.addStep(ShellCommand(
1079 name = "checksums",
1080 description = "Calculating checksums",
1081 command=["make", "-j1", "checksum", "V=s"],
1082 env = MakeEnv(),
1083 haltOnFailure = True
1084 ))
1085
1086 if enable_kmod_archive:
1087 factory.addStep(ShellCommand(
1088 name = "kmoddir",
1089 description = "Creating kmod directory",
1090 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1091 haltOnFailure = True
1092 ))
1093
1094 factory.addStep(ShellCommand(
1095 name = "kmodprepare",
1096 description = "Preparing kmod archive",
1097 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1098 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1099 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1100 haltOnFailure = True
1101 ))
1102
1103 factory.addStep(ShellCommand(
1104 name = "kmodindex",
1105 description = "Indexing kmod archive",
1106 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1107 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1108 env = MakeEnv(),
1109 haltOnFailure = True
1110 ))
1111
1112 # sign
1113 if ini.has_option("gpg", "key") or usign_key is not None:
1114 factory.addStep(MasterShellCommand(
1115 name = "signprepare",
1116 description = "Preparing temporary signing directory",
1117 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1118 haltOnFailure = True
1119 ))
1120
1121 factory.addStep(ShellCommand(
1122 name = "signpack",
1123 description = "Packing files to sign",
1124 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1125 haltOnFailure = True
1126 ))
1127
1128 factory.addStep(FileUpload(
1129 workersrc = "sign.tar.gz",
1130 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1131 haltOnFailure = True
1132 ))
1133
1134 factory.addStep(MasterShellCommand(
1135 name = "signfiles",
1136 description = "Signing files",
1137 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1138 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1139 haltOnFailure = True
1140 ))
1141
1142 factory.addStep(FileDownload(
1143 name = "dlsigntargz",
1144 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1145 workerdest = "sign.tar.gz",
1146 haltOnFailure = True
1147 ))
1148
1149 factory.addStep(ShellCommand(
1150 name = "signunpack",
1151 description = "Unpacking signed files",
1152 command = ["tar", "-xzf", "sign.tar.gz"],
1153 haltOnFailure = True
1154 ))
1155
1156 # upload
1157 factory.addStep(ShellCommand(
1158 name = "dirprepare",
1159 description = "Preparing upload directory structure",
1160 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1161 haltOnFailure = True
1162 ))
1163
1164 factory.addStep(ShellCommand(
1165 name = "linkprepare",
1166 description = "Preparing repository symlink",
1167 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1168 doStepIf = IsNoMasterBuild,
1169 haltOnFailure = True
1170 ))
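# Illustrative upload layout (assumed values): for target "ath79/generic" on
# an "openwrt-21.02" branch the two steps above prepare
#   tmp/upload/21.02-SNAPSHOT/targets/ath79/generic/
# and symlink tmp/upload/21.02-SNAPSHOT/packages to ../packages-21.02; master
# builds use no version prefix and skip the symlink (IsNoMasterBuild).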
1171
1172 if enable_kmod_archive:
1173 factory.addStep(ShellCommand(
1174 name = "kmoddirprepare",
1175 description = "Preparing kmod archive upload directory",
1176 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1177 haltOnFailure = True
1178 ))
1179
1180 factory.addStep(ShellCommand(
1181 name = "dirupload",
1182 description = "Uploading directory structure",
1183 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1184 env={'RSYNC_PASSWORD': rsync_bin_key},
1185 haltOnFailure = True,
1186 logEnviron = False,
1187 ))
1188
1189 # download remote sha256sums to 'target-sha256sums'
1190 factory.addStep(ShellCommand(
1191 name = "target-sha256sums",
1192 description = "Fetching remote sha256sums for target",
1193 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1194 env={'RSYNC_PASSWORD': rsync_bin_key},
1195 logEnviron = False,
1196 haltOnFailure = False,
1197 flunkOnFailure = False,
1198 warnOnFailure = False,
1199 ))
1200
1201 # build list of files to upload
1202 factory.addStep(FileDownload(
1203 name = "dlsha2rsyncpl",
1204 mastersrc = scripts_dir + '/sha2rsync.pl',
1205 workerdest = "../sha2rsync.pl",
1206 mode = 0o755,
1207 ))
1208
1209 factory.addStep(ShellCommand(
1210 name = "buildlist",
1211 description = "Building list of files to upload",
1212 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1213 haltOnFailure = True,
1214 ))
1215
1216 factory.addStep(FileDownload(
1217 name = "dlrsync.sh",
1218 mastersrc = scripts_dir + '/rsync.sh',
1219 workerdest = "../rsync.sh",
1220 mode = 0o755
1221 ))
1222
1223 # upload new files and update existing ones
1224 factory.addStep(ShellCommand(
1225 name = "targetupload",
1226 description = "Uploading target files",
1227 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1228 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1229 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1230 env={'RSYNC_PASSWORD': rsync_bin_key},
1231 haltOnFailure = True,
1232 logEnviron = False,
1233 ))
1234
1235 # delete files which don't exist locally
1236 factory.addStep(ShellCommand(
1237 name = "targetprune",
1238 description = "Pruning target files",
1239 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1240 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1241 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1242 env={'RSYNC_PASSWORD': rsync_bin_key},
1243 haltOnFailure = True,
1244 logEnviron = False,
1245 ))
1246
1247 if enable_kmod_archive:
1248 factory.addStep(ShellCommand(
1249 name = "kmodupload",
1250 description = "Uploading kmod archive",
1251 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1252 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1253 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1254 env={'RSYNC_PASSWORD': rsync_bin_key},
1255 haltOnFailure = True,
1256 logEnviron = False,
1257 ))
1258
1259 if rsync_src_url is not None:
1260 factory.addStep(ShellCommand(
1261 name = "sourcelist",
1262 description = "Finding source archives to upload",
1263 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1264 haltOnFailure = True
1265 ))
1266
1267 factory.addStep(ShellCommand(
1268 name = "sourceupload",
1269 description = "Uploading source archives",
1270 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1271 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1272 env={'RSYNC_PASSWORD': rsync_src_key},
1273 haltOnFailure = True,
1274 logEnviron = False,
1275 ))
1276
1277 if False:
1278 factory.addStep(ShellCommand(
1279 name = "packageupload",
1280 description = "Uploading package files",
1281 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1282 env={'RSYNC_PASSWORD': rsync_bin_key},
1283 haltOnFailure = False,
1284 logEnviron = False,
1285 ))
1286
1287 # logs
1288 if False:
1289 factory.addStep(ShellCommand(
1290 name = "upload",
1291 description = "Uploading logs",
1292 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1293 env={'RSYNC_PASSWORD': rsync_bin_key},
1294 haltOnFailure = False,
1295 alwaysRun = True,
1296 logEnviron = False,
1297 ))
1298
1299 factory.addStep(ShellCommand(
1300 name = "df",
1301 description = "Reporting disk usage",
1302 command=["df", "-h", "."],
1303 env={'LC_ALL': 'C'},
1304 haltOnFailure = False,
1305 alwaysRun = True
1306 ))
1307
1308 factory.addStep(ShellCommand(
1309 name = "ccachestat",
1310 description = "Reporting ccache stats",
1311 command=["ccache", "-s"],
1312 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1313 want_stderr = False,
1314 haltOnFailure = False,
1315 flunkOnFailure = False,
1316 warnOnFailure = False,
1317 alwaysRun = True,
1318 ))
1319
1320 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1321
1322 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1323 force_factory.addStep(steps.Trigger(
1324 name = "trigger_%s" % target,
1325 description = "Triggering %s build" % target,
1326 schedulerNames = [ "trigger_%s" % target ],
1327 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1328 doStepIf = IsTargetSelected(target)
1329 ))
1330
1331
1332 ####### STATUS TARGETS
1333
1334 # Build status is exposed through the 'www' web UI configured below and
1335 # pushed to the reporters in c['services'], which offer a variety of targets
1336 # including web pages, email senders, and IRC bots.
1337
1338 if ini.has_option("phase1", "status_bind"):
1339 c['www'] = {
1340 'port': ini.get("phase1", "status_bind"),
1341 'plugins': {
1342 'waterfall_view': True,
1343 'console_view': True,
1344 'grid_view': True
1345 }
1346 }
1347
1348 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1349 c['www']['auth'] = util.UserPasswordAuth([
1350 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1351 ])
1352 c['www']['authz'] = util.Authz(
1353 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1354 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1355 )
1356
1357 c['services'] = []
1358 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1359 irc_host = ini.get("irc", "host")
1360 irc_port = 6667
1361 irc_chan = ini.get("irc", "channel")
1362 irc_nick = ini.get("irc", "nickname")
1363 irc_pass = None
1364
1365 if ini.has_option("irc", "port"):
1366 irc_port = ini.getint("irc", "port")
1367
1368 if ini.has_option("irc", "password"):
1369 irc_pass = ini.get("irc", "password")
1370
1371 irc = reporters.IRC(irc_host, irc_nick,
1372 port = irc_port,
1373 password = irc_pass,
1374 channels = [ irc_chan ],
1375 notify_events = [ 'exception', 'problem', 'recovery' ]
1376 )
1377
1378 c['services'].append(irc)
1379
1380 c['revlink'] = util.RevlinkMatch([
1381 r'https://git.openwrt.org/openwrt/(.*).git'
1382 ],
1383 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1384
1385 ####### DB URL
1386
1387 c['db'] = {
1388 # This specifies what database buildbot uses to store its state. You can leave
1389 # this at its default for all but the largest installations.
1390 'db_url' : "sqlite:///state.sqlite",
1391 }
1392
1393 c['buildbotNetUsageData'] = None