phase1,phase2: improve round robin builds
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record this master's PID once so external tooling can locate the process.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pid_file:
        pid_file.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# All site-specific settings come from an ini file; its path may be
# overridden through the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9989

# NOTE(review): ini.get() returns a str, so worker_port is an int by default
# but a str when configured -- presumably both are accepted by the 'pb'
# protocol config below; confirm against the buildbot docs.
if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")

c['workers'] = []
# Master-wide bandwidth locks, keyed by lock name; filled from the per-worker
# dl_lock/ul_lock ini options below.
NetLocks = dict()
88
# Register every phase-1 worker declared in the ini file.
#
# A "worker " section must provide "name" and "password"; it belongs to
# phase 1 when it has no "phase" option or "phase = 1".  Per-worker
# properties are attached so build steps can render them later:
#   dl_lock/ul_lock - names of master locks guarding download/upload bandwidth
#   do_cleanup      - worker wants cleanup.sh runs between builds
#   max_builds      - number of concurrent builds allowed on the worker
#   shared_wd       - worker reuses a single shared working directory
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUG FIX: this branch previously read the "dl_lock" option,
                # so a worker's upload lock silently reused its download lock
                # name and any configured ul_lock was ignored.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data: drop step logs older than three days,
# running the janitor daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently *started* build: it may have completed
    # later than the newest build request did.  NOTE(review): presumably a
    # consequence of request collapsing -- confirm against the data API docs.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
172
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    Builders that never completed a build sort first (datetime.min); builders
    that are currently building sort last (datetime.max); ties break on name.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders should be picked first.
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders go to the back of the queue.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from "results": the old name shadowed the
    # buildbot.process.results module imported at the top of this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]

c['prioritizeBuilders'] = prioritizeBuilders
211
####### CHANGESOURCES

# Defaults for everything configurable below; each may be overridden from
# the ini file.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
tree_expire = 0      # threshold handed to expire.sh; 0 disables the expire steps
other_builds = 0     # extra concurrent builds accounted for when computing -jN
cc_version = None    # optional [op, version] constraint passed to findbin.pl

cc_command = "gcc"
cxx_command = "g++"

config_seed = ""

git_ssh = False
git_ssh_key = None

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
    other_builds = ini.getint("phase1", "other_builds")

# cc_version may be "<version>" (meaning "eq <version>") or "<op> <version>".
if ini.has_option("phase1", "cc_version"):
    cc_version = ini.get("phase1", "cc_version").split()
    if len(cc_version) == 1:
        cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

# git_ssh is only honoured when a private key is configured as well.
if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-protocol destinations ("host::module" or "rsync://...") additionally
# support a connect timeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]

# usign package-signing key; the comment defaults to one derived from the
# branch name (e.g. "untrusted comment: Openwrt 19.07 key").
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
291
292
# Discover the list of buildable targets by cloning (or updating) the source
# tree and asking its dump-target-info.pl script; each output line starts
# with the "<target>/<subtarget>" identifier we keep.
targets = []

source_git = work_dir + '/source.git'

if not os.path.isdir(source_git):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, source_git])
else:
    subprocess.call(["git", "pull"], cwd = source_git)

os.makedirs(source_git + '/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = source_git)

for line in findtargets.stdout:
    fields = line.decode().strip().split(' ')
    targets.append(fields[0])
311
312
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source repository
# every five minutes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
321
322 ####### SCHEDULERS
323
# Configure the Schedulers, which decide how to react to incoming changes.
# In this case, a change on the configured branch kicks off a build of every
# target (see the "all" scheduler below).
326
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter listing the release tags that match the
    configured branch (e.g. the v19.07.* tags for an openwrt-19.07 branch).

    The choice list is recomputed from `git ls-remote --tags` each time the
    `choices` property is read and cached in _choice_list so that
    parse_from_arg() can validate the submitted value.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Most recently computed tag list; used for strict validation.
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        # Release branches are named "<name>-<major>.<minor>"; master has no
        # version suffix and therefore offers no tags.
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # Keep only tags belonging to this branch's major.minor series.
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Newest first; appending "-z" to final releases makes them sort
            # after their rc tags ("X.Y.Z-z" > "X.Y.Z-rcN").
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            # The empty choice means "build the branch HEAD, no tag".
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
368
c['schedulers'] = []

# Automatic scheduler: build every target once the branch has been quiet for
# 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual scheduler backing the "Force builds" button: lets an operator pick a
# single target (or all) and optionally a release tag (see TagChoiceParameter).
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
422
423 ####### BUILDERS
424
425 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
426 # what steps, and which workers can execute them. Note that any particular build will
427 # only take place on one worker.
428
# Mapping of user-requestable "clean" property values to make targets;
# IsMakeCleanRequested() matches the first field against that property.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
436
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern*."""
    def _check(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return _check
446
def IsSharedWorkdir(step):
    """True when the worker builds inside a single shared work directory."""
    return bool(step.getProperty("shared_wd"))

def IsCleanupRequested(step):
    """True when the worker asked for cleanup.sh runs; never on shared workdirs."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    """True when tree expiry applies, i.e. neither shared workdir nor cleanup."""
    return not IsSharedWorkdir(step) and not IsCleanupRequested(step)
464
def IsGitFreshRequested(step):
    """True when the 'fresh' git checkout method should be used (cleanup builds)."""
    return bool(step.getProperty("do_cleanup"))

def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested: use the 'clean' checkout method."""
    return not IsGitFreshRequested(step)
474
def IsTaggingRequested(step):
    """True when the "tag" property holds a valid release tag (X.Y.Z[-rcN])."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))

def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested."""
    return not IsTaggingRequested(step)
484
def IsNoMasterBuild(step):
    """Step condition: true on release branches, false when building master."""
    return repo_branch != "master"

def GetBaseVersion():
    """Return the "X.Y" part of a release branch name like "openwrt-19.07",
    or "master" for non-release branches."""
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
    return "master"
493
@properties.renderer
def GetVersionPrefix(props):
    """Upload directory prefix: "X.Y.Z/" for tagged builds,
    "X.Y-SNAPSHOT/" on release branches, "" on master."""
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""

@properties.renderer
def GetNumJobs(props):
    """make -j value: nproc divided over the worker's concurrent builds plus
    the configured other_builds; "1" when the inputs are unknown."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))

@properties.renderer
def GetCC(props):
    """C compiler command detected by findbin.pl, defaulting to gcc."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"

@properties.renderer
def GetCXX(props):
    """C++ compiler command detected by findbin.pl, defaulting to g++."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"

@properties.renderer
def GetCwd(props):
    """Best guess at the build's working directory on the worker."""
    if props.hasProperty("builddir"):
        return props["builddir"]
    if props.hasProperty("workdir"):
        return props["workdir"]
    return "/"

@properties.renderer
def GetCCache(props):
    """ccache command when available on the worker, else the empty string."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
540
def GetNextBuild(builder, requests):
    """Prefer the first request carrying a "tag" property (forced release
    builds); otherwise take the oldest request and log the choice."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
549
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict for make steps.

    CCC/CCXX always hold the real compilers.  CC/CXX point at the ccache
    wrapper scripts when tryccache is set, otherwise at the real compilers;
    entries from *overrides* win over everything else.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        wrappers = {
            'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
            'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        }
    else:
        wrappers = {'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''}
    env.update(wrappers)
    if overrides is not None:
        env.update(overrides)
    return env
566
@properties.renderer
def NetLockDl(props):
    """Render the worker's exclusive download-bandwidth lock, if configured."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []

@properties.renderer
def NetLockUl(props):
    """Render the worker's exclusive upload-bandwidth lock, if configured."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
586
@util.renderer
def TagPropertyValue(props):
    """Extract the "tag" value from the force-scheduler "options" dict, if any."""
    if not props.hasProperty("options"):
        return None
    options = props.getProperty("options")
    return options.get("tag") if type(options) is dict else None
594
def IsTargetSelected(target):
    """Return a doStepIf predicate: true unless the force-build options
    select a different specific target."""
    def _selected(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True

        if type(options) is dict:
            chosen = options.get("target", "all")
            if chosen not in ("all", target):
                return False

        return True

    return _selected
609
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key string from a base64-encoded secret key.

    Returns the two-line public key: the comment line with a trailing
    "secret key" rewritten to "public key", followed by the base64 key
    material.  Returns None when *seckey* is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # b64encode() returns bytes on Python 3; decode it so the result is a
    # plain string instead of a "b'...'" repr ending up in key-build.pub.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
618
619
c['builders'] = []

# Only one build per worker may populate the shared dl/ directory at a time.
dlLock = locks.WorkerLock("worker_dl")

# Shell helper prepended to commands wrapped by IfBuiltinShellCommand: it
# returns 0 (run the command) unless every changed file belongs to a package
# that is not enabled (=y) in the current .config.  Whitespace is collapsed
# so the snippet fits on a single `sh -c` line.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
	checkBuiltin() {
		local symbol op path file;
		for file in $CHANGED_FILES; do
			case "$file" in
				package/*/*) : ;;
				*) return 0 ;;
			esac;
		done;
		while read symbol op path; do
			case "$symbol" in package-*)
				symbol="${symbol##*(}";
				symbol="${symbol%)}";
				for file in $CHANGED_FILES; do
					case "$file" in "package/$path/"*)
						grep -qsx "$symbol=y" .config && return 0
					;; esac;
				done;
			esac;
		done < tmp/.packagedeps;
		return 1;
	}
""").strip()
647
648
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only runs when one of the changed packages is
    actually enabled (=y) in the current .config.

    The real command is wrapped behind the checkBuiltin() shell helper and
    the files touched by the triggering changes are exported to it through
    the CHANGED_FILES environment variable.
    """

    def _quote(self, arg):
        # Shell-quote a single argument; plain safe words pass through as-is.
        # (Parameter renamed from "str", which shadowed the builtin.)
        if re.search("[^a-zA-Z0-9/_.-]", arg):
            return "'%s'" %(re.sub("'", "'\"'\"'", arg))
        return arg

    def setCommand(self, command):
        # BUG FIX: the original checked isinstance(command, (str, unicode));
        # "unicode" does not exist on Python 3, so list-style commands raised
        # NameError here.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Collect the union of files touched by all change sources feeding
        # this build and expose them to the wrapped shell command.
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullWorkerEnv = workerEnv.copy()
        fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullWorkerEnv
676
# Every registered worker may run the pseudo-builder that backs the force
# scheduler.
workerNames = [worker.workername for worker in c['workers']]

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
688
689 for target in targets:
690 ts = target.split('/')
691
692 factory = BuildFactory()
693
694 # setup shared work directory if required
695 factory.addStep(ShellCommand(
696 name = "sharedwd",
697 description = "Setting up shared work directory",
698 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
699 workdir = ".",
700 haltOnFailure = True,
701 doStepIf = IsSharedWorkdir))
702
703 # find number of cores
704 factory.addStep(SetPropertyFromCommand(
705 name = "nproc",
706 property = "nproc",
707 description = "Finding number of CPUs",
708 command = ["nproc"]))
709
710 # find gcc and g++ compilers
711 factory.addStep(FileDownload(
712 name = "dlfindbinpl",
713 mastersrc = scripts_dir + '/findbin.pl',
714 workerdest = "../findbin.pl",
715 mode = 0o755))
716
717 factory.addStep(SetPropertyFromCommand(
718 name = "gcc",
719 property = "cc_command",
720 description = "Finding gcc command",
721 command = [
722 "../findbin.pl", "gcc",
723 cc_version[0] if cc_version is not None else '',
724 cc_version[1] if cc_version is not None else ''
725 ],
726 haltOnFailure = True))
727
728 factory.addStep(SetPropertyFromCommand(
729 name = "g++",
730 property = "cxx_command",
731 description = "Finding g++ command",
732 command = [
733 "../findbin.pl", "g++",
734 cc_version[0] if cc_version is not None else '',
735 cc_version[1] if cc_version is not None else ''
736 ],
737 haltOnFailure = True))
738
739 # see if ccache is available
740 factory.addStep(SetPropertyFromCommand(
741 property = "ccache_command",
742 command = ["which", "ccache"],
743 description = "Testing for ccache command",
744 haltOnFailure = False,
745 flunkOnFailure = False,
746 warnOnFailure = False,
747 ))
748
749 # expire tree if needed
750 if tree_expire > 0:
751 factory.addStep(FileDownload(
752 name = "dlexpiresh",
753 doStepIf = IsExpireRequested,
754 mastersrc = scripts_dir + '/expire.sh',
755 workerdest = "../expire.sh",
756 mode = 0o755))
757
758 factory.addStep(ShellCommand(
759 name = "expire",
760 description = "Checking for build tree expiry",
761 command = ["./expire.sh", str(tree_expire)],
762 workdir = ".",
763 haltOnFailure = True,
764 doStepIf = IsExpireRequested,
765 timeout = 2400))
766
767 # cleanup.sh if needed
768 factory.addStep(FileDownload(
769 name = "dlcleanupsh",
770 mastersrc = scripts_dir + '/cleanup.sh',
771 workerdest = "../cleanup.sh",
772 mode = 0o755,
773 doStepIf = IsCleanupRequested))
774
775 factory.addStep(ShellCommand(
776 name = "cleanold",
777 description = "Cleaning previous builds",
778 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
779 workdir = ".",
780 haltOnFailure = True,
781 doStepIf = IsCleanupRequested,
782 timeout = 2400))
783
784 factory.addStep(ShellCommand(
785 name = "cleanup",
786 description = "Cleaning work area",
787 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
788 workdir = ".",
789 haltOnFailure = True,
790 doStepIf = IsCleanupRequested,
791 timeout = 2400))
792
793 # user-requested clean targets
794 for tuple in CleanTargetMap:
795 factory.addStep(ShellCommand(
796 name = tuple[1],
797 description = 'User-requested "make %s"' % tuple[1],
798 command = ["make", tuple[1], "V=s"],
799 env = MakeEnv(),
800 doStepIf = IsMakeCleanRequested(tuple[0])
801 ))
802
803 # Workaround bug when switching from a checked out tag back to a branch
804 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
805 factory.addStep(ShellCommand(
806 name = "gitcheckout",
807 description = "Ensure that Git HEAD is sane",
808 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
809 haltOnFailure = True))
810
811 # check out the source
812 # Git() runs:
813 # if repo doesn't exist: 'git clone repourl'
814 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
815 # 'git fetch -t repourl branch; git reset --hard revision'
816 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
817 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
818 factory.addStep(Git(
819 name = "gitclean",
820 repourl = repo_url,
821 branch = repo_branch,
822 mode = 'full',
823 method = 'clean',
824 haltOnFailure = True,
825 doStepIf = IsGitCleanRequested,
826 ))
827
828 factory.addStep(Git(
829 name = "gitfresh",
830 repourl = repo_url,
831 branch = repo_branch,
832 mode = 'full',
833 method = 'fresh',
834 haltOnFailure = True,
835 doStepIf = IsGitFreshRequested,
836 ))
837
838 # update remote refs
839 factory.addStep(ShellCommand(
840 name = "fetchrefs",
841 description = "Fetching Git remote refs",
842 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
843 haltOnFailure = True
844 ))
845
846 # switch to tag
847 factory.addStep(ShellCommand(
848 name = "switchtag",
849 description = "Checking out Git tag",
850 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
851 haltOnFailure = True,
852 doStepIf = IsTaggingRequested
853 ))
854
855 # Verify that Git HEAD points to a tag or branch
856 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
857 factory.addStep(ShellCommand(
858 name = "gitverify",
859 description = "Ensure that Git HEAD is pointing to a branch or tag",
860 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
861 haltOnFailure = True))
862
863 factory.addStep(ShellCommand(
864 name = "rmtmp",
865 description = "Remove tmp folder",
866 command=["rm", "-rf", "tmp/"]))
867
868 # feed
869 # factory.addStep(ShellCommand(
870 # name = "feedsconf",
871 # description = "Copy the feeds.conf",
872 # command='''cp ~/feeds.conf ./feeds.conf''' ))
873
874 # feed
875 factory.addStep(ShellCommand(
876 name = "rmfeedlinks",
877 description = "Remove feed symlinks",
878 command=["rm", "-rf", "package/feeds/"]))
879
880 factory.addStep(StringDownload(
881 name = "ccachecc",
882 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
883 workerdest = "../ccache_cc.sh",
884 mode = 0o755,
885 ))
886
887 factory.addStep(StringDownload(
888 name = "ccachecxx",
889 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
890 workerdest = "../ccache_cxx.sh",
891 mode = 0o755,
892 ))
893
894 # Git SSH
895 if git_ssh:
896 factory.addStep(StringDownload(
897 name = "dlgitclonekey",
898 s = git_ssh_key,
899 workerdest = "../git-clone.key",
900 mode = 0o600,
901 ))
902
903 factory.addStep(ShellCommand(
904 name = "patchfeedsconf",
905 description = "Patching feeds.conf",
906 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
907 haltOnFailure = True
908 ))
909
910 # feed
911 factory.addStep(ShellCommand(
912 name = "updatefeeds",
913 description = "Updating feeds",
914 command=["./scripts/feeds", "update"],
915 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
916 haltOnFailure = True
917 ))
918
919 # Git SSH
920 if git_ssh:
921 factory.addStep(ShellCommand(
922 name = "rmfeedsconf",
923 description = "Removing feeds.conf",
924 command=["rm", "feeds.conf"],
925 haltOnFailure = True
926 ))
927
928 # feed
929 factory.addStep(ShellCommand(
930 name = "installfeeds",
931 description = "Installing feeds",
932 command=["./scripts/feeds", "install", "-a"],
933 env = MakeEnv(tryccache=True),
934 haltOnFailure = True
935 ))
936
937 # seed config
938 if config_seed is not None:
939 factory.addStep(StringDownload(
940 name = "dlconfigseed",
941 s = config_seed + '\n',
942 workerdest = ".config",
943 mode = 0o644
944 ))
945
946 # configure
947 factory.addStep(ShellCommand(
948 name = "newconfig",
949 description = "Seeding .config",
950 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
951 ))
952
953 factory.addStep(ShellCommand(
954 name = "delbin",
955 description = "Removing output directory",
956 command = ["rm", "-rf", "bin/"]
957 ))
958
959 factory.addStep(ShellCommand(
960 name = "defconfig",
961 description = "Populating .config",
962 command = ["make", "defconfig"],
963 env = MakeEnv()
964 ))
965
966 # check arch
967 factory.addStep(ShellCommand(
968 name = "checkarch",
969 description = "Checking architecture",
970 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
971 logEnviron = False,
972 want_stdout = False,
973 want_stderr = False,
974 haltOnFailure = True
975 ))
976
977 # find libc suffix
978 factory.addStep(SetPropertyFromCommand(
979 name = "libc",
980 property = "libc",
981 description = "Finding libc suffix",
982 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
983
984 # install build key
985 if usign_key is not None:
986 factory.addStep(StringDownload(
987 name = "dlkeybuildpub",
988 s = UsignSec2Pub(usign_key, usign_comment),
989 workerdest = "key-build.pub",
990 mode = 0o600,
991 ))
992
993 factory.addStep(StringDownload(
994 name = "dlkeybuild",
995 s = "# fake private key",
996 workerdest = "key-build",
997 mode = 0o600,
998 ))
999
1000 factory.addStep(StringDownload(
1001 name = "dlkeybuilducert",
1002 s = "# fake certificate",
1003 workerdest = "key-build.ucert",
1004 mode = 0o600,
1005 ))
1006
1007 # prepare dl
1008 factory.addStep(ShellCommand(
1009 name = "dldir",
1010 description = "Preparing dl/",
1011 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1012 logEnviron = False,
1013 want_stdout = False
1014 ))
1015
1016 # prepare tar
1017 factory.addStep(ShellCommand(
1018 name = "dltar",
1019 description = "Building and installing GNU tar",
1020 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1021 env = MakeEnv(tryccache=True),
1022 haltOnFailure = True
1023 ))
1024
1025 # populate dl
1026 factory.addStep(ShellCommand(
1027 name = "dlrun",
1028 description = "Populating dl/",
1029 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1030 env = MakeEnv(),
1031 logEnviron = False,
1032 locks = [dlLock.access('exclusive')],
1033 ))
1034
1035 factory.addStep(ShellCommand(
1036 name = "cleanbase",
1037 description = "Cleaning base-files",
1038 command=["make", "package/base-files/clean", "V=s"]
1039 ))
1040
	# build: the main compile pipeline — tools, toolchain, kernel, packages.
	# All parallel steps derive their -j level from GetNumJobs.
	factory.addStep(ShellCommand(
		name = "tools",
		description = "Building and installing tools",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "toolchain",
		description = "Building and installing toolchain",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# target/compile builds the kernel and kernel modules; IGNORE_ERRORS lets
	# non-critical (n/m) package failures through, BUILD_LOG captures logs.
	factory.addStep(ShellCommand(
		name = "kmods",
		description = "Building kmods",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# find kernel version: store "<version>-<release>-<vermagic>" in the
	# 'kernelversion' property; used below to name the kmods feed directory.
	factory.addStep(SetPropertyFromCommand(
		name = "kernelversion",
		property = "kernelversion",
		description = "Finding the effective Kernel version",
		command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
		env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
	))

	factory.addStep(ShellCommand(
		name = "pkgclean",
		description = "Cleaning up package build",
		command=["make", "package/cleanup", "V=s"]
	))

	factory.addStep(ShellCommand(
		name = "pkgbuild",
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "pkginstall",
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# index the package feeds; CONFIG_SIGNED_PACKAGES= disables on-worker
	# signing (the master signs the final index in the "sign" section below)
	factory.addStep(ShellCommand(
		name = "pkgindex",
		description = "Indexing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
		env = MakeEnv(),
		haltOnFailure = True
	))
1107
	if enable_kmod_archive and embed_kmod_repository:
		# embed kmod repository. Must happen before 'images' so that the
		# generated distfeeds.conf (with the extra kmods feed) ends up inside
		# the image via the files/ overlay.

		# find rootfs staging directory (holds the template distfeeds.conf)
		factory.addStep(SetPropertyFromCommand(
			name = "stageroot",
			property = "stageroot",
			description = "Finding the rootfs staging directory",
			command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
			env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
			want_stderr = False
		))

		factory.addStep(ShellCommand(
			name = "filesdir",
			description = "Creating file overlay directory",
			command=["mkdir", "-p", "files/etc/opkg"],
			haltOnFailure = True
		))

		# duplicate each "src/gz *_core .../packages" feed line into a
		# matching "_kmods .../kmods/<kernelversion>" entry
		factory.addStep(ShellCommand(
			name = "kmodconfig",
			description = "Embedding kmod repository configuration",
			command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
				"%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
			haltOnFailure = True
		))
1135
	#factory.addStep(IfBuiltinShellCommand(
	# assemble the final firmware images for this target
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# "|| true": buildinfo generation is best-effort — note this makes the
	# haltOnFailure flag effectively moot for this step (deliberate).
	factory.addStep(ShellCommand(
		name = "buildinfo",
		description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
		command = "make -j1 buildinfo V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# best-effort as well ("|| true"), same caveat as above
	factory.addStep(ShellCommand(
		name = "json_overview_image_info",
		description = "Generate profiles.json in target folder",
		command = "make -j1 json_overview_image_info V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# -j1: sha256sums must be generated after everything else is in place
	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))
1168
	if enable_kmod_archive:
		# copy the kmod-*.ipk packages into a per-kernel-version archive
		# directory and index it as a standalone feed.
		# ts[0]/ts[1] are the target/subtarget pair for this builder.
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# only kmod-*.ipk files are copied; everything else is excluded
		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# index the kmods feed; signing again deferred to the master
		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
				Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))
1194
	# sign: round-trip the checksum/index files through the master, where the
	# real signing keys live — pack them, upload, sign via signall.sh on the
	# master, download the result and unpack it back in place.
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		# collect sha256sums and Packages files (depth 1-2) into sign.tar.gz
		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		# per-target filename avoids collisions between concurrent builders
		factory.addStep(FileUpload(
			workersrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		# runs on the master; signall.sh reads key config from CONFIG_INI
		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			workerdest = "sign.tar.gz",
			haltOnFailure = True
		))

		# overwrite the unsigned files with their signed counterparts
		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
1238
	# upload: create the remote-side directory skeleton locally under
	# tmp/upload/ and rsync it over, so later per-file uploads have their
	# destination directories in place.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# non-master (release) builds get a "packages" symlink pointing at the
	# shared packages-<basever> feed directory
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	# push the skeleton (and symlink) to the binary rsync target;
	# logEnviron=False keeps RSYNC_PASSWORD out of the step log
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))
1271
	# download remote sha256sums to 'target-sha256sums'; failure is tolerated
	# (first upload for a target has no remote sums yet), hence the all-False
	# failure flags below.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload: sha2rsync.pl diffs remote vs local
	# sha256sums to produce 'rsynclist' (only changed/new files get uploaded)
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	# rsync.sh is a retrying wrapper around rsync used by the upload steps
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1305
	# upload new files and update existing ones; the per-target
	# --partial-dir keeps interrupted transfers from clashing between
	# concurrent builders, and /kmods/ is handled by its own step below
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally: --existing --ignore-existing
	# + --delete makes rsync transfer nothing, only prune remote leftovers
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	if enable_kmod_archive:
		# mirror the per-kernel-version kmods feed; --delete keeps the
		# remote side an exact copy
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
				["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
		))
1341
	if rsync_src_url is not None:
		# list source tarballs fetched during this build (newer than .config),
		# skipping empty, hidden, .hash and .dl files
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only: source archives are content-addressed by name, so a
		# size match is treated as "already uploaded"; the partial-dir also
		# includes the worker name since dl/ is shared per worker
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
				[Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
		))
1359
	# NOTE: deliberately disabled — kept for reference; flip the condition to
	# re-enable per-architecture package uploads
	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			logEnviron = False,
		))

	# logs
	# NOTE: deliberately disabled as well — log upload kept for reference
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			alwaysRun = True,
			logEnviron = False,
		))
1385
	# diagnostics: purely informational steps — they always run and can
	# never fail the build (all failure flags off)
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "du",
		description = "Reporting estimated file space usage",
		command=["du", "-sh", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	# ccache may live in the buildroot's staging dir, so extend PATH with it
	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))
1419
	# register the builder for this target and a matching triggerable
	# scheduler, plus a step in the force-build factory that fires it when
	# the target is selected in the force form
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1430
1431
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase1", "status_bind"):
	# Serve the buildbot web UI on the configured endpoint with the three
	# standard build-overview plugins enabled.
	c['www'] = dict(
		port = ini.get("phase1", "status_bind"),
		plugins = dict(
			waterfall_view = True,
			console_view = True,
			grid_view = True,
		),
	)

	if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
		# Single admin account from config.ini; only holders of the "admins"
		# role may hit control endpoints (force/stop builds etc.).
		admin_user = ini.get("phase1", "status_user")
		c['www']['auth'] = util.UserPasswordAuth([
			(admin_user, ini.get("phase1", "status_password")),
		])
		c['www']['authz'] = util.Authz(
			allowRules = [ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers = [ util.RolesFromUsername(roles=["admins"], usernames=[admin_user]) ],
		)
1456
c['services'] = []
# Attach an IRC status bot when host, nickname and channel are all configured.
if all(ini.has_option("irc", key) for key in ("host", "nickname", "channel")):
	host = ini.get("irc", "host")
	nick = ini.get("irc", "nickname")
	chan = ini.get("irc", "channel")
	# optional settings with defaults: plain IRC port, no password
	port = ini.getint("irc", "port") if ini.has_option("irc", "port") else 6667
	password = ini.get("irc", "password") if ini.has_option("irc", "password") else None

	c['services'].append(reporters.IRC(host, nick,
		port = port,
		password = password,
		channels = [ chan ],
		# announce only state changes worth acting on
		notify_events = [ 'exception', 'problem', 'recovery' ]
	))
1479
# map openwrt.org git repository URLs to gitweb commit links in the web UI
c['revlink'] = util.RevlinkMatch([
	r'https://git.openwrt.org/openwrt/(.*).git'
	],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# opt out of reporting anonymous usage statistics to buildbot.net
c['buildbotNetUsageData'] = None