phase1,phase2: s/master/main for phase{1,2}
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record our PID in twistd.pid (unless the daemon already wrote one) so
# helper scripts can locate the running master process.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pid_file:
        pid_file.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Deployment-specific settings live in an ini file next to the master;
# the BUILDMASTER_CONFIG environment variable may point somewhere else.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")
74
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Listening port for worker connections; "phase1.port" in config.ini
# overrides the default. NOTE(review): ini.get() yields a string here while
# the default is an int — buildbot accepts both for 'pb' ports.
worker_port = 9989

if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")

c['workers'] = []
NetLocks = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker properties consulted by the build steps below.
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
            sl_props['max_builds'] = max_builds
            if max_builds == 1:
                # A worker limited to one build can reuse a shared workdir.
                sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # Bug fix: this branch previously read the "dl_lock" option,
                # so a worker with only an upload lock configured would crash
                # (missing option) or silently reuse the download lock name.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: fold multiple pending requests per builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data: a nightly (06:00) janitor run deletes
# step logs older than three days.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Ask the data API for the single most recently completed, non-skipped
    # request (sorted newest-first, limited to one row).
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # No completed requests yet -> implicit None return.
        return

    return completed[0]['complete_at']
158
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Sort builders so the least recently finished one builds first.

    Builders that never completed a (non-skipped) build sort to the front,
    builders that are currently building sort to the back; ties are broken
    by builder name.

    @returns: list of sorted builders
    """

    def is_building(builder):
        return bool(builder.building) or bool(builder.old_building)

    def builder_info(builder):
        d = defer.maybeDeferred(getNewestCompleteTime, builder)
        d.addCallback(lambda complete_at: (complete_at, builder))
        return d

    def sort_key(entry):
        finished_at, builder = entry
        if is_building(builder):
            # Busy builders go last.
            finished_at = datetime.max.replace(tzinfo=tzutc())
        elif not finished_at:
            # Never-built builders go first.
            finished_at = datetime.min.replace(tzinfo=tzutc())
        return (finished_at, builder.name)

    ranked = yield defer.gatherResults([builder_info(b) for b in builders])
    ranked.sort(key=sort_key)

    for r in ranked:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in ranked]

c['prioritizeBuilders'] = prioritizeBuilders
197
####### CHANGESOURCES

# Local checkout/working locations on the master.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
tree_expire = 0     # seconds after which a build tree is expired (0 = never)
other_builds = 0    # headroom subtracted when computing -j (see GetNumJobs)
cc_version = None   # required host compiler version as [op, version]

cc_command = "gcc"
cxx_command = "g++"

config_seed = ""    # verbatim seed content prepended to each .config

git_ssh = False     # rewrite feed URLs to ssh:// and use a deploy key
git_ssh_key = None

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
    other_builds = ini.getint("phase1", "other_builds")

if ini.has_option("phase1", "cc_version"):
    # Either "<version>" (exact match) or "<op> <version>", e.g. "ge 6.0".
    cc_version = ini.get("phase1", "cc_version").split()
    if len(cc_version) == 1:
        cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # No key available -> SSH access cannot work, force it back off.
    git_ssh = False

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# Targets using the rsync protocol additionally get a connect timeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]

usign_key = None
# Default comment derived from the branch, e.g. "untrusted comment: Openwrt 21.02 key".
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")


# find targets
targets = [ ]

# Keep a shallow clone of the source tree on the master; it is only used to
# enumerate the buildable targets below.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    # Each output line starts with "<target/subtarget>"; only that is kept.
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
307
308 ####### SCHEDULERS
309
310 # Configure the Schedulers, which decide how to react to incoming changes. In this
311 # case, just kick off a 'basebuild' build
312
class TagChoiceParameter(BaseParameter):
    # Force-scheduler parameter whose choice list is recomputed on every
    # access: the release tags available for the configured branch.
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Last computed tag list; parse_from_arg() validates against it.
        self._choice_list = []

    @property
    def choices(self):
        """Current list of selectable tags.

        Only populated on release branches (named "...-NN.NN"): lists the
        remote vNN.NN.* tags, newest first, with final releases sorted
        above their own -rc tags, and the empty string (= build branch
        HEAD, no tag) as the first entry.
        """
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Appending '-z' to non-rc tags makes a final release sort
            # above its release candidates under reverse=True.
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # Reject anything not present in the most recently computed list.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
354
c['schedulers'] = []
# Automatic builds: build every target once the watched branch has been
# quiet for a minute.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual builds: a force scheduler bound to the "00_force_build"
# pseudo-builder, offering target and (optional) release-tag selection.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            # All codebase fields are fixed: the repo is configured above.
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
408
409 ####### BUILDERS
410
411 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
412 # what steps, and which workers can execute them. Note that any particular build will
413 # only take place on one worker.
414
# Mapping of user-requested clean levels (the "clean" build property) to
# the make target implementing each level.
CleanTargetMap = [
    ["tools", "tools/clean"],
    ["chain", "toolchain/clean"],
    ["linux", "target/linux/clean"],
    ["dir", "dirclean"],
    ["dist", "distclean"],
]
422
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern*."""
    def CheckCleanProperty(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return CheckCleanProperty
432
def IsSharedWorkdir(step):
    # True when this worker uses a single shared work directory.
    return True if step.getProperty("shared_wd") else False

def IsCleanupRequested(step):
    # Cleanup never applies to shared workdirs.
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    # Expiry applies only to non-shared workdirs that are not being
    # cleaned up anyway.
    return not IsSharedWorkdir(step) and not IsCleanupRequested(step)

def IsGitFreshRequested(step):
    # A cleanup build also gets a fresh git checkout.
    return bool(step.getProperty("do_cleanup"))

def IsGitCleanRequested(step):
    return not IsGitFreshRequested(step)

def IsTaggingRequested(step):
    # True when a well-formed release tag ("X.Y.Z" or "X.Y.Z-rcN") was forced.
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))

def IsNoTaggingRequested(step):
    return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
    # True on release branches, i.e. whenever we do not track master.
    return not (repo_branch == "master")
473
def GetBaseVersion(branch=None):
    """Return the numeric base version (e.g. "21.02") encoded in a release
    branch name of the form "<name>-<major>.<minor>", or "master" when the
    branch does not follow that naming scheme.

    Generalized with an optional *branch* argument (defaulting to the
    configured repo_branch) so the logic is reusable and testable;
    existing no-argument callers behave exactly as before.
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
479
@properties.renderer
def GetVersionPrefix(props):
    # Download-server directory prefix: "<tag>/" for tagged release builds,
    # "<NN.NN>-SNAPSHOT/" on release branches, "" for master snapshots.
    basever = GetBaseVersion()
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""

@properties.renderer
def GetNumJobs(props):
    # Split the worker's CPUs between its concurrent builds (plus the
    # configured "other_builds" headroom); fall back to a single job.
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))

@properties.renderer
def GetCC(props):
    # Host C compiler discovered by the findbin.pl step, default "gcc".
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"

@properties.renderer
def GetCXX(props):
    # Host C++ compiler discovered by the findbin.pl step, default "g++".
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"

@properties.renderer
def GetCwd(props):
    # Best available guess at the build's working directory on the worker.
    if props.hasProperty("builddir"):
        return props["builddir"]
    if props.hasProperty("workdir"):
        return props["workdir"]
    return "/"

@properties.renderer
def GetCCache(props):
    # Path of the ccache binary when the probe step found one, else "".
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
526
def GetNextBuild(builder, requests):
    """Pick the next request for a builder: tagged (release) requests take
    priority, otherwise the oldest pending request wins."""
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request

    request = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
    return request
535
def MakeEnv(overrides=None, tryccache=False):
    """Environment for make/feeds invocations on the worker.

    CCC/CCXX always carry the compilers detected by the findbin steps.
    With tryccache=True, CC/CXX point at the generated ccache wrapper
    scripts and CCACHE at the detected ccache binary; otherwise CC/CXX
    are the plain compilers and CCACHE is empty. Entries from *overrides*
    win over everything.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'], env['CXX'], env['CCACHE'] = env['CCC'], env['CCXX'], ''
    if overrides is not None:
        env.update(overrides)
    return env
552
@properties.renderer
def NetLockDl(props):
    # Exclusive access to this worker's download lock, when configured.
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        if lock is not None:
            return [lock.access('exclusive')]
    return []

@properties.renderer
def NetLockUl(props):
    # Exclusive access to this worker's upload lock, when configured.
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        if lock is not None:
            return [lock.access('exclusive')]
    return []

@util.renderer
def TagPropertyValue(props):
    # The force scheduler nests its fields under the "options" property;
    # extract the requested tag from it (None when absent).
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
    return None
580
def IsTargetSelected(target):
    """Return a doStepIf predicate that is true when the forced build
    selected this *target* (or "all", the default)."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                chosen = options.get("target", "all")
                if chosen not in ("all", target):
                    return False
        except KeyError:
            pass

        return True

    return CheckTargetProperty
595
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the matching usign public key from a base64-encoded secret key.

    Returns the two-line public key file content ("untrusted comment: ...
    public key\\n<base64>"), or None when *seckey* cannot be decoded. The
    trailing "secret key" in *comment* is rewritten to "public key".
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # Not valid base64 (or not str/bytes at all) -> signing disabled.
        return None

    # NOTE(review): the slices appear to pick the algorithm id, key id and
    # public key material out of the usign secret key blob - confirm against
    # the usign key format before changing them.
    # Bug fix: b64encode() returns bytes; without .decode() the formatted
    # output embedded a literal "b'...'" in the public key file.
    pubkey = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()

    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
604
605
c['builders'] = []

# Serialize dl/ population across concurrent builds on the same worker.
dlLock = locks.WorkerLock("worker_dl")

# Shell helper injected ahead of user commands: succeeds (0) when any file
# in $CHANGED_FILES is outside package/, or belongs to a package that is
# enabled (=y) in the current .config according to tmp/.packagedeps.
# Whitespace is collapsed so the whole function fits one sh -c argument.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
checkBuiltin() {
	local symbol op path file;
	for file in $CHANGED_FILES; do
		case "$file" in
			package/*/*) : ;;
			*) return 0 ;;
		esac;
	done;
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
				;; esac;
			done;
		esac;
	done < tmp/.packagedeps;
	return 1;
}
""").strip()
633
634
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only runs its payload when the checkBuiltin shell
    helper decides one of the changed files is relevant to the current
    .config (otherwise it exits 0 without doing anything)."""

    def _quote(self, arg):
        # Shell-quote a single argument; plain words pass through untouched.
        # (Parameter renamed from 'str', which shadowed the builtin.)
        if re.search("[^a-zA-Z0-9/_.-]", arg):
            return "'%s'" %(re.sub("'", "'\"'\"'", arg))
        return arg

    def setCommand(self, command):
        # Bug fix: the original tested isinstance(command, (str, unicode));
        # 'unicode' does not exist on Python 3 and raised a NameError for
        # list-form commands. A non-string command is a list of arguments
        # that must be quoted and joined.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Expose the files touched by the triggering changes so the
        # checkBuiltin helper can inspect them via $CHANGED_FILES.
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullWorkerEnv = workerEnv.copy()
        fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullWorkerEnv
662
# Every phase-1 worker may run the force pseudo-builder.
workerNames = [worker.workername for worker in c['workers']]

# The force builder runs no steps of its own; it only gives the force
# scheduler a builder to attach to.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name="00_force_build",
    workernames=workerNames,
    factory=force_factory))
674
675 for target in targets:
676 ts = target.split('/')
677
678 factory = BuildFactory()
679
680 # setup shared work directory if required
681 factory.addStep(ShellCommand(
682 name = "sharedwd",
683 description = "Setting up shared work directory",
684 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
685 workdir = ".",
686 haltOnFailure = True,
687 doStepIf = IsSharedWorkdir))
688
689 # find number of cores
690 factory.addStep(SetPropertyFromCommand(
691 name = "nproc",
692 property = "nproc",
693 description = "Finding number of CPUs",
694 command = ["nproc"]))
695
696 # find gcc and g++ compilers
697 factory.addStep(FileDownload(
698 name = "dlfindbinpl",
699 mastersrc = scripts_dir + '/findbin.pl',
700 workerdest = "../findbin.pl",
701 mode = 0o755))
702
703 factory.addStep(SetPropertyFromCommand(
704 name = "gcc",
705 property = "cc_command",
706 description = "Finding gcc command",
707 command = [
708 "../findbin.pl", "gcc",
709 cc_version[0] if cc_version is not None else '',
710 cc_version[1] if cc_version is not None else ''
711 ],
712 haltOnFailure = True))
713
714 factory.addStep(SetPropertyFromCommand(
715 name = "g++",
716 property = "cxx_command",
717 description = "Finding g++ command",
718 command = [
719 "../findbin.pl", "g++",
720 cc_version[0] if cc_version is not None else '',
721 cc_version[1] if cc_version is not None else ''
722 ],
723 haltOnFailure = True))
724
725 # see if ccache is available
726 factory.addStep(SetPropertyFromCommand(
727 property = "ccache_command",
728 command = ["which", "ccache"],
729 description = "Testing for ccache command",
730 haltOnFailure = False,
731 flunkOnFailure = False,
732 warnOnFailure = False,
733 ))
734
735 # expire tree if needed
736 if tree_expire > 0:
737 factory.addStep(FileDownload(
738 name = "dlexpiresh",
739 doStepIf = IsExpireRequested,
740 mastersrc = scripts_dir + '/expire.sh',
741 workerdest = "../expire.sh",
742 mode = 0o755))
743
744 factory.addStep(ShellCommand(
745 name = "expire",
746 description = "Checking for build tree expiry",
747 command = ["./expire.sh", str(tree_expire)],
748 workdir = ".",
749 haltOnFailure = True,
750 doStepIf = IsExpireRequested,
751 timeout = 2400))
752
753 # cleanup.sh if needed
754 factory.addStep(FileDownload(
755 name = "dlcleanupsh",
756 mastersrc = scripts_dir + '/cleanup.sh',
757 workerdest = "../cleanup.sh",
758 mode = 0o755,
759 doStepIf = IsCleanupRequested))
760
761 factory.addStep(ShellCommand(
762 name = "cleanold",
763 description = "Cleaning previous builds",
764 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
765 workdir = ".",
766 haltOnFailure = True,
767 doStepIf = IsCleanupRequested,
768 timeout = 2400))
769
770 factory.addStep(ShellCommand(
771 name = "cleanup",
772 description = "Cleaning work area",
773 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
774 workdir = ".",
775 haltOnFailure = True,
776 doStepIf = IsCleanupRequested,
777 timeout = 2400))
778
779 # user-requested clean targets
780 for tuple in CleanTargetMap:
781 factory.addStep(ShellCommand(
782 name = tuple[1],
783 description = 'User-requested "make %s"' % tuple[1],
784 command = ["make", tuple[1], "V=s"],
785 env = MakeEnv(),
786 doStepIf = IsMakeCleanRequested(tuple[0])
787 ))
788
789 # Workaround bug when switching from a checked out tag back to a branch
790 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
791 factory.addStep(ShellCommand(
792 name = "gitcheckout",
793 description = "Ensure that Git HEAD is sane",
794 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
795 haltOnFailure = True))
796
797 # check out the source
798 # Git() runs:
799 # if repo doesn't exist: 'git clone repourl'
800 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
801 # 'git fetch -t repourl branch; git reset --hard revision'
802 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
803 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
804 factory.addStep(Git(
805 name = "gitclean",
806 repourl = repo_url,
807 branch = repo_branch,
808 mode = 'full',
809 method = 'clean',
810 haltOnFailure = True,
811 doStepIf = IsGitCleanRequested,
812 ))
813
814 factory.addStep(Git(
815 name = "gitfresh",
816 repourl = repo_url,
817 branch = repo_branch,
818 mode = 'full',
819 method = 'fresh',
820 haltOnFailure = True,
821 doStepIf = IsGitFreshRequested,
822 ))
823
824 # update remote refs
825 factory.addStep(ShellCommand(
826 name = "fetchrefs",
827 description = "Fetching Git remote refs",
828 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
829 haltOnFailure = True
830 ))
831
832 # switch to tag
833 factory.addStep(ShellCommand(
834 name = "switchtag",
835 description = "Checking out Git tag",
836 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
837 haltOnFailure = True,
838 doStepIf = IsTaggingRequested
839 ))
840
841 # Verify that Git HEAD points to a tag or branch
842 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
843 factory.addStep(ShellCommand(
844 name = "gitverify",
845 description = "Ensure that Git HEAD is pointing to a branch or tag",
846 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
847 haltOnFailure = True))
848
849 factory.addStep(ShellCommand(
850 name = "rmtmp",
851 description = "Remove tmp folder",
852 command=["rm", "-rf", "tmp/"]))
853
854 # feed
855 # factory.addStep(ShellCommand(
856 # name = "feedsconf",
857 # description = "Copy the feeds.conf",
858 # command='''cp ~/feeds.conf ./feeds.conf''' ))
859
860 # feed
861 factory.addStep(ShellCommand(
862 name = "rmfeedlinks",
863 description = "Remove feed symlinks",
864 command=["rm", "-rf", "package/feeds/"]))
865
866 factory.addStep(StringDownload(
867 name = "ccachecc",
868 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
869 workerdest = "../ccache_cc.sh",
870 mode = 0o755,
871 ))
872
873 factory.addStep(StringDownload(
874 name = "ccachecxx",
875 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
876 workerdest = "../ccache_cxx.sh",
877 mode = 0o755,
878 ))
879
880 # Git SSH
881 if git_ssh:
882 factory.addStep(StringDownload(
883 name = "dlgitclonekey",
884 s = git_ssh_key,
885 workerdest = "../git-clone.key",
886 mode = 0o600,
887 ))
888
889 factory.addStep(ShellCommand(
890 name = "patchfeedsconf",
891 description = "Patching feeds.conf",
892 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
893 haltOnFailure = True
894 ))
895
896 # feed
897 factory.addStep(ShellCommand(
898 name = "updatefeeds",
899 description = "Updating feeds",
900 command=["./scripts/feeds", "update"],
901 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
902 haltOnFailure = True
903 ))
904
905 # Git SSH
906 if git_ssh:
907 factory.addStep(ShellCommand(
908 name = "rmfeedsconf",
909 description = "Removing feeds.conf",
910 command=["rm", "feeds.conf"],
911 haltOnFailure = True
912 ))
913
914 # feed
915 factory.addStep(ShellCommand(
916 name = "installfeeds",
917 description = "Installing feeds",
918 command=["./scripts/feeds", "install", "-a"],
919 env = MakeEnv(tryccache=True),
920 haltOnFailure = True
921 ))
922
923 # seed config
924 if config_seed is not None:
925 factory.addStep(StringDownload(
926 name = "dlconfigseed",
927 s = config_seed + '\n',
928 workerdest = ".config",
929 mode = 0o644
930 ))
931
932 # configure
933 factory.addStep(ShellCommand(
934 name = "newconfig",
935 description = "Seeding .config",
936 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
937 ))
938
939 factory.addStep(ShellCommand(
940 name = "delbin",
941 description = "Removing output directory",
942 command = ["rm", "-rf", "bin/"]
943 ))
944
945 factory.addStep(ShellCommand(
946 name = "defconfig",
947 description = "Populating .config",
948 command = ["make", "defconfig"],
949 env = MakeEnv()
950 ))
951
952 # check arch
953 factory.addStep(ShellCommand(
954 name = "checkarch",
955 description = "Checking architecture",
956 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
957 logEnviron = False,
958 want_stdout = False,
959 want_stderr = False,
960 haltOnFailure = True
961 ))
962
963 # find libc suffix
964 factory.addStep(SetPropertyFromCommand(
965 name = "libc",
966 property = "libc",
967 description = "Finding libc suffix",
968 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
969
970 # install build key
971 if usign_key is not None:
972 factory.addStep(StringDownload(
973 name = "dlkeybuildpub",
974 s = UsignSec2Pub(usign_key, usign_comment),
975 workerdest = "key-build.pub",
976 mode = 0o600,
977 ))
978
979 factory.addStep(StringDownload(
980 name = "dlkeybuild",
981 s = "# fake private key",
982 workerdest = "key-build",
983 mode = 0o600,
984 ))
985
986 factory.addStep(StringDownload(
987 name = "dlkeybuilducert",
988 s = "# fake certificate",
989 workerdest = "key-build.ucert",
990 mode = 0o600,
991 ))
992
993 # prepare dl
994 factory.addStep(ShellCommand(
995 name = "dldir",
996 description = "Preparing dl/",
997 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
998 logEnviron = False,
999 want_stdout = False
1000 ))
1001
1002 # prepare tar
1003 factory.addStep(ShellCommand(
1004 name = "dltar",
1005 description = "Building and installing GNU tar",
1006 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1007 env = MakeEnv(tryccache=True),
1008 haltOnFailure = True
1009 ))
1010
1011 # populate dl
1012 factory.addStep(ShellCommand(
1013 name = "dlrun",
1014 description = "Populating dl/",
1015 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1016 env = MakeEnv(),
1017 logEnviron = False,
1018 locks = [dlLock.access('exclusive')],
1019 ))
1020
1021 factory.addStep(ShellCommand(
1022 name = "cleanbase",
1023 description = "Cleaning base-files",
1024 command=["make", "package/base-files/clean", "V=s"]
1025 ))
1026
1027 # build
1028 factory.addStep(ShellCommand(
1029 name = "tools",
1030 description = "Building and installing tools",
1031 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
1032 env = MakeEnv(tryccache=True),
1033 haltOnFailure = True
1034 ))
1035
1036 factory.addStep(ShellCommand(
1037 name = "toolchain",
1038 description = "Building and installing toolchain",
1039 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1040 env = MakeEnv(),
1041 haltOnFailure = True
1042 ))
1043
# Compile the target kernel and kernel modules with a build log.
# IGNORE_ERRORS="n m" — presumably tolerates failures in non-default and
# module-only packages; TODO confirm against the OpenWrt build docs.
factory.addStep(ShellCommand(
    name = "kmods",
    description = "Building kmods",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
    env = MakeEnv(),
    #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
    haltOnFailure = True
))
1052
# find kernel version: query LINUX_VERSION, LINUX_RELEASE and
# LINUX_VERMAGIC from target/linux/ and join them with dashes into the
# "kernelversion" build property (used below to name kmods/ directories).
factory.addStep(SetPropertyFromCommand(
    name = "kernelversion",
    property = "kernelversion",
    description = "Finding the effective Kernel version",
    command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
    env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
))
1061
# Clean up leftovers from the package build tree.
factory.addStep(ShellCommand(
    name="pkgclean",
    description="Cleaning up package build",
    command=["make", "package/cleanup", "V=s"],
))
1067
# Compile all packages, then install them into the rootfs staging area.
factory.addStep(ShellCommand(
    name = "pkgbuild",
    description = "Building packages",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
    env = MakeEnv(),
    #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
    haltOnFailure = True
))

# factory.addStep(IfBuiltinShellCommand(
factory.addStep(ShellCommand(
    name = "pkginstall",
    description = "Installing packages",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))
1085
# Generate the opkg package index.  CONFIG_SIGNED_PACKAGES is cleared
# here; signing is performed later on the master (see the "sign" steps).
factory.addStep(ShellCommand(
    name="pkgindex",
    description="Indexing packages",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
    env=MakeEnv(),
    haltOnFailure=True,
))
1093
if enable_kmod_archive and embed_kmod_repository:
    # embed kmod repository. Must happen before 'images'

    # find rootfs staging directory (STAGING_DIR_ROOT), stored in the
    # "stageroot" property for the sed command below.
    factory.addStep(SetPropertyFromCommand(
        name = "stageroot",
        property = "stageroot",
        description = "Finding the rootfs staging directory",
        command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
        env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
        want_stderr = False
    ))

    # files/ is the image file-overlay directory; create the opkg config dir.
    factory.addStep(ShellCommand(
        name = "filesdir",
        description = "Creating file overlay directory",
        command=["mkdir", "-p", "files/etc/opkg"],
        haltOnFailure = True
    ))

    # Rewrite distfeeds.conf into the overlay: after every
    # "src/gz <name>_core <url>/packages" feed line, append a matching
    # "<name>_kmods <url>/kmods/<kernelversion>" line so images know
    # about the per-kernel kmod feed.
    factory.addStep(ShellCommand(
        name = "kmodconfig",
        description = "Embedding kmod repository configuration",
        command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
            "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
        haltOnFailure = True
    ))
1121
#factory.addStep(IfBuiltinShellCommand(
# Build the firmware images and install them under bin/targets/.
factory.addStep(ShellCommand(
    name = "images",
    description = "Building and installing images",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))
1130
# Generate the *.buildinfo metadata files.  Note the "|| true": make
# failures are swallowed, so haltOnFailure can never actually trigger here.
factory.addStep(ShellCommand(
    name="buildinfo",
    description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
    command="make -j1 buildinfo V=s || true",
    env=MakeEnv(),
    haltOnFailure=True,
))

# Generate profiles.json in the target folder (also best-effort via "|| true").
factory.addStep(ShellCommand(
    name="json_overview_image_info",
    description="Generate profiles.json in target folder",
    command="make -j1 json_overview_image_info V=s || true",
    env=MakeEnv(),
    haltOnFailure=True,
))
1146
# Compute the sha256sums files for the produced artifacts (single-job:
# the checksum target is not parallel-safe per the original -j1).
factory.addStep(ShellCommand(
    name="checksums",
    description="Calculating checksums",
    command=["make", "-j1", "checksum", "V=s"],
    env=MakeEnv(),
    haltOnFailure=True,
))
1154
if enable_kmod_archive:
    # Create bin/targets/<t>/<st><libc>/kmods/<kernelversion>/ ...
    factory.addStep(ShellCommand(
        name = "kmoddir",
        description = "Creating kmod directory",
        command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    # ...copy only the kmod-*.ipk packages into it...
    factory.addStep(ShellCommand(
        name = "kmodprepare",
        description = "Preparing kmod archive",
        command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    # ...and index that directory as its own (unsigned) opkg feed.
    factory.addStep(ShellCommand(
        name = "kmodindex",
        description = "Indexing kmod archive",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
            Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        env = MakeEnv(),
        haltOnFailure = True
    ))
1180
# sign: round-trip the checksum/index files through the master, where the
# private keys live.  Flow: pack on worker -> upload to master -> sign via
# signall.sh -> download back -> unpack over the originals.
if ini.has_option("gpg", "key") or usign_key is not None:
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],
        haltOnFailure = True
    ))

    # Collect every sha256sums and Packages file (depth 1-2) into sign.tar.gz.
    factory.addStep(ShellCommand(
        name = "signpack",
        description = "Packing files to sign",
        command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
        haltOnFailure = True
    ))

    factory.addStep(FileUpload(
        workersrc = "sign.tar.gz",
        masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        haltOnFailure = True
    ))

    # signall.sh signs the tarball in place on the master; it reads key
    # material via the config file passed in CONFIG_INI.
    factory.addStep(MasterShellCommand(
        name = "signfiles",
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
        haltOnFailure = True
    ))

    factory.addStep(FileDownload(
        name = "dlsigntargz",
        mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        workerdest = "sign.tar.gz",
        haltOnFailure = True
    ))

    # Overwrite the unsigned files with their signed versions.
    factory.addStep(ShellCommand(
        name = "signunpack",
        description = "Unpacking signed files",
        command = ["tar", "-xzf", "sign.tar.gz"],
        haltOnFailure = True
    ))
1224
# upload: mirror the directory layout under tmp/upload/ first, so the
# remote structure can be created with one rsync before file transfers.
factory.addStep(ShellCommand(
    name = "dirprepare",
    description = "Preparing upload directory structure",
    command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    haltOnFailure = True
))

# Symlink the shared packages-<basever> repository next to the targets.
# Skipped on master-branch builds (doStepIf = IsNoMasterBuild) — NOTE(review):
# confirm naming after the master->main branch rename.
factory.addStep(ShellCommand(
    name = "linkprepare",
    description = "Preparing repository symlink",
    command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
    doStepIf = IsNoMasterBuild,
    haltOnFailure = True
))

if enable_kmod_archive:
    factory.addStep(ShellCommand(
        name = "kmoddirprepare",
        description = "Preparing kmod archive upload directory",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))
1248
# Push the prepared (empty) directory tree to the download server.
factory.addStep(ShellCommand(
    name = "dirupload",
    description = "Uploading directory structure",
    command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))

# download remote sha256sums to 'target-sha256sums'.  Best-effort by
# design (all failure flags off): on a first upload the remote file does
# not exist yet, and sha2rsync.pl below copes with a missing list.
factory.addStep(ShellCommand(
    name = "target-sha256sums",
    description = "Fetching remote sha256sums for target",
    command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    logEnviron = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
))
1269
# build list of files to upload: fetch the sha2rsync.pl helper from the
# master; the "buildlist" step below runs it to produce 'rsynclist'.
factory.addStep(FileDownload(
    name="dlsha2rsyncpl",
    mastersrc=scripts_dir + '/sha2rsync.pl',
    workerdest="../sha2rsync.pl",
    mode=0o755,
))
1277
# Compare the remote checksum list against the local one and write the
# resulting minimal file list to 'rsynclist'.
factory.addStep(ShellCommand(
    name="buildlist",
    description="Building list of files to upload",
    command=["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
    haltOnFailure=True,
))
1284
# Fetch the rsync wrapper script used by all upload/prune steps below.
factory.addStep(FileDownload(
    name="dlrsync.sh",
    mastersrc=scripts_dir + '/rsync.sh',
    workerdest="../rsync.sh",
    mode=0o755,
))
1291
# upload new files and update existing ones (only those in 'rsynclist');
# /kmods/ is excluded because it is uploaded separately below.
factory.addStep(ShellCommand(
    name = "targetupload",
    description = "Uploading target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))

# delete files which don't exist locally: "--delete --existing
# --ignore-existing" removes remote-only files without transferring any.
factory.addStep(ShellCommand(
    name = "targetprune",
    description = "Pruning target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))
1315
if enable_kmod_archive:
    # Mirror the per-kernel kmod feed (with --delete: remote is made to
    # match local exactly).
    factory.addStep(ShellCommand(
        name = "kmodupload",
        description = "Uploading kmod archive",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
    ))
1327
# Mirror newly downloaded source archives, if a source mirror is configured.
if rsync_src_url is not None:
    # List regular, non-empty dl/ files newer than .config, skipping
    # hidden files and the OpenWrt-internal *.hash / *.dl files.
    factory.addStep(ShellCommand(
        name = "sourcelist",
        description = "Finding source archives to upload",
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
        haltOnFailure = True
    ))

    # --size-only: source tarballs are immutable, so size suffices to
    # detect already-mirrored files.
    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
            [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = True,
        logEnviron = False,
    ))
1345
# Deliberately disabled ("if False:"), kept for reference: per-target
# package upload.
if False:
    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False,
    ))

# logs — also deliberately disabled; would mirror logs/ to the server.
if False:
    factory.addStep(ShellCommand(
        name = "upload",
        description = "Uploading logs",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        alwaysRun = True,
        logEnviron = False,
    ))
1371
# Diagnostics only: report free disk space and build-tree size.  Both
# steps always run and never affect the build result.
factory.addStep(ShellCommand(
    name="df",
    description="Reporting disk usage",
    command=["df", "-h", "."],
    env={'LC_ALL': 'C'},
    haltOnFailure=False,
    flunkOnFailure=False,
    warnOnFailure=False,
    alwaysRun=True,
))

factory.addStep(ShellCommand(
    name="du",
    description="Reporting estimated file space usage",
    command=["du", "-sh", "."],
    env={'LC_ALL': 'C'},
    haltOnFailure=False,
    flunkOnFailure=False,
    warnOnFailure=False,
    alwaysRun=True,
))
1393
# Diagnostics only: print ccache statistics.  PATH is extended with the
# staging host bin dir so the build's own ccache binary is found.
factory.addStep(ShellCommand(
    name = "ccachestat",
    description = "Reporting ccache stats",
    command=["ccache", "-s"],
    env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
    want_stderr = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True,
))
1405
# Register the per-target builder plus a Triggerable scheduler, and add a
# matching Trigger step to the force factory so a forced build can fan
# out to selected targets only (doStepIf = IsTargetSelected).
c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % target,
    description = "Triggering %s build" % target,
    schedulerNames = [ "trigger_%s" % target ],
    set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
    doStepIf = IsTargetSelected(target)
))
1416
1417
1418 ####### STATUS TARGETS
1419
# Build results are published through the web UI ('www' configuration
# below) and through reporter services such as the IRC bot configured in
# the 'services' section.
1423
# Web UI: only enabled when [phase1] status_bind is configured.
if ini.has_option("phase1", "status_bind"):
    c['www'] = {
        'port': ini.get("phase1", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    # Optional single-user basic auth; that user gets the "admins" role,
    # and all control endpoints are restricted to admins.
    if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
        )
1442
c['services'] = []

# IRC notifications: configured only when host, nickname and channel are
# all present in the [irc] ini section.
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
    irc_host = ini.get("irc", "host")
    irc_chan = ini.get("irc", "channel")
    irc_nick = ini.get("irc", "nickname")

    # Optional settings fall back to port 6667 / no password.
    irc_port = ini.getint("irc", "port") if ini.has_option("irc", "port") else 6667
    irc_pass = ini.get("irc", "password") if ini.has_option("irc", "password") else None

    irc = reporters.IRC(irc_host, irc_nick,
        port=irc_port,
        password=irc_pass,
        channels=[irc_chan],
        notify_events=['exception', 'problem', 'recovery'],
    )

    c['services'].append(irc)
1465
# Turn openwrt.org git clone URLs into clickable gitweb commit links in
# the web UI (\1 = repository name, %s = commit hash).
c['revlink'] = util.RevlinkMatch([
    r'https://git.openwrt.org/openwrt/(.*).git'
],
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1470
####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of buildbot's anonymous network usage reporting.
c['buildbotNetUsageData'] = None